From de7feb5aefe2d2eeddea0884056c93c969523f4f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 21 Feb 2020 13:40:35 +0100 Subject: [PATCH 001/111] doc: update 2020.1 release notes to include minor versions --- doc/2020.1.html | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/2020.1.html b/doc/2020.1.html index bc1279578..f3570e8d9 100644 --- a/doc/2020.1.html +++ b/doc/2020.1.html @@ -157,3 +157,17 @@

General bug fixes

  • Some files generated by goyacc weren't recognized as being generated.
  • staticcheck no longer fails to check packages that consist exclusively of tests.
  • + + +

    Staticcheck 2020.1.1 release notes

    + +

    + The 2020.1 release neglected to update the version string stored in + the binary, causing staticcheck -version to incorrectly emit (no version). +

    + +

    Staticcheck 2020.1.2 release notes

    + +

    + The 2020.1.1 release incorrectly identified itself as version 2020.1. +

    From 5bf9fe98906dff156bdb9de7b4e393a7ef60c242 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 21 Feb 2020 21:33:16 +0100 Subject: [PATCH 002/111] lint: don't store token.Pos in the cache The token.Pos stored in the cache will not be correct once we load it from the cache, because the fset will have changed. This would lead to bogus position information for unhandled linter directives. --- lint/lint.go | 4 ++-- lint/runner.go | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index d1f095eb6..73fa322f9 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -64,7 +64,7 @@ type LineIgnore struct { Line int Checks []string Matched bool - Pos token.Pos + Pos token.Position } func (li *LineIgnore) Match(p Problem) bool { @@ -374,7 +374,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error continue } p := Problem{ - Pos: DisplayPosition(pkg.Fset, ig.Pos), + Pos: ig.Pos, Message: "this linter directive didn't match anything; should it be removed?", Check: "", } diff --git a/lint/runner.go b/lint/runner.go index 3235dce82..74106ced8 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -1031,7 +1031,7 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { File: pos.Filename, Line: pos.Line, Checks: checks, - Pos: c.Pos(), + Pos: DisplayPosition(pkg.Fset, c.Pos()), } case "file-ignore": ig = &FileIgnore{ From 37fcbfbb57c58f156decf06c733719a8f3005045 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 21 Feb 2020 23:23:57 +0100 Subject: [PATCH 003/111] lint: deduplicate line ignores Account for analyses (U1000 specifically) that don't emit problems for both a package and its test variant. 
--- lint/lint.go | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/lint/lint.go b/lint/lint.go index 73fa322f9..1a70e0c29 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -325,8 +325,23 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error } var problems []Problem + // Deduplicate line ignores. When U1000 processes a package and + // its test variant, it will only emit a single problem for an + // unused object, not two problems. We will, however, have two + // line ignores, one per package. Without deduplication, one line + // ignore will be marked as matched, while the other one won't, + // subsequently reporting a "this linter directive didn't match + // anything" error. + ignores := map[token.Position]Ignore{} for _, pkg := range pkgs { for _, ig := range pkg.ignores { + if lig, ok := ig.(*LineIgnore); ok { + ig = ignores[lig.Pos] + if ig == nil { + ignores[lig.Pos] = lig + ig = lig + } + } for i := range pkg.problems { p := &pkg.problems[i] if ig.Match(*p) { @@ -354,6 +369,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error if !ok { continue } + ig = ignores[ig.Pos].(*LineIgnore) if ig.Matched { continue } From 709e8b461c1be7861a110889599bcdd09677a272 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 22 Feb 2020 12:45:24 +0100 Subject: [PATCH 004/111] doc: add 2020.1.3 release notes --- doc/2020.1.html | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/doc/2020.1.html b/doc/2020.1.html index f3570e8d9..8c4ce6680 100644 --- a/doc/2020.1.html +++ b/doc/2020.1.html @@ -7,6 +7,9 @@
  • Changed checks
  • General bug fixes
  • +
  • Staticcheck 2020.1.1 release notes
  • +
  • Staticcheck 2020.1.2 release notes
  • +
  • Staticcheck 2020.1.3 release notes
  • Introduction to Staticcheck 2020.1

    @@ -171,3 +174,22 @@

    Staticcheck 2020.1.2 release notes

    The 2020.1.1 release incorrectly identified itself as version 2020.1.

    + +

    Staticcheck 2020.1.3 release notes

    + +

    + This release fixes two bugs involving //lint:ignore directives: + +

      +
    • + When ignoring U1000 and checking a package that contains tests, + Staticcheck would incorrectly complain that the linter directive + didn't match any problems, even when it did. +
    • + +
    • + On repeated runs, the position information for a this linter directive didn't match anything report + would either be missing, or be wildly incorrect. +
    • +
    +

    From 9b987539b3110a8065545705f2f5c97ac136bf4b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 22 Feb 2020 12:47:05 +0100 Subject: [PATCH 005/111] doc: update install instructions to refer to latest release --- doc/staticcheck.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index ab2e3f9e7..c0ed953f9 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -22,12 +22,12 @@

    Installation

    If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. - It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1. + It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1.3. One way of doing so would be as follows:

    cd $GOPATH/src/honnef.co/go/tools/cmd/staticcheck
    -git checkout 2020.1
    +git checkout 2020.1.3
     go get
     go install
     
    From c702d0a1c8d58bb55e039d028e92ae714040936c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 22 Feb 2020 12:51:04 +0100 Subject: [PATCH 006/111] doc: fix link to JSON formatter --- doc/2020.1.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/2020.1.html b/doc/2020.1.html index 8c4ce6680..14f8df550 100644 --- a/doc/2020.1.html +++ b/doc/2020.1.html @@ -58,7 +58,7 @@

    UI improvements

    When using the text or stylish formatters, related information will - appear as indented lines. The json formatter adds a new field + appear as indented lines. The json formatter adds a new field related to problems, containing position information as well as the message. Editors that use gopls will also display the related information. From dce81a6a0e195a590e6c91f78ccb46e4a747adcc Mon Sep 17 00:00:00 2001 From: Kieran Gorman Date: Thu, 26 Mar 2020 10:25:10 +0000 Subject: [PATCH 007/111] facts: handle overlapping prefix/suffix We have a generated file which starts with the comment: ``` // Code generated DO NOT EDIT. ``` This causes staticcheck to panic, because we have an overlapping prefix and suffix when checking for these magic strings, i.e.: ``` "// Code generated " " DO NOT EDIT." ``` Trying to take a slice where end is before start subsequently panics. Add an explicit check to avoid the out of bounds access. Closes: gh-716 [via git-merge-pr] --- facts/generated.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/facts/generated.go b/facts/generated.go index 3e7aef110..f7f166734 100644 --- a/facts/generated.go +++ b/facts/generated.go @@ -45,6 +45,10 @@ func isGenerated(path string) (Generator, bool) { s = bytes.TrimSuffix(s, crnl) s = bytes.TrimSuffix(s, nl) if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) { + if len(s)-len(suffix) < len(prefix) { + return Unknown, true + } + text := string(s[len(prefix) : len(s)-len(suffix)]) switch text { case "by goyacc.": From 17efdd7901b5d278b9ba09ac34fd19192bc3c6e5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 31 Mar 2020 13:05:21 +0200 Subject: [PATCH 008/111] Address staticcheck failures Make CI pass again. 
--- ir/builder.go | 4 ++-- pattern/pattern.go | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/ir/builder.go b/ir/builder.go index fdf4cb1a9..407a10f81 100644 --- a/ir/builder.go +++ b/ir/builder.go @@ -1210,7 +1210,7 @@ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { conds := make([]Value, 0, len(s.Body.List)) hasDefault := false - done := fn.newBasicBlock(fmt.Sprintf("switch.done")) + done := fn.newBasicBlock("switch.done") if label != nil { label._break = done } @@ -1256,7 +1256,7 @@ func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { } if !hasDefault { - head := fn.newBasicBlock(fmt.Sprintf("switch.head.implicit-default")) + head := fn.newBasicBlock("switch.head.implicit-default") body := fn.newBasicBlock("switch.body.implicit-default") fn.currentBlock = head emitJump(fn, body, s) diff --git a/pattern/pattern.go b/pattern/pattern.go index d74605602..e39de3a82 100644 --- a/pattern/pattern.go +++ b/pattern/pattern.go @@ -331,7 +331,6 @@ func stringify(n Node) string { var parts []string parts = append(parts, v.Type().Name()) for i := 0; i < v.NumField(); i++ { - //lint:ignore S1025 false positive in staticcheck 2019.2.3 parts = append(parts, fmt.Sprintf("%s", v.Field(i))) } return "(" + strings.Join(parts, " ") + ")" From 3e5ac43f46288d1552377836ab1904bc97661d04 Mon Sep 17 00:00:00 2001 From: Alexey Surikov Date: Thu, 27 Feb 2020 03:00:18 +0100 Subject: [PATCH 009/111] lintutil: validate formatter before linting Closes: gh-701 [via git-merge-pr] --- lint/lintutil/util.go | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 7c3dbdec1..278cd267b 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -239,19 +239,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * exit(0) } - ps, err := Lint(cs, cums, fs.Args(), &Options{ - Tags: tags, - LintTests: 
tests, - GoVersion: goVersion, - Config: cfg, - PrintAnalyzerMeasurement: measureAnalyzers, - RepeatAnalyzers: debugRepeat, - }) - if err != nil { - fmt.Fprintln(os.Stderr, err) - exit(1) - } - var f format.Formatter switch formatter { case "text": @@ -265,6 +252,19 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * exit(2) } + ps, err := Lint(cs, cums, fs.Args(), &Options{ + Tags: tags, + LintTests: tests, + GoVersion: goVersion, + Config: cfg, + PrintAnalyzerMeasurement: measureAnalyzers, + RepeatAnalyzers: debugRepeat, + }) + if err != nil { + fmt.Fprintln(os.Stderr, err) + exit(1) + } + var ( total int errors int From f5dad9985ec1f17637161853ddde7e976835b90d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 4 Apr 2020 22:26:01 +0200 Subject: [PATCH 010/111] staticcheck: don't crash when checking dot-imported constants in bitwise ops Closes gh-719 --- staticcheck/lint.go | 4 ++++ staticcheck/lint_test.go | 2 +- .../testdata/src/CheckSillyBitwiseOps_dotImport/foo.go | 3 +++ .../testdata/src/CheckSillyBitwiseOps_dotImport/foo_test.go | 5 +++++ 4 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo.go create mode 100644 staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo_test.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 2ccfb9e6f..430e08787 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -2707,6 +2707,10 @@ func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { if !ok { return } + if obj.Pkg() != pass.Pkg { + // identifier was dot-imported + return + } if v, _ := constant.Int64Val(obj.Val()); v != 0 { return } diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index 7e7fd592d..b024ab0f9 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -55,7 +55,7 @@ func TestAll(t *testing.T) { "SA4013": {{Dir: "CheckDoubleNegation"}}, "SA4014": {{Dir: "CheckRepeatedIfElse"}}, 
"SA4015": {{Dir: "CheckMathInt"}}, - "SA4016": {{Dir: "CheckSillyBitwiseOps"}, {Dir: "CheckSillyBitwiseOps_shadowedIota"}}, + "SA4016": {{Dir: "CheckSillyBitwiseOps"}, {Dir: "CheckSillyBitwiseOps_shadowedIota"}, {Dir: "CheckSillyBitwiseOps_dotImport"}}, "SA4017": {{Dir: "CheckPureFunctions"}}, "SA4018": {{Dir: "CheckSelfAssignment"}}, "SA4019": {{Dir: "CheckDuplicateBuildConstraints"}}, diff --git a/staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo.go b/staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo.go new file mode 100644 index 000000000..455406af0 --- /dev/null +++ b/staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo.go @@ -0,0 +1,3 @@ +package foo + +const X = 0 diff --git a/staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo_test.go b/staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo_test.go new file mode 100644 index 000000000..7160b9d24 --- /dev/null +++ b/staticcheck/testdata/src/CheckSillyBitwiseOps_dotImport/foo_test.go @@ -0,0 +1,5 @@ +package foo_test + +import . "CheckSillyBitwiseOps_dotImport" + +var _ = 1 | X From a4eb707c4ac4bb176e9241aeb886ea7d135f66a4 Mon Sep 17 00:00:00 2001 From: Sourya Vatsyayan Date: Sat, 11 Apr 2020 13:36:51 +0530 Subject: [PATCH 011/111] simple: fix suggested fixes for S1004 Also update x/tools to latest version, as `analysistest.RunWithSuggestedFixes` is very recent. 
Signed-off-by: Sourya Vatsyayan --- go.mod | 4 ++-- go.sum | 12 ++++++++++++ ir/source_test.go | 4 ++-- lint/testutil/util.go | 11 ++++++++--- simple/lint.go | 4 ++-- simple/lint_test.go | 2 +- simple/testdata/src/compare/compare.go | 2 ++ simple/testdata/src/compare/compare.go.golden | 12 ++++++++++++ simple/testdata/src/compare/dummy.go | 3 +++ .../testdata/src/compare_windows/compare-windows.go | 12 ++++++++++++ simple/testdata/src/compare_windows/dummy.go | 3 +++ 11 files changed, 59 insertions(+), 10 deletions(-) create mode 100644 simple/testdata/src/compare/compare.go.golden create mode 100644 simple/testdata/src/compare/dummy.go create mode 100644 simple/testdata/src/compare_windows/compare-windows.go create mode 100644 simple/testdata/src/compare_windows/dummy.go diff --git a/go.mod b/go.mod index 545cd4b37..5a61c1078 100644 --- a/go.mod +++ b/go.mod @@ -7,6 +7,6 @@ require ( github.com/google/renameio v0.1.0 github.com/kisielk/gotool v1.0.0 github.com/rogpeppe/go-internal v1.3.0 - golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e - golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d + golang.org/x/mod v0.2.0 + golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef ) diff --git a/go.sum b/go.sum index 894ea2fa7..9d95629a1 100644 --- a/go.sum +++ b/go.sum @@ -9,23 +9,35 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/rogpeppe/go-internal v1.3.0 h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529 h1:iMGN4xG0cnqj3t+zOM8wUB0BiPKHEwSxEZCvzcbZuvk= golang.org/x/crypto 
v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e h1:JgcxKXxCjrA2tyDP/aNU9K0Ck5Czfk6C7e2tMw7+bSI= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d h1:+R4KGOnez64A81RvjARKc4UT5/tI9ujCIVX+P5KiHuI= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d h1:/iIZNFGxc/a7C3yWjGcnboV+Tkc7mxr+p6fDztwoxuM= golang.org/x/tools 
v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef h1:RHORRhs540cYZYrzgU2CPUyykkwZM78hGdzocOo9P8A= +golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= diff --git a/ir/source_test.go b/ir/source_test.go index 513207027..c3f010f4b 100644 --- a/ir/source_test.go +++ b/ir/source_test.go @@ -52,7 +52,7 @@ func TestObjValueLookup(t *testing.T) { // Each note of the form @ir(x, "BinOp") in testdata/objlookup.go // specifies an expectation that an object named x declared on the // same line is associated with an an ir.Value of type *ir.BinOp. 
- notes, err := expect.Extract(conf.Fset, f) + notes, err := expect.ExtractGo(conf.Fset, f) if err != nil { t.Fatal(err) } @@ -273,7 +273,7 @@ func testValueForExpr(t *testing.T, testfile string) { return true }) - notes, err := expect.Extract(prog.Fset, f) + notes, err := expect.ExtractGo(prog.Fset, f) if err != nil { t.Fatal(err) } diff --git a/lint/testutil/util.go b/lint/testutil/util.go index c93210a44..b4ce68bec 100644 --- a/lint/testutil/util.go +++ b/lint/testutil/util.go @@ -8,8 +8,9 @@ import ( ) type Test struct { - Dir string - Version string + Dir string + Version string + SuggestedFixes bool } func Run(t *testing.T, analyzers map[string]*analysis.Analyzer, tests map[string][]Test) { @@ -27,7 +28,11 @@ func Run(t *testing.T, analyzers map[string]*analysis.Analyzer, tests map[string t.Fatal(err) } } - analysistest.Run(t, analysistest.TestData(), a, test.Dir) + if test.SuggestedFixes { + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), a, test.Dir) + } else { + analysistest.Run(t, analysistest.TestData(), a, test.Dir) + } } }) } diff --git a/simple/lint.go b/simple/lint.go index 91571b6ee..5a9571deb 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -271,8 +271,8 @@ func CheckStringsContains(pass *analysis.Pass) (interface{}, error) { var ( checkBytesCompareQ = pattern.MustParse(`(BinaryExpr (CallExpr (Function "bytes.Compare") args) op@(Or "==" "!=") (BasicLit "INT" "0"))`) - checkBytesCompareRn = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args)`) - checkBytesCompareRe = pattern.MustParse(`(UnaryExpr "!" (CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args))`) + checkBytesCompareRe = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args)`) + checkBytesCompareRn = pattern.MustParse(`(UnaryExpr "!" 
(CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args))`) ) func CheckBytesCompare(pass *analysis.Pass) (interface{}, error) { diff --git a/simple/lint_test.go b/simple/lint_test.go index 2ea3238ea..365f883db 100644 --- a/simple/lint_test.go +++ b/simple/lint_test.go @@ -12,7 +12,7 @@ func TestAll(t *testing.T) { "S1001": {{Dir: "copy"}}, "S1002": {{Dir: "bool-cmp"}}, "S1003": {{Dir: "contains"}}, - "S1004": {{Dir: "compare"}}, + "S1004": {{Dir: "compare", SuggestedFixes: true}, {Dir: "compare_windows"}}, "S1005": {{Dir: "CheckBlankOK"}, {Dir: "receive-blank"}, {Dir: "range_go13", Version: "1.3"}, {Dir: "range_go14", Version: "1.4"}}, "S1006": {{Dir: "for-true"}, {Dir: "generated"}}, "S1007": {{Dir: "regexp-raw"}}, diff --git a/simple/testdata/src/compare/compare.go b/simple/testdata/src/compare/compare.go index 435191635..c5d16aa34 100644 --- a/simple/testdata/src/compare/compare.go +++ b/simple/testdata/src/compare/compare.go @@ -1,3 +1,5 @@ +// +build darwin linux + package pkg import "bytes" diff --git a/simple/testdata/src/compare/compare.go.golden b/simple/testdata/src/compare/compare.go.golden new file mode 100644 index 000000000..c9c8c056f --- /dev/null +++ b/simple/testdata/src/compare/compare.go.golden @@ -0,0 +1,12 @@ +// +build darwin linux + +package pkg + +import "bytes" + +func fn() { + _ = bytes.Equal(nil, nil) // want ` bytes.Equal` + _ = !bytes.Equal(nil, nil) // want `!bytes.Equal` + _ = bytes.Compare(nil, nil) > 0 + _ = bytes.Compare(nil, nil) < 0 +} diff --git a/simple/testdata/src/compare/dummy.go b/simple/testdata/src/compare/dummy.go new file mode 100644 index 000000000..71fe5d93b --- /dev/null +++ b/simple/testdata/src/compare/dummy.go @@ -0,0 +1,3 @@ +// This file exists just to make sure the package is still defined +// when tests DO NOT run on darwin/linux +package pkg diff --git a/simple/testdata/src/compare_windows/compare-windows.go b/simple/testdata/src/compare_windows/compare-windows.go new file mode 100644 index 
000000000..4a2907c7f --- /dev/null +++ b/simple/testdata/src/compare_windows/compare-windows.go @@ -0,0 +1,12 @@ +// +build !linux,!darwin + +package pkg + +import "bytes" + +func fn() { + _ = bytes.Compare(nil, nil) == 0 // want ` bytes.Equal` + _ = bytes.Compare(nil, nil) != 0 // want `!bytes.Equal` + _ = bytes.Compare(nil, nil) > 0 + _ = bytes.Compare(nil, nil) < 0 +} diff --git a/simple/testdata/src/compare_windows/dummy.go b/simple/testdata/src/compare_windows/dummy.go new file mode 100644 index 000000000..08afc55a4 --- /dev/null +++ b/simple/testdata/src/compare_windows/dummy.go @@ -0,0 +1,3 @@ +// This file exists just to make sure the package is still defined +// when tests run on darwin/linux +package pkg From de90c50cde58aeb8f5c7c060910f57f7cbb68279 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 12 Apr 2020 11:44:16 +0200 Subject: [PATCH 012/111] simple: enable tests of suggested fixes on Windows --- .gitattributes | 1 + go.sum | 1 + simple/lint_test.go | 2 +- simple/testdata/src/compare/compare.go | 2 -- simple/testdata/src/compare/compare.go.golden | 2 -- simple/testdata/src/compare/dummy.go | 3 --- .../testdata/src/compare_windows/compare-windows.go | 12 ------------ simple/testdata/src/compare_windows/dummy.go | 3 --- 8 files changed, 3 insertions(+), 23 deletions(-) create mode 100644 .gitattributes delete mode 100644 simple/testdata/src/compare/dummy.go delete mode 100644 simple/testdata/src/compare_windows/compare-windows.go delete mode 100644 simple/testdata/src/compare_windows/dummy.go diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..6c929d480 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +*.golden -text diff --git a/go.sum b/go.sum index 9d95629a1..993aa27af 100644 --- a/go.sum +++ b/go.sum @@ -9,6 +9,7 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/rogpeppe/go-internal 
v1.3.0 h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529 h1:iMGN4xG0cnqj3t+zOM8wUB0BiPKHEwSxEZCvzcbZuvk= diff --git a/simple/lint_test.go b/simple/lint_test.go index 365f883db..a4fadff47 100644 --- a/simple/lint_test.go +++ b/simple/lint_test.go @@ -12,7 +12,7 @@ func TestAll(t *testing.T) { "S1001": {{Dir: "copy"}}, "S1002": {{Dir: "bool-cmp"}}, "S1003": {{Dir: "contains"}}, - "S1004": {{Dir: "compare", SuggestedFixes: true}, {Dir: "compare_windows"}}, + "S1004": {{Dir: "compare", SuggestedFixes: true}}, "S1005": {{Dir: "CheckBlankOK"}, {Dir: "receive-blank"}, {Dir: "range_go13", Version: "1.3"}, {Dir: "range_go14", Version: "1.4"}}, "S1006": {{Dir: "for-true"}, {Dir: "generated"}}, "S1007": {{Dir: "regexp-raw"}}, diff --git a/simple/testdata/src/compare/compare.go b/simple/testdata/src/compare/compare.go index c5d16aa34..435191635 100644 --- a/simple/testdata/src/compare/compare.go +++ b/simple/testdata/src/compare/compare.go @@ -1,5 +1,3 @@ -// +build darwin linux - package pkg import "bytes" diff --git a/simple/testdata/src/compare/compare.go.golden b/simple/testdata/src/compare/compare.go.golden index c9c8c056f..203496d6a 100644 --- a/simple/testdata/src/compare/compare.go.golden +++ b/simple/testdata/src/compare/compare.go.golden @@ -1,5 +1,3 @@ -// +build darwin linux - package pkg import "bytes" diff --git a/simple/testdata/src/compare/dummy.go b/simple/testdata/src/compare/dummy.go deleted file mode 100644 index 71fe5d93b..000000000 --- a/simple/testdata/src/compare/dummy.go +++ /dev/null @@ -1,3 +0,0 @@ -// This file exists just 
to make sure the package is still defined -// when tests DO NOT run on darwin/linux -package pkg diff --git a/simple/testdata/src/compare_windows/compare-windows.go b/simple/testdata/src/compare_windows/compare-windows.go deleted file mode 100644 index 4a2907c7f..000000000 --- a/simple/testdata/src/compare_windows/compare-windows.go +++ /dev/null @@ -1,12 +0,0 @@ -// +build !linux,!darwin - -package pkg - -import "bytes" - -func fn() { - _ = bytes.Compare(nil, nil) == 0 // want ` bytes.Equal` - _ = bytes.Compare(nil, nil) != 0 // want `!bytes.Equal` - _ = bytes.Compare(nil, nil) > 0 - _ = bytes.Compare(nil, nil) < 0 -} diff --git a/simple/testdata/src/compare_windows/dummy.go b/simple/testdata/src/compare_windows/dummy.go deleted file mode 100644 index 08afc55a4..000000000 --- a/simple/testdata/src/compare_windows/dummy.go +++ /dev/null @@ -1,3 +0,0 @@ -// This file exists just to make sure the package is still defined -// when tests run on darwin/linux -package pkg From 7e758a3887f90d0ded5f0446112616758183866b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 22 Apr 2020 16:11:44 +0200 Subject: [PATCH 013/111] staticcheck: handle imports of deprecated github.com/golang/protobuf/proto specially github.com/golang/protobuf has deprecated the proto package, but their protoc-gen-go still imports the package and uses one of its constants, "to enforce a weak dependency on a sufficiently new version of the legacy package". Staticcheck would flag the import of this deprecated package in all code generated by protoc-gen-go. Instead of forcing the project to change their project structure, we choose to ignore such imports in code generated by protoc-gen-go. The import still gets flagged in code not generated by protoc-gen-go. 
Upstream issue: https://github.com/golang/protobuf/issues/1077 --- facts/generated.go | 3 +++ staticcheck/analysis.go | 2 +- staticcheck/lint.go | 6 ++++++ staticcheck/testdata/src/CheckDeprecated/not-protobuf.go | 3 +++ staticcheck/testdata/src/CheckDeprecated/protobuf.go | 5 +++++ .../testdata/src/github.com/golang/protobuf/proto/pkg.go | 4 ++++ 6 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 staticcheck/testdata/src/CheckDeprecated/not-protobuf.go create mode 100644 staticcheck/testdata/src/CheckDeprecated/protobuf.go create mode 100644 staticcheck/testdata/src/github.com/golang/protobuf/proto/pkg.go diff --git a/facts/generated.go b/facts/generated.go index f7f166734..058fd8922 100644 --- a/facts/generated.go +++ b/facts/generated.go @@ -19,6 +19,7 @@ const ( Goyacc Cgo Stringer + ProtocGenGo ) var ( @@ -55,6 +56,8 @@ func isGenerated(path string) (Generator, bool) { return Goyacc, true case "by cmd/cgo;": return Cgo, true + case "by protoc-gen-go.": + return ProtocGenGo, true } if strings.HasPrefix(text, `by "stringer `) { return Stringer, true diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index 75df1e120..6590312d2 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -66,7 +66,7 @@ var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer "SA1018": makeCallCheckerAnalyzer(checkStringsReplaceZeroRules), "SA1019": { Run: CheckDeprecated, - Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated, facts.Generated}, }, "SA1020": makeCallCheckerAnalyzer(checkListenAddressRules), "SA1021": makeCallCheckerAnalyzer(checkBytesEqualIPRules), diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 430e08787..12e83f6c4 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -2931,6 +2931,12 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { p := 
spec.Path.Value path := p[1 : len(p)-1] if depr, ok := deprs.Packages[imp]; ok { + if path == "github.com/golang/protobuf/proto" { + gen, ok := code.Generator(pass, spec.Path.Pos()) + if ok && gen == facts.ProtocGenGo { + return + } + } report.Report(pass, spec, fmt.Sprintf("package %s is deprecated: %s", path, depr.Msg)) } } diff --git a/staticcheck/testdata/src/CheckDeprecated/not-protobuf.go b/staticcheck/testdata/src/CheckDeprecated/not-protobuf.go new file mode 100644 index 000000000..9d94a3ac7 --- /dev/null +++ b/staticcheck/testdata/src/CheckDeprecated/not-protobuf.go @@ -0,0 +1,3 @@ +package pkg + +import _ "github.com/golang/protobuf/proto" // want `Alas, it is deprecated\.` diff --git a/staticcheck/testdata/src/CheckDeprecated/protobuf.go b/staticcheck/testdata/src/CheckDeprecated/protobuf.go new file mode 100644 index 000000000..be3c04ff2 --- /dev/null +++ b/staticcheck/testdata/src/CheckDeprecated/protobuf.go @@ -0,0 +1,5 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. + +package pkg + +import _ "github.com/golang/protobuf/proto" diff --git a/staticcheck/testdata/src/github.com/golang/protobuf/proto/pkg.go b/staticcheck/testdata/src/github.com/golang/protobuf/proto/pkg.go new file mode 100644 index 000000000..1d0160b61 --- /dev/null +++ b/staticcheck/testdata/src/github.com/golang/protobuf/proto/pkg.go @@ -0,0 +1,4 @@ +// Package proto exists. +// +// Deprecated: Alas, it is deprecated. 
+package proto From cd1ad299aeab34165b1db4678cc0e68b1e77e523 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 Apr 2020 11:52:08 +0200 Subject: [PATCH 014/111] Add tests for all suggested fixes --- go.mod | 2 +- go.sum | 6 ++ lint/testutil/util.go | 11 +-- simple/lint_test.go | 2 +- .../src/CheckBlankOK/LintBlankOK.go.golden | 12 +++ .../LintBytesBufferConversions.go.golden | 20 +++++ .../LintDeclareAssign.go.golden | 33 ++++++++ .../LintElaborateSleep.go.golden | 26 ++++++ .../LintErrorsNewSprintf.go.golden | 12 +++ .../LintGuardedDelete.go.golden | 40 +++++++++ .../CheckLoopSlide/LintLoopSlide.go.golden | 21 +++++ .../LintRedundantCanonicalHeaderKey.go.golden | 35 ++++++++ .../LintRedundantSprintf.go.golden | 34 ++++++++ .../LintSimplifyTypeSwitch.go.golden | 33 ++++++++ .../CheckSprintLiteral/CheckSprintLiteral.go | 10 +-- .../CheckSprintLiteral.go.golden | 13 +++ .../LimeTimeUntil_go18.go.golden | 10 +++ .../LintUnnecessaryGuard.go.golden | 47 +++++++++++ .../testdata/src/bool-cmp/bool-cmp.go.golden | 55 ++++++++++++ .../testdata/src/contains/contains.go.golden | 39 +++++++++ simple/testdata/src/convert/convert.go.golden | 32 +++++++ .../src/convert_go17/convert.go.golden | 22 +++++ .../src/convert_go18/convert.go.golden | 22 +++++ simple/testdata/src/copy/copy.go.golden | 24 ++++++ .../src/loop-append/loop-append.go.golden | 76 +++++++++++++++++ .../src/range_go14/range_go14.go.golden | 40 +++++++++ .../src/receive-blank/receive-blank.go.golden | 15 ++++ simple/testdata/src/slicing/slicing.go | 4 +- simple/testdata/src/slicing/slicing.go.golden | 9 ++ .../src/time-since/time-since.go.golden | 9 ++ staticcheck/lint.go | 3 +- .../CheckCanonicalHeaderKey.go | 2 + .../CheckCanonicalHeaderKey.go.golden | 16 ++++ .../CheckDoubleNegation.go.golden | 49 +++++++++++ .../CheckLoopEmptyDefault.go.golden | 30 +++++++ ...eckMissingEnumTypesInDeclaration.go.golden | 68 +++++++++++++++ .../CheckNonOctalFileMode.go.golden | 7 ++ .../CheckTimeSleepConstant.go.golden 
| 35 ++++++++ .../CheckToLowerToUpperComparison.go.golden | 43 ++++++++++ .../CheckUnsafePrintf.go.golden | 22 +++++ .../#CheckUntrappableSignal.go.golden# | 83 +++++++++++++++++++ .../.#CheckUntrappableSignal.go.golden | 1 + .../CheckUntrappableSignal.go.golden | 83 +++++++++++++++++++ .../CheckUntrappableSignal_unix.go.golden | 16 ++++ .../checkStdlibUsageNilContext.go.golden | 24 ++++++ .../checkStdlibUsageSeeker.go.golden | 12 +++ .../CheckHTTPStatusCodes.go.golden | 21 +++++ .../CheckInvisibleCharacters.go.golden | 12 +++ .../CheckYodaConditions.go.golden | 16 ++++ 49 files changed, 1238 insertions(+), 19 deletions(-) create mode 100644 simple/testdata/src/CheckBlankOK/LintBlankOK.go.golden create mode 100644 simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden create mode 100644 simple/testdata/src/CheckDeclareAssign/LintDeclareAssign.go.golden create mode 100644 simple/testdata/src/CheckElaborateSleep/LintElaborateSleep.go.golden create mode 100644 simple/testdata/src/CheckErrorsNewSprintf/LintErrorsNewSprintf.go.golden create mode 100644 simple/testdata/src/CheckGuardedDelete/LintGuardedDelete.go.golden create mode 100644 simple/testdata/src/CheckLoopSlide/LintLoopSlide.go.golden create mode 100644 simple/testdata/src/CheckRedundantCanonicalHeaderKey/LintRedundantCanonicalHeaderKey.go.golden create mode 100644 simple/testdata/src/CheckRedundantSprintf/LintRedundantSprintf.go.golden create mode 100644 simple/testdata/src/CheckSimplifyTypeSwitch/LintSimplifyTypeSwitch.go.golden create mode 100644 simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go.golden create mode 100644 simple/testdata/src/CheckTimeUntil_go18/LimeTimeUntil_go18.go.golden create mode 100644 simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden create mode 100644 simple/testdata/src/bool-cmp/bool-cmp.go.golden create mode 100644 simple/testdata/src/contains/contains.go.golden create mode 100644 
simple/testdata/src/convert/convert.go.golden create mode 100644 simple/testdata/src/convert_go17/convert.go.golden create mode 100644 simple/testdata/src/convert_go18/convert.go.golden create mode 100644 simple/testdata/src/copy/copy.go.golden create mode 100644 simple/testdata/src/loop-append/loop-append.go.golden create mode 100644 simple/testdata/src/range_go14/range_go14.go.golden create mode 100644 simple/testdata/src/receive-blank/receive-blank.go.golden create mode 100644 simple/testdata/src/slicing/slicing.go.golden create mode 100644 simple/testdata/src/time-since/time-since.go.golden create mode 100644 staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go.golden create mode 100644 staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go.golden create mode 100644 staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go.golden create mode 100644 staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden create mode 100644 staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go.golden create mode 100644 staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go.golden create mode 100644 staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go.golden create mode 100644 staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go.golden create mode 100644 staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# create mode 120000 staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden create mode 100644 staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go.golden create mode 100644 staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go.golden create mode 100644 staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden create mode 100644 
staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go.golden create mode 100644 stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go.golden create mode 100644 stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go.golden create mode 100644 stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go.golden diff --git a/go.mod b/go.mod index 5a61c1078..3b3c8b956 100644 --- a/go.mod +++ b/go.mod @@ -8,5 +8,5 @@ require ( github.com/kisielk/gotool v1.0.0 github.com/rogpeppe/go-internal v1.3.0 golang.org/x/mod v0.2.0 - golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef + golang.org/x/tools v0.0.0-20200427214658-4697a2867c88 ) diff --git a/go.sum b/go.sum index 993aa27af..6e1c076f7 100644 --- a/go.sum +++ b/go.sum @@ -36,6 +36,12 @@ golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d h1:/iIZNFGxc/a7C3yWjGcnboV golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef h1:RHORRhs540cYZYrzgU2CPUyykkwZM78hGdzocOo9P8A= golang.org/x/tools v0.0.0-20200410194907-79a7a3126eef/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200414001008-ae52e4b55789 h1:XIwSFCdbDLy0ZuF/kFMSCWxxlGKE7SYIQF8heqDkFc0= +golang.org/x/tools v0.0.0-20200414001008-ae52e4b55789/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e h1:3Dzrrxi54Io7Aoyb0PYLsI47K2TxkRQg+cqUn+m04do= +golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200427214658-4697a2867c88 h1:Nj7oNnL9tSACMt2JvszZN6P4IXiy1t6E/YRMr7YtaSw= +golang.org/x/tools v0.0.0-20200427214658-4697a2867c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/lint/testutil/util.go b/lint/testutil/util.go index b4ce68bec..999921bd5 100644 --- a/lint/testutil/util.go +++ b/lint/testutil/util.go @@ -8,9 +8,8 @@ import ( ) type Test struct { - Dir string - Version string - SuggestedFixes bool + Dir string + Version string } func Run(t *testing.T, analyzers map[string]*analysis.Analyzer, tests map[string][]Test) { @@ -28,11 +27,7 @@ func Run(t *testing.T, analyzers map[string]*analysis.Analyzer, tests map[string t.Fatal(err) } } - if test.SuggestedFixes { - analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), a, test.Dir) - } else { - analysistest.Run(t, analysistest.TestData(), a, test.Dir) - } + analysistest.RunWithSuggestedFixes(t, analysistest.TestData(), a, test.Dir) } }) } diff --git a/simple/lint_test.go b/simple/lint_test.go index a4fadff47..2ea3238ea 100644 --- a/simple/lint_test.go +++ b/simple/lint_test.go @@ -12,7 +12,7 @@ func TestAll(t *testing.T) { "S1001": {{Dir: "copy"}}, "S1002": {{Dir: "bool-cmp"}}, "S1003": {{Dir: "contains"}}, - "S1004": {{Dir: "compare", SuggestedFixes: true}}, + "S1004": {{Dir: "compare"}}, "S1005": {{Dir: "CheckBlankOK"}, {Dir: "receive-blank"}, {Dir: "range_go13", Version: "1.3"}, {Dir: "range_go14", Version: "1.4"}}, "S1006": {{Dir: "for-true"}, {Dir: "generated"}}, "S1007": {{Dir: "regexp-raw"}}, diff --git a/simple/testdata/src/CheckBlankOK/LintBlankOK.go.golden b/simple/testdata/src/CheckBlankOK/LintBlankOK.go.golden new file mode 100644 index 000000000..6d46c88d5 --- /dev/null +++ b/simple/testdata/src/CheckBlankOK/LintBlankOK.go.golden @@ -0,0 +1,12 @@ +package pkg + +func fn() { + var m map[int]int + var ch chan int + var fn func() (int, bool) + + x := m[0] // want `unnecessary assignment to the blank identifier` + x = <-ch // want `unnecessary assignment to the 
blank identifier` + x, _ = fn() + _ = x +} diff --git a/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden b/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden new file mode 100644 index 000000000..7c2be7db5 --- /dev/null +++ b/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden @@ -0,0 +1,20 @@ +package pkg + +import ( + "bytes" +) + +func fn() { + buf := bytes.NewBufferString("str") + _ = buf.String() // want `should use buf\.String\(\) instead of string\(buf\.Bytes\(\)\)` + _ = buf.Bytes() // want `should use buf\.Bytes\(\) instead of \[\]byte\(buf\.String\(\)\)` + + m := map[string]*bytes.Buffer{"key": buf} + _ = m["key"].String() // want `should use m\["key"\]\.String\(\) instead of string\(m\["key"\]\.Bytes\(\)\)` + _ = m["key"].Bytes() // want `should use m\["key"\]\.Bytes\(\) instead of \[\]byte\(m\["key"\]\.String\(\)\)` + + string := func(_ interface{}) interface{} { + return nil + } + _ = string(m["key"].Bytes()) +} diff --git a/simple/testdata/src/CheckDeclareAssign/LintDeclareAssign.go.golden b/simple/testdata/src/CheckDeclareAssign/LintDeclareAssign.go.golden new file mode 100644 index 000000000..4617e281a --- /dev/null +++ b/simple/testdata/src/CheckDeclareAssign/LintDeclareAssign.go.golden @@ -0,0 +1,33 @@ +package pkg + +func fn() { + var x int = 1 + _ = x + + var y interface{} = 1 + _ = y + + if true { + var x string = "" + _ = x + } + + var z []string + z = append(z, "") + _ = z + + var f func() + f = func() { f() } + _ = f + + var a int + a = 1 + a = 2 + _ = a + + var b int + b = 1 + // do stuff + b = 2 + _ = b +} diff --git a/simple/testdata/src/CheckElaborateSleep/LintElaborateSleep.go.golden b/simple/testdata/src/CheckElaborateSleep/LintElaborateSleep.go.golden new file mode 100644 index 000000000..b838904e8 --- /dev/null +++ b/simple/testdata/src/CheckElaborateSleep/LintElaborateSleep.go.golden @@ -0,0 +1,26 @@ +package pkg + +import ( + 
"fmt" + "time" +) + +func fn() { + time.Sleep(0) + + time.Sleep(0) + + select { // want `should use time.Sleep` + case <-time.After(0): + fmt.Println("yay") + } + + const d = 0 + time.Sleep(d) + + var ch chan int + select { + case <-time.After(0): + case <-ch: + } +} diff --git a/simple/testdata/src/CheckErrorsNewSprintf/LintErrorsNewSprintf.go.golden b/simple/testdata/src/CheckErrorsNewSprintf/LintErrorsNewSprintf.go.golden new file mode 100644 index 000000000..74756fe12 --- /dev/null +++ b/simple/testdata/src/CheckErrorsNewSprintf/LintErrorsNewSprintf.go.golden @@ -0,0 +1,12 @@ +package pkg + +import ( + "errors" + "fmt" +) + +func fn() { + _ = fmt.Errorf("%d", 0) + _ = errors.New("") + _ = fmt.Errorf("%d", 0) // want `should use fmt\.Errorf` +} diff --git a/simple/testdata/src/CheckGuardedDelete/LintGuardedDelete.go.golden b/simple/testdata/src/CheckGuardedDelete/LintGuardedDelete.go.golden new file mode 100644 index 000000000..2fcac5f6e --- /dev/null +++ b/simple/testdata/src/CheckGuardedDelete/LintGuardedDelete.go.golden @@ -0,0 +1,40 @@ +// Package pkg ... 
+package pkg + +func fn(m map[int]int) { + delete(m, 0) + if _, ok := m[0]; !ok { + delete(m, 0) + } + if _, ok := m[0]; ok { + println("deleting") + delete(m, 0) + } + if v, ok := m[0]; ok && v > 0 { + delete(m, 0) + } + + var key int + delete(m, key) + if _, ok := m[key]; ok { + delete(m, 0) + } + if _, ok := m[key]; ok { + delete(m, key) + } else { + println("not deleted") + } + + var ok bool + if _, ok = m[key]; ok { + delete(m, 0) + } + if ok { + println("deleted") + } + + delete := func(a, b interface{}) {} + if _, ok := m[0]; ok { + delete(m, 0) + } +} diff --git a/simple/testdata/src/CheckLoopSlide/LintLoopSlide.go.golden b/simple/testdata/src/CheckLoopSlide/LintLoopSlide.go.golden new file mode 100644 index 000000000..b63e3b80d --- /dev/null +++ b/simple/testdata/src/CheckLoopSlide/LintLoopSlide.go.golden @@ -0,0 +1,21 @@ +package pkg + +func fn() { + var n int + var bs []int + var offset int + + copy(bs[:n], bs[offset:]) + + for i := 1; i < n; i++ { // not currently supported + bs[i] = bs[offset+i] + } + + for i := 1; i < n; i++ { // not currently supported + bs[i] = bs[i+offset] + } + + for i := 0; i <= n; i++ { + bs[i] = bs[offset+i] + } +} diff --git a/simple/testdata/src/CheckRedundantCanonicalHeaderKey/LintRedundantCanonicalHeaderKey.go.golden b/simple/testdata/src/CheckRedundantCanonicalHeaderKey/LintRedundantCanonicalHeaderKey.go.golden new file mode 100644 index 000000000..662d35704 --- /dev/null +++ b/simple/testdata/src/CheckRedundantCanonicalHeaderKey/LintRedundantCanonicalHeaderKey.go.golden @@ -0,0 +1,35 @@ +package pkg + +import ( + "net/http" + "strings" +) + +func fn1() { + var headers http.Header + + // Matches + headers.Add("test", "test") // want `calling net/http.CanonicalHeaderKey on the 'key' argument of` + headers.Del("test") // want `calling net/http.CanonicalHeaderKey on the 'key' argument of` + headers.Get("test") // want `calling net/http.CanonicalHeaderKey on the 'key' argument of` + headers.Set("test", "test") // want `calling 
net/http.CanonicalHeaderKey on the 'key' argument of` + + // Non-matches + headers.Add("test", "test") + headers.Del("test") + headers.Get("test") + headers.Set("test", "test") + + headers.Add("test", http.CanonicalHeaderKey("test")) + headers.Set("test", http.CanonicalHeaderKey("test")) + + headers.Add(http.CanonicalHeaderKey("test")+"1", "test") + headers.Del(http.CanonicalHeaderKey("test") + "1") + headers.Get(http.CanonicalHeaderKey("test") + "1") + headers.Set(http.CanonicalHeaderKey("test")+"1", "test") + + headers.Add(strings.ToUpper(http.CanonicalHeaderKey("test")), "test") + headers.Del(strings.ToUpper(http.CanonicalHeaderKey("test"))) + headers.Get(strings.ToUpper(http.CanonicalHeaderKey("test"))) + headers.Set(strings.ToUpper(http.CanonicalHeaderKey("test")), "test") +} diff --git a/simple/testdata/src/CheckRedundantSprintf/LintRedundantSprintf.go.golden b/simple/testdata/src/CheckRedundantSprintf/LintRedundantSprintf.go.golden new file mode 100644 index 000000000..9df9e44ef --- /dev/null +++ b/simple/testdata/src/CheckRedundantSprintf/LintRedundantSprintf.go.golden @@ -0,0 +1,34 @@ +package pkg + +import "fmt" + +type T1 string +type T2 T1 +type T3 int +type T4 int +type T5 int +type T6 string + +func (T3) String() string { return "" } +func (T6) String() string { return "" } +func (T4) String(arg int) string { return "" } +func (T5) String() {} + +func fn() { + var t1 T1 + var t2 T2 + var t3 T3 + var t4 T4 + var t5 T5 + var t6 T6 + _ = "test" // want `is already a string` + _ = string(t1) // want `is a string` + _ = string(t2) // want `is a string` + _ = t3.String() // want `should use String\(\) instead of fmt\.Sprintf` + _ = t3.String() // want `is already a string` + _ = fmt.Sprintf("%s", t4) + _ = fmt.Sprintf("%s", t5) + _ = fmt.Sprintf("%s %s", t1, t2) + _ = fmt.Sprintf("%v", t1) + _ = t6.String() // want `should use String\(\) instead of fmt\.Sprintf` +} diff --git a/simple/testdata/src/CheckSimplifyTypeSwitch/LintSimplifyTypeSwitch.go.golden 
b/simple/testdata/src/CheckSimplifyTypeSwitch/LintSimplifyTypeSwitch.go.golden new file mode 100644 index 000000000..4bb782142 --- /dev/null +++ b/simple/testdata/src/CheckSimplifyTypeSwitch/LintSimplifyTypeSwitch.go.golden @@ -0,0 +1,33 @@ +package pkg + +import "fmt" + +func gen() interface{} { return nil } + +func fn(x, y interface{}) { + switch z := x.(type) { + case int: + _ = z + fmt.Println(x.(int)) + } + switch x.(type) { + case int: + fmt.Println(x.(int), y.(int)) + } + switch x := x.(type) { // want `assigning the result of this type assertion` + case int: + fmt.Println(x) + } + switch x.(type) { + case int: + fmt.Println(x.(string)) + } + switch x.(type) { + case int: + fmt.Println(y.(int)) + } + switch (gen()).(type) { + case int: + fmt.Println((gen()).(int)) + } +} diff --git a/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go b/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go index 4b3244b6a..a3dd2726b 100644 --- a/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go +++ b/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go @@ -3,11 +3,11 @@ package pkg import "fmt" func fn() { - fmt.Sprint("foo") // want `unnecessary use of fmt\.Sprint` - fmt.Sprintf("foo") // want `unnecessary use of fmt\.Sprintf` - fmt.Sprintf("foo %d") - fmt.Sprintf("foo %d", 1) + _ = fmt.Sprint("foo") // want `unnecessary use of fmt\.Sprint` + _ = fmt.Sprintf("foo") // want `unnecessary use of fmt\.Sprintf` + _ = fmt.Sprintf("foo %d") + _ = fmt.Sprintf("foo %d", 1) var x string - fmt.Sprint(x) + _ = fmt.Sprint(x) } diff --git a/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go.golden b/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go.golden new file mode 100644 index 000000000..0df84d541 --- /dev/null +++ b/simple/testdata/src/CheckSprintLiteral/CheckSprintLiteral.go.golden @@ -0,0 +1,13 @@ +package pkg + +import "fmt" + +func fn() { + _ = "foo" // want `unnecessary use of fmt\.Sprint` + _ = "foo" // want `unnecessary 
use of fmt\.Sprintf` + _ = fmt.Sprintf("foo %d") + _ = fmt.Sprintf("foo %d", 1) + + var x string + _ = fmt.Sprint(x) +} diff --git a/simple/testdata/src/CheckTimeUntil_go18/LimeTimeUntil_go18.go.golden b/simple/testdata/src/CheckTimeUntil_go18/LimeTimeUntil_go18.go.golden new file mode 100644 index 000000000..99f9513e0 --- /dev/null +++ b/simple/testdata/src/CheckTimeUntil_go18/LimeTimeUntil_go18.go.golden @@ -0,0 +1,10 @@ +package pkg + +import "time" + +func fn(t time.Time) { + time.Until(t) // want `time\.Until` + t.Sub(t) + t2 := time.Now() + t.Sub(t2) +} diff --git a/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden b/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden new file mode 100644 index 000000000..85446b042 --- /dev/null +++ b/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden @@ -0,0 +1,47 @@ +package pkg + +func fn() { + var m = map[string][]string{} + + m["k1"] = append(m["k1"], "v1", "v2") + + if _, ok := m["k1"]; ok { + m["k1"] = append(m["k1"], "v1", "v2") + } else { + m["k1"] = []string{"v1"} + } + + if _, ok := m["k1"]; ok { + m["k2"] = append(m["k2"], "v1") + } else { + m["k1"] = []string{"v1"} + } + + k1 := "key" + m[k1] = append(m[k1], "v1", "v2") + + // ellipsis is not currently supported + v := []string{"v1", "v2"} + if _, ok := m["k1"]; ok { + m["k1"] = append(m["k1"], v...) 
+ } else { + m["k1"] = v + } + + var m2 map[string]int + m2["k"] += 4 + + if _, ok := m2["k"]; ok { + m2["k"] += 4 + } else { + m2["k"] = 3 + } + + m2["k"]++ + + if _, ok := m2["k"]; ok { + m2["k"] -= 1 + } else { + m2["k"] = 1 + } +} diff --git a/simple/testdata/src/bool-cmp/bool-cmp.go.golden b/simple/testdata/src/bool-cmp/bool-cmp.go.golden new file mode 100644 index 000000000..f0045f337 --- /dev/null +++ b/simple/testdata/src/bool-cmp/bool-cmp.go.golden @@ -0,0 +1,55 @@ +package pkg + +func fn1() bool { return false } +func fn2() bool { return false } + +func fn() { + type T bool + var x T + const t T = false + if x == t { + } + if fn1() { // want `simplified to fn1\(\)` + } + if !fn1() { // want `simplified to !fn1\(\)` + } + if !fn1() { // want `simplified to !fn1\(\)` + } + if fn1() { // want `simplified to fn1\(\)` + } + if fn1() && (fn1() || fn1()) || (fn1() && fn1()) { // want `simplified to \(fn1\(\) && fn1\(\)\)` + } + + if !(fn1() && fn2()) { // want `simplified to !\(fn1\(\) && fn2\(\)\)` + } + + var y bool + for !y { // want `simplified to !y` + } + if !y { // want `simplified to !y` + } + if y { // want `simplified to y` + } + if y { // want `simplified to y` + } + if !y { // want `simplified to !y` + } + if !y { // want `simplified to !y` + } + if y { // want `simplified to y` + } + if y { // want `simplified to y` + } + if !y { // want `simplified to !y` + } + if !y { // want `simplified to !y` + } + if y { // want `simplified to y` + } + if !y == !false { // not matched because we expect true/false on one side, not !false + } + + var z interface{} + if z == true { + } +} diff --git a/simple/testdata/src/contains/contains.go.golden b/simple/testdata/src/contains/contains.go.golden new file mode 100644 index 000000000..a02d1045a --- /dev/null +++ b/simple/testdata/src/contains/contains.go.golden @@ -0,0 +1,39 @@ +package pkg + +import ( + "bytes" + "strings" +) + +func fn() { + _ = strings.ContainsRune("", 'x') // want ` strings\.ContainsRune` + _ 
= strings.ContainsRune("", 'x') // want ` strings\.ContainsRune` + _ = strings.IndexRune("", 'x') > 0 + _ = strings.IndexRune("", 'x') >= -1 + _ = strings.ContainsRune("", 'x') // want ` strings\.ContainsRune` + _ = !strings.ContainsRune("", 'x') // want `!strings\.ContainsRune` + _ = strings.IndexRune("", 'x') != 0 + _ = !strings.ContainsRune("", 'x') // want `!strings\.ContainsRune` + + _ = strings.ContainsAny("", "") // want ` strings\.ContainsAny` + _ = strings.ContainsAny("", "") // want ` strings\.ContainsAny` + _ = strings.IndexAny("", "") > 0 + _ = strings.IndexAny("", "") >= -1 + _ = strings.ContainsAny("", "") // want ` strings\.ContainsAny` + _ = !strings.ContainsAny("", "") // want `!strings\.ContainsAny` + _ = strings.IndexAny("", "") != 0 + _ = !strings.ContainsAny("", "") // want `!strings\.ContainsAny` + + _ = strings.Contains("", "") // want ` strings\.Contains` + _ = strings.Contains("", "") // want ` strings\.Contains` + _ = strings.Index("", "") > 0 + _ = strings.Index("", "") >= -1 + _ = strings.Contains("", "") // want ` strings\.Contains` + _ = !strings.Contains("", "") // want `!strings\.Contains` + _ = strings.Index("", "") != 0 + _ = !strings.Contains("", "") // want `!strings\.Contains` + + _ = bytes.ContainsRune(nil, 'x') // want ` bytes\.ContainsRune` + _ = bytes.ContainsAny(nil, "") // want ` bytes\.ContainsAny` + _ = bytes.Contains(nil, nil) // want ` bytes\.Contains` +} diff --git a/simple/testdata/src/convert/convert.go.golden b/simple/testdata/src/convert/convert.go.golden new file mode 100644 index 000000000..d2e8b1cbc --- /dev/null +++ b/simple/testdata/src/convert/convert.go.golden @@ -0,0 +1,32 @@ +package pkg + +type t1 struct { + a int + b int +} + +type t2 struct { + a int + b int +} + +type t3 t1 + +func fn() { + v1 := t1{1, 2} + v2 := t2{1, 2} + _ = t2(v1) // want `should convert v1` + _ = t2(v1) // want `should convert v1` + _ = t2(v1) // want `should convert v1` + _ = t3(v1) // want `should convert v1` + + _ = t3{v1.a, 
v2.b} + + _ = t2{v1.b, v1.a} + _ = t2{a: v1.b, b: v1.a} + _ = t2{a: v1.a} + _ = t1{v1.a, v1.b} + + v := t1{1, 2} + _ = &t2{v.a, v.b} +} diff --git a/simple/testdata/src/convert_go17/convert.go.golden b/simple/testdata/src/convert_go17/convert.go.golden new file mode 100644 index 000000000..96515d000 --- /dev/null +++ b/simple/testdata/src/convert_go17/convert.go.golden @@ -0,0 +1,22 @@ +package pkg + +type t1 struct { + a int + b int +} + +type t2 struct { + a int + b int +} + +type t3 struct { + a int `tag` + b int `tag` +} + +func fn() { + v1 := t1{1, 2} + _ = t2(v1) // want `should convert v1` + _ = t3{v1.a, v1.b} +} diff --git a/simple/testdata/src/convert_go18/convert.go.golden b/simple/testdata/src/convert_go18/convert.go.golden new file mode 100644 index 000000000..5c71e19ab --- /dev/null +++ b/simple/testdata/src/convert_go18/convert.go.golden @@ -0,0 +1,22 @@ +package pkg + +type t1 struct { + a int + b int +} + +type t2 struct { + a int + b int +} + +type t3 struct { + a int `tag` + b int `tag` +} + +func fn() { + v1 := t1{1, 2} + _ = t2(v1) // want `should convert v1` + _ = t3(v1) // want `should convert v1` +} diff --git a/simple/testdata/src/copy/copy.go.golden b/simple/testdata/src/copy/copy.go.golden new file mode 100644 index 000000000..9cea76f73 --- /dev/null +++ b/simple/testdata/src/copy/copy.go.golden @@ -0,0 +1,24 @@ +package pkg + +func fn() { + var b1, b2 []byte + copy(b2, b1) + + copy(b2, b1) + + type T [][16]byte + var a T + b := make([]interface{}, len(a)) + for i := range b { + b[i] = a[i] + } + + var b3, b4 []*byte + copy(b4, b3) + + var m map[int]byte + for i, v := range b1 { + m[i] = v + } + +} diff --git a/simple/testdata/src/loop-append/loop-append.go.golden b/simple/testdata/src/loop-append/loop-append.go.golden new file mode 100644 index 000000000..ee815c7b7 --- /dev/null +++ b/simple/testdata/src/loop-append/loop-append.go.golden @@ -0,0 +1,76 @@ +package pkg + +type T struct { + F string +} + +func fn1() { + var x []interface{} + 
var y []int + + for _, v := range y { + x = append(x, v) + } + + var a, b []int + b = append(b, a...) + + var m map[string]int + var c []int + for _, v := range m { + c = append(c, v) + } + + var t []T + var m2 map[string][]T + + for _, tt := range t { + m2[tt.F] = append(m2[tt.F], tt) + } + + var out []T + for _, tt := range t { + out = append(m2[tt.F], tt) + } + _ = out +} + +func fn2() { + var v struct { + V int + } + var in []int + var out []int + + for _, v.V = range in { + out = append(out, v.V) + } +} + +func fn3() { + var t []T + var out [][]T + var m2 map[string][]T + + for _, tt := range t { + out = append(out, m2[tt.F]) + } +} + +func fn4() { + var a, b, c []int + for _, v := range a { + b = append(c, v) + } + _ = b +} + +func fn5() { + var t []T + var m2 map[string][]T + var out []T + for _, tt := range t { + out = append(m2[tt.F], tt) + } + _ = out +} diff --git a/simple/testdata/src/range_go14/range_go14.go.golden b/simple/testdata/src/range_go14/range_go14.go.golden new file mode 100644 index 000000000..23f7dea2e --- /dev/null +++ b/simple/testdata/src/range_go14/range_go14.go.golden @@ -0,0 +1,40 @@ +package pkg + +func fn() { + var m map[string]int + + // with := + for x := range m { // want `unnecessary assignment to the blank identifier` + _ = x + } + // with = + var y string + _ = y + for y = range m { // want `unnecessary assignment to the blank identifier` + } + + for range m { // want `unnecessary assignment to the blank identifier` + } + + for range m { // want `unnecessary assignment to the blank identifier` + } + + // all OK: + for x := range m { + _ = x + } + for x, y := range m { + _, _ = x, y + } + for _, y := range m { + _ = y + } + var x int + _ = x + for y = range m { + } + for y, x = range m { + } + for _, x = range m { + } +} diff --git a/simple/testdata/src/receive-blank/receive-blank.go.golden b/simple/testdata/src/receive-blank/receive-blank.go.golden new file mode 100644 index 000000000..b729b009b --- /dev/null +++ 
b/simple/testdata/src/receive-blank/receive-blank.go.golden @@ -0,0 +1,15 @@ +package pkg + +func fn() { + var ch chan int + <-ch + <-ch // want `unnecessary assignment to the blank identifier` + select { + case <-ch: + case <-ch: // want `unnecessary assignment to the blank identifier` + } + x := <-ch + y, _ := <-ch, <-ch + _, z := <-ch, <-ch + _, _, _ = x, y, z +} diff --git a/simple/testdata/src/slicing/slicing.go b/simple/testdata/src/slicing/slicing.go index a1de03d24..72f0517fa 100644 --- a/simple/testdata/src/slicing/slicing.go +++ b/simple/testdata/src/slicing/slicing.go @@ -1,9 +1,9 @@ package pkg func fn() { - var s []int + var s [5]int _ = s[:len(s)] // want `omit second index` - len := func(s []int) int { return -1 } + len := func(s [5]int) int { return -1 } _ = s[:len(s)] } diff --git a/simple/testdata/src/slicing/slicing.go.golden b/simple/testdata/src/slicing/slicing.go.golden new file mode 100644 index 000000000..47073fd29 --- /dev/null +++ b/simple/testdata/src/slicing/slicing.go.golden @@ -0,0 +1,9 @@ +package pkg + +func fn() { + var s [5]int + _ = s[:] // want `omit second index` + + len := func(s [5]int) int { return -1 } + _ = s[:len(s)] +} diff --git a/simple/testdata/src/time-since/time-since.go.golden b/simple/testdata/src/time-since/time-since.go.golden new file mode 100644 index 000000000..1b7dc55c7 --- /dev/null +++ b/simple/testdata/src/time-since/time-since.go.golden @@ -0,0 +1,9 @@ +package pkg + +import "time" + +func fn() { + t1 := time.Now() + _ = time.Since(t1) // want `time\.Since` + _ = time.Date(0, 0, 0, 0, 0, 0, 0, nil).Sub(t1) +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 12e83f6c4..7d7c5021d 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -1536,8 +1536,7 @@ func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { if !push { return false } - assign, ok := node.(*ast.AssignStmt) - if ok { + if assign, ok := node.(*ast.AssignStmt); ok { // TODO(dh): This risks missing some Header 
reads, for // example in `h1["foo"] = h2["foo"]` – these edge // cases are probably rare enough to ignore for now. diff --git a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go index 8f4bffe6f..6be8a4344 100644 --- a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go +++ b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go @@ -3,10 +3,12 @@ package pkg import "net/http" func fn() { + const hdr = "foo" var r http.Request h := http.Header{} var m map[string][]string _ = h["foo"] // want `keys in http\.Header are canonicalized` + _ = h[hdr] // want `keys in http\.Header are canonicalized` h["foo"] = nil _ = r.Header["foo"] // want `keys in http\.Header are canonicalized` r.Header["foo"] = nil diff --git a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go.golden b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go.golden new file mode 100644 index 000000000..ae7e7a737 --- /dev/null +++ b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go.golden @@ -0,0 +1,16 @@ +package pkg + +import "net/http" + +func fn() { + const hdr = "foo" + var r http.Request + h := http.Header{} + var m map[string][]string + _ = h["Foo"] // want `keys in http\.Header are canonicalized` + _ = h[http.CanonicalHeaderKey(hdr)] // want `keys in http\.Header are canonicalized` + h["foo"] = nil + _ = r.Header["Foo"] // want `keys in http\.Header are canonicalized` + r.Header["foo"] = nil + _ = m["foo"] +} diff --git a/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go.golden b/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go.golden new file mode 100644 index 000000000..c20c9dfcf --- /dev/null +++ b/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go.golden @@ -0,0 +1,49 @@ +-- turn into single negation -- +package pkg + 
+func fn(b1, b2 bool) { + if !b1 { // want `negating a boolean twice` + println() + } + + if b1 && !b2 { // want `negating a boolean twice` + println() + } + + if !b1 { // want `negating a boolean twice` + println() + } + + if !b1 { + println() + } + + if !b1 && !b2 { + println() + } +} + +-- remove double negation -- +package pkg + +func fn(b1, b2 bool) { + if b1 { // want `negating a boolean twice` + println() + } + + if b1 && b2 { // want `negating a boolean twice` + println() + } + + if b1 { // want `negating a boolean twice` + println() + } + + if !b1 { + println() + } + + if !b1 && !b2 { + println() + } +} diff --git a/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go.golden b/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go.golden new file mode 100644 index 000000000..02af55612 --- /dev/null +++ b/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go.golden @@ -0,0 +1,30 @@ +package pkg + +func fn() { + var ch chan int + select { + case <-ch: + default: + } + + for { + select { + case <-ch: + // want `should not have an empty default case` + } + } + + for { + select { + case <-ch: + default: + println("foo") + } + } + + for { + select { + case <-ch: + } + } +} diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden new file mode 100644 index 000000000..f5f3c53ee --- /dev/null +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden @@ -0,0 +1,68 @@ +package pkg + +const c1 int = 0 +const c2 = 0 + +const ( + c3 int = iota + c4 + c5 +) + +const ( + c6 int = 1 // want `only the first constant in this group has an explicit type` + c7 int = 2 + c8 int = 3 +) + +const ( + c9 int = 1 + c10 = 2 + c11 = 3 + c12 int = 4 +) + +const ( + c13 = 1 + c14 int = 2 + c15 int = 3 + c16 int = 4 +) 
+ +const ( + c17 = 1 + c18 int = 2 + c19 = 3 + c20 int = 4 +) + +const ( + c21 int = 1 + + c22 = 2 +) + +const ( + c23 int = 1 + c24 int = 2 + + c25 string = "" // want `only the first constant in this group has an explicit type` + c26 string = "" + + c27 = 1 + c28 int = 2 + + c29 int = 1 + c30 = 2 + c31 int = 2 + + c32 string = "" // want `only the first constant in this group has an explicit type` + c33 string = "" +) + +const ( + c34 int = 1 // want `only the first constant in this group has an explicit type` + c35 int = 2 + + c36 int = 2 +) diff --git a/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go.golden b/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go.golden new file mode 100644 index 000000000..471d69178 --- /dev/null +++ b/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go.golden @@ -0,0 +1,7 @@ +package pkg + +import "os" + +func fn() { + os.OpenFile("", 0, 0644) // want `file mode.+` +} diff --git a/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go.golden b/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go.golden new file mode 100644 index 000000000..df7763cc0 --- /dev/null +++ b/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go.golden @@ -0,0 +1,35 @@ +-- explicitly use nanoseconds -- +package pkg + +import "time" + +const c1 = 1 +const c2 = 200 + +func fn() { + time.Sleep(1 * time.Nanosecond) // want `sleeping for 1` + time.Sleep(42 * time.Nanosecond) // want `sleeping for 42` + time.Sleep(201) + time.Sleep(c1) + time.Sleep(c2) + time.Sleep(2 * time.Nanosecond) + time.Sleep(time.Nanosecond) +} + +-- use seconds -- +package pkg + +import "time" + +const c1 = 1 +const c2 = 200 + +func fn() { + time.Sleep(1 * time.Second) // want `sleeping for 1` + time.Sleep(42 * time.Second) // want `sleeping for 42` + time.Sleep(201) + time.Sleep(c1) + time.Sleep(c2) + time.Sleep(2 * time.Nanosecond) + 
time.Sleep(time.Nanosecond) +} diff --git a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go.golden b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go.golden new file mode 100644 index 000000000..c90cffe1f --- /dev/null +++ b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go.golden @@ -0,0 +1,43 @@ +package pkg + +import "strings" + +func fn() { + const ( + s1 = "foo" + s2 = "bar" + ) + + if strings.EqualFold(s1, s2) { // want `should use strings\.EqualFold instead` + panic("") + } + + if strings.EqualFold(s1, s2) { // want `should use strings\.EqualFold instead` + panic("") + } + + if !strings.EqualFold(s1, s2) { // want `should use strings\.EqualFold instead` + panic("") + } + + switch strings.EqualFold(s1, s2) { // want `should use strings\.EqualFold instead` + case true, false: + panic("") + } + + if strings.EqualFold(s1, s2) || s1+s2 == s2+s1 { // want `should use strings\.EqualFold instead` + panic("") + } + + if strings.ToLower(s1) > strings.ToLower(s2) { + panic("") + } + + if strings.ToLower(s1) < strings.ToLower(s2) { + panic("") + } + + if strings.ToLower(s1) == strings.ToUpper(s2) { + panic("") + } +} diff --git a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go.golden b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go.golden new file mode 100644 index 000000000..230d746b8 --- /dev/null +++ b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go.golden @@ -0,0 +1,22 @@ +package pkg + +import ( + "fmt" + "log" + "os" +) + +func fn(s string) { + fn2 := func() string { return "" } + fmt.Print(fn2()) // want `should use print-style function` + _ = fmt.Sprint(fn2()) // want `should use print-style function` + log.Print(fn2()) // want `should use print-style function` + fmt.Print(s) // want `should use print-style function` + fmt.Printf(s, "") + fmt.Fprint(os.Stdout, s) // want `should use print-style 
function` + fmt.Fprintf(os.Stdout, s, "") + + fmt.Printf(fn2(), "") + fmt.Printf("") + fmt.Printf("%s", "") +} diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# b/staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# new file mode 100644 index 000000000..aff90a10c --- /dev/null +++ b/staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# @@ -0,0 +1,83 @@ +-- remove syscall.SIGKILL from list of arguments -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore() // want `cannot be trapped` + signal.Ignore(os.Kill) // want `cannot be trapped` + signal.Notify(c, os.Kill) // want `cannot be trapped` + signal.Reset(os.Kill) // want `cannot be trapped` + signal.Ignore() // want `cannot be trapped` + signal.Notify(c) // want `cannot be trapped` + signal.Reset() // want `cannot be trapped` +} + +-- remove os.Kill from list of arguments -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` + signal.Ignore() // want `cannot be trapped` + signal.Notify(c) // want `cannot be trapped` + signal.Reset() // want `cannot be trapped` + signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` + signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` + signal.Reset(syscall.SIGKILL) // want `cannot be trapped` +} + +-- use syscall.SIGTERM instead of syscall.SIGKILL -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` + signal.Ignore(os.Kill) // want `cannot be trapped` + signal.Notify(c, os.Kill) // want `cannot be trapped` + signal.Reset(os.Kill) // want `cannot be 
trapped` + signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` + signal.Notify(c, syscall.SIGTERM) // want `cannot be trapped` + signal.Reset(syscall.SIGTERM) // want `cannot be trapped` +} + +-- use syscall.SIGTERM instead of os.Kill -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` + signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` + signal.Notify(c, syscall.SIGTERM) // want `cannot be trapped` + signal.Reset(syscall.SIGTERM) // want `cannot be trapped` + signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` + signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` + signal.Reset(syscall.SIGKILL) // want `cannot be trapped` +} diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden b/staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden new file mode 120000 index 000000000..ebff53d0d --- /dev/null +++ b/staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden @@ -0,0 +1 @@ +dominikh@nixos.1490:1586915592 \ No newline at end of file diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go.golden b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go.golden new file mode 100644 index 000000000..aff90a10c --- /dev/null +++ b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go.golden @@ -0,0 +1,83 @@ +-- remove syscall.SIGKILL from list of arguments -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore() // want `cannot be trapped` + signal.Ignore(os.Kill) // want `cannot be trapped` + signal.Notify(c, os.Kill) // want `cannot be trapped` + signal.Reset(os.Kill) // want `cannot be trapped` + signal.Ignore() 
// want `cannot be trapped` + signal.Notify(c) // want `cannot be trapped` + signal.Reset() // want `cannot be trapped` +} + +-- remove os.Kill from list of arguments -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` + signal.Ignore() // want `cannot be trapped` + signal.Notify(c) // want `cannot be trapped` + signal.Reset() // want `cannot be trapped` + signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` + signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` + signal.Reset(syscall.SIGKILL) // want `cannot be trapped` +} + +-- use syscall.SIGTERM instead of syscall.SIGKILL -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` + signal.Ignore(os.Kill) // want `cannot be trapped` + signal.Notify(c, os.Kill) // want `cannot be trapped` + signal.Reset(os.Kill) // want `cannot be trapped` + signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` + signal.Notify(c, syscall.SIGTERM) // want `cannot be trapped` + signal.Reset(syscall.SIGTERM) // want `cannot be trapped` +} + +-- use syscall.SIGTERM instead of os.Kill -- +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn() { + c := make(chan os.Signal, 1) + signal.Notify(c, os.Interrupt) + signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` + signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` + signal.Notify(c, syscall.SIGTERM) // want `cannot be trapped` + signal.Reset(syscall.SIGTERM) // want `cannot be trapped` + signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` + signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` + signal.Reset(syscall.SIGKILL) // want `cannot be trapped` +} diff --git 
a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go.golden b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go.golden new file mode 100644 index 000000000..3255cae97 --- /dev/null +++ b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go.golden @@ -0,0 +1,16 @@ +// +build android darwin dragonfly freebsd linux netbsd openbsd solaris + +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn2() { + c := make(chan os.Signal, 1) + signal.Ignore() // want `cannot be trapped` + signal.Notify(c) // want `cannot be trapped` + signal.Reset() // want `cannot be trapped` +} diff --git a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden new file mode 100644 index 000000000..cef6d7a0b --- /dev/null +++ b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden @@ -0,0 +1,24 @@ +package pkg + +import "context" + +func fn1(ctx context.Context) {} +func fn2(x string, ctx context.Context) {} +func fn4() {} + +type T struct{} + +func (*T) Foo() {} + +func fn3() { + fn1(context.Background()) // want `do not pass a nil Context` + fn1(context.TODO()) + fn2("", nil) + fn4() + + // don't flag this conversion + _ = (func(context.Context))(nil) + // and don't crash on these + _ = (func())(nil) + (*T).Foo(nil) +} diff --git a/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go.golden b/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go.golden new file mode 100644 index 000000000..24327dd99 --- /dev/null +++ b/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go.golden @@ -0,0 +1,12 @@ +package pkg + +import "io" + +func fn() { + const SeekStart = 0 + var s io.Seeker + s.Seek(0, 0) + s.Seek(0, io.SeekStart) + s.Seek(0, io.SeekStart) // want `the first argument of 
io\.Seeker is the offset` + s.Seek(SeekStart, 0) +} diff --git a/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go.golden b/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go.golden new file mode 100644 index 000000000..a69279d09 --- /dev/null +++ b/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go.golden @@ -0,0 +1,21 @@ +// Package pkg ... +package pkg + +import "net/http" + +func fn() { + // Check all the supported functions + http.Error(nil, "", http.StatusVariantAlsoNegotiates) // want `http\.StatusVariantAlsoNegotiates` + http.Redirect(nil, nil, "", http.StatusVariantAlsoNegotiates) // want `http\.StatusVariantAlsoNegotiates` + http.StatusText(http.StatusVariantAlsoNegotiates) // want `http\.StatusVariantAlsoNegotiates` + http.RedirectHandler("", http.StatusVariantAlsoNegotiates) // want `http\.StatusVariantAlsoNegotiates` + + // Don't flag literals with no known constant + http.StatusText(600) + + // Don't flag constants + http.StatusText(http.StatusAccepted) + + // Don't flag items on the whitelist (well known codes) + http.StatusText(404) +} diff --git a/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go.golden b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go.golden new file mode 100644 index 000000000..9e237af9e --- /dev/null +++ b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go.golden @@ -0,0 +1,12 @@ +// Package pkg ... 
+package pkg + +var ( + a = "\a" // want `Unicode control character U\+0007` + b = "\a\x1a" // want `Unicode control characters` + c = "Test test" + d = `T +est` + e = `Zero\u200bWidth` // want `Unicode format character U\+200B` + f = "\u200b" +) diff --git a/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go.golden b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go.golden new file mode 100644 index 000000000..3ab673457 --- /dev/null +++ b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go.golden @@ -0,0 +1,16 @@ +// Package pkg ... +package pkg + +func fn(x string, y int) { + if x == "" { // want `Yoda` + } + if y == 0 { // want `Yoda` + } + if 0 > y { + } + if "" == "" { + } + + if "" == "" || y == 0 { // want `Yoda` + } +} From 9ccca0dc4e20e0f723e016f5f2c0bee0b68a105e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 15 Apr 2020 10:29:07 +0200 Subject: [PATCH 015/111] internal/go/gcimporter: import copy of golang.org/x/tools/go/internal/gcimporter Import from commit 33e937220d8f91f1d242ad15aebc3e245aca5515 --- internal/go/gcimporter/bexport.go | 852 +++++++++++++ internal/go/gcimporter/bexport_test.go | 420 +++++++ internal/go/gcimporter/bimport.go | 1039 ++++++++++++++++ internal/go/gcimporter/exportdata.go | 93 ++ internal/go/gcimporter/gcimporter.go | 1078 +++++++++++++++++ internal/go/gcimporter/gcimporter11_test.go | 129 ++ internal/go/gcimporter/gcimporter_test.go | 525 ++++++++ internal/go/gcimporter/iexport.go | 739 +++++++++++ internal/go/gcimporter/iexport_test.go | 310 +++++ internal/go/gcimporter/iimport.go | 630 ++++++++++ internal/go/gcimporter/israce_test.go | 11 + internal/go/gcimporter/newInterface10.go | 21 + internal/go/gcimporter/newInterface11.go | 13 + internal/go/gcimporter/testdata/a.go | 14 + internal/go/gcimporter/testdata/b.go | 11 + internal/go/gcimporter/testdata/exports.go | 89 ++ internal/go/gcimporter/testdata/issue15920.go | 11 + 
internal/go/gcimporter/testdata/issue20046.go | 9 + internal/go/gcimporter/testdata/issue25301.go | 17 + internal/go/gcimporter/testdata/p.go | 13 + .../go/gcimporter/testdata/versions/test.go | 30 + .../testdata/versions/test_go1.11_0i.a | Bin 0 -> 2420 bytes .../testdata/versions/test_go1.11_6b.a | Bin 0 -> 2426 bytes .../testdata/versions/test_go1.11_999b.a | Bin 0 -> 2600 bytes .../testdata/versions/test_go1.11_999i.a | Bin 0 -> 2420 bytes .../testdata/versions/test_go1.7_0.a | Bin 0 -> 1862 bytes .../testdata/versions/test_go1.7_1.a | Bin 0 -> 2316 bytes .../testdata/versions/test_go1.8_4.a | Bin 0 -> 1658 bytes .../testdata/versions/test_go1.8_5.a | Bin 0 -> 1658 bytes internal/testenv/testenv.go | 185 +++ internal/testenv/testenv_112.go | 27 + 31 files changed, 6266 insertions(+) create mode 100644 internal/go/gcimporter/bexport.go create mode 100644 internal/go/gcimporter/bexport_test.go create mode 100644 internal/go/gcimporter/bimport.go create mode 100644 internal/go/gcimporter/exportdata.go create mode 100644 internal/go/gcimporter/gcimporter.go create mode 100644 internal/go/gcimporter/gcimporter11_test.go create mode 100644 internal/go/gcimporter/gcimporter_test.go create mode 100644 internal/go/gcimporter/iexport.go create mode 100644 internal/go/gcimporter/iexport_test.go create mode 100644 internal/go/gcimporter/iimport.go create mode 100644 internal/go/gcimporter/israce_test.go create mode 100644 internal/go/gcimporter/newInterface10.go create mode 100644 internal/go/gcimporter/newInterface11.go create mode 100644 internal/go/gcimporter/testdata/a.go create mode 100644 internal/go/gcimporter/testdata/b.go create mode 100644 internal/go/gcimporter/testdata/exports.go create mode 100644 internal/go/gcimporter/testdata/issue15920.go create mode 100644 internal/go/gcimporter/testdata/issue20046.go create mode 100644 internal/go/gcimporter/testdata/issue25301.go create mode 100644 internal/go/gcimporter/testdata/p.go create mode 100644 
internal/go/gcimporter/testdata/versions/test.go create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.11_0i.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.11_6b.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.11_999b.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.11_999i.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.7_0.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.7_1.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.8_4.a create mode 100644 internal/go/gcimporter/testdata/versions/test_go1.8_5.a create mode 100644 internal/testenv/testenv.go create mode 100644 internal/testenv/testenv_112.go diff --git a/internal/go/gcimporter/bexport.go b/internal/go/gcimporter/bexport.go new file mode 100644 index 000000000..a807d0aaa --- /dev/null +++ b/internal/go/gcimporter/bexport.go @@ -0,0 +1,852 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; +// see that file for specification of the format. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "math" + "math/big" + "sort" + "strings" +) + +// If debugFormat is set, each integer and string value is preceded by a marker +// and position information in the encoding. This mechanism permits an importer +// to recognize immediately when it is out of sync. The importer recognizes this +// mode automatically (i.e., it can import export data produced with debugging +// support even if debugFormat is not set at the time of import). This mode will +// lead to massively larger export data (by a factor of 2 to 3) and should only +// be enabled during development and debugging. 
+// +// NOTE: This flag is the first flag to enable if importing dies because of +// (suspected) format errors, and whenever a change is made to the format. +const debugFormat = false // default: false + +// If trace is set, debugging output is printed to std out. +const trace = false // default: false + +// Current export format version. Increase with each format change. +// Note: The latest binary (non-indexed) export format is at version 6. +// This exporter is still at level 4, but it doesn't matter since +// the binary importer can handle older versions just fine. +// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE +// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE +// 4: type name objects support type aliases, uses aliasTag +// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) +// 2: removed unused bool in ODCL export (compiler only) +// 1: header format change (more regular), export package for _ struct fields +// 0: Go1.7 encoding +const exportVersion = 4 + +// trackAllTypes enables cycle tracking for all types, not just named +// types. The existing compiler invariants assume that unnamed types +// that are not completely set up are not used, or else there are spurious +// errors. +// If disabled, only named types are tracked, possibly leading to slightly +// less efficient encoding in rare cases. It also prevents the export of +// some corner-case type declarations (but those are not handled correctly +// with with the textual export format either). 
+// TODO(gri) enable and remove once issues caused by it are fixed +const trackAllTypes = false + +type exporter struct { + fset *token.FileSet + out bytes.Buffer + + // object -> index maps, indexed in order of serialization + strIndex map[string]int + pkgIndex map[*types.Package]int + typIndex map[types.Type]int + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + + // debugging support + written int // bytes written + indent int // for trace +} + +// internalError represents an error generated inside this package. +type internalError string + +func (e internalError) Error() string { return "gcimporter: " + string(e) } + +func internalErrorf(format string, args ...interface{}) error { + return internalError(fmt.Sprintf(format, args...)) +} + +// BExportData returns binary export data for pkg. +// If no file set is provided, position info will be missing. +func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. + panic(e) + } + }() + + p := exporter{ + fset: fset, + strIndex: map[string]int{"": 0}, // empty string is mapped to 0 + pkgIndex: make(map[*types.Package]int), + typIndex: make(map[types.Type]int), + posInfoFormat: true, // TODO(gri) might become a flag, eventually + } + + // write version info + // The version string must start with "version %d" where %d is the version + // number. Additional debugging information may follow after a blank; that + // text is ignored by the importer. 
+ p.rawStringln(fmt.Sprintf("version %d", exportVersion)) + var debug string + if debugFormat { + debug = "debug" + } + p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly + p.bool(trackAllTypes) + p.bool(p.posInfoFormat) + + // --- generic export data --- + + // populate type map with predeclared "known" types + for index, typ := range predeclared() { + p.typIndex[typ] = index + } + if len(p.typIndex) != len(predeclared()) { + return nil, internalError("duplicate entries in type map?") + } + + // write package data + p.pkg(pkg, true) + if trace { + p.tracef("\n") + } + + // write objects + objcount := 0 + scope := pkg.Scope() + for _, name := range scope.Names() { + if !ast.IsExported(name) { + continue + } + if trace { + p.tracef("\n") + } + p.obj(scope.Lookup(name)) + objcount++ + } + + // indicate end of list + if trace { + p.tracef("\n") + } + p.tag(endTag) + + // for self-verification only (redundant) + p.int(objcount) + + if trace { + p.tracef("\n") + } + + // --- end of export data --- + + return p.out.Bytes(), nil +} + +func (p *exporter) pkg(pkg *types.Package, emptypath bool) { + if pkg == nil { + panic(internalError("unexpected nil pkg")) + } + + // if we saw the package before, write its index (>= 0) + if i, ok := p.pkgIndex[pkg]; ok { + p.index('P', i) + return + } + + // otherwise, remember the package, write the package tag (< 0) and package data + if trace { + p.tracef("P%d = { ", len(p.pkgIndex)) + defer p.tracef("} ") + } + p.pkgIndex[pkg] = len(p.pkgIndex) + + p.tag(packageTag) + p.string(pkg.Name()) + if emptypath { + p.string("") + } else { + p.string(pkg.Path()) + } +} + +func (p *exporter) obj(obj types.Object) { + switch obj := obj.(type) { + case *types.Const: + p.tag(constTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + p.value(obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + p.tag(aliasTag) + p.pos(obj) + p.qualifiedName(obj) + } else { + p.tag(typeTag) + } 
+ p.typ(obj.Type()) + + case *types.Var: + p.tag(varTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + + case *types.Func: + p.tag(funcTag) + p.pos(obj) + p.qualifiedName(obj) + sig := obj.Type().(*types.Signature) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + + default: + panic(internalErrorf("unexpected object %v (%T)", obj, obj)) + } +} + +func (p *exporter) pos(obj types.Object) { + if !p.posInfoFormat { + return + } + + file, line := p.fileLine(obj) + if file == p.prevFile { + // common case: write line delta + // delta == 0 means different file or no line change + delta := line - p.prevLine + p.int(delta) + if delta == 0 { + p.int(-1) // -1 means no file change + } + } else { + // different file + p.int(0) + // Encode filename as length of common prefix with previous + // filename, followed by (possibly empty) suffix. Filenames + // frequently share path prefixes, so this can save a lot + // of space and make export data size less dependent on file + // path length. The suffix is unlikely to be empty because + // file names tend to end in ".go". + n := commonPrefixLen(p.prevFile, file) + p.int(n) // n >= 0 + p.string(file[n:]) // write suffix only + p.prevFile = file + p.int(line) + } + p.prevLine = line +} + +func (p *exporter) fileLine(obj types.Object) (file string, line int) { + if p.fset != nil { + pos := p.fset.Position(obj.Pos()) + file = pos.Filename + line = pos.Line + } + return +} + +func commonPrefixLen(a, b string) int { + if len(a) > len(b) { + a, b = b, a + } + // len(a) <= len(b) + i := 0 + for i < len(a) && a[i] == b[i] { + i++ + } + return i +} + +func (p *exporter) qualifiedName(obj types.Object) { + p.string(obj.Name()) + p.pkg(obj.Pkg(), false) +} + +func (p *exporter) typ(t types.Type) { + if t == nil { + panic(internalError("nil type")) + } + + // Possible optimization: Anonymous pointer types *T where + // T is a named type are common. 
We could canonicalize all + // such types *T to a single type PT = *T. This would lead + // to at most one *T entry in typIndex, and all future *T's + // would be encoded as the respective index directly. Would + // save 1 byte (pointerTag) per *T and reduce the typIndex + // size (at the cost of a canonicalization map). We can do + // this later, without encoding format change. + + // if we saw the type before, write its index (>= 0) + if i, ok := p.typIndex[t]; ok { + p.index('T', i) + return + } + + // otherwise, remember the type, write the type tag (< 0) and type data + if trackAllTypes { + if trace { + p.tracef("T%d = {>\n", len(p.typIndex)) + defer p.tracef("<\n} ") + } + p.typIndex[t] = len(p.typIndex) + } + + switch t := t.(type) { + case *types.Named: + if !trackAllTypes { + // if we don't track all types, track named types now + p.typIndex[t] = len(p.typIndex) + } + + p.tag(namedTag) + p.pos(t.Obj()) + p.qualifiedName(t.Obj()) + p.typ(t.Underlying()) + if !types.IsInterface(t) { + p.assocMethods(t) + } + + case *types.Array: + p.tag(arrayTag) + p.int64(t.Len()) + p.typ(t.Elem()) + + case *types.Slice: + p.tag(sliceTag) + p.typ(t.Elem()) + + case *dddSlice: + p.tag(dddTag) + p.typ(t.elem) + + case *types.Struct: + p.tag(structTag) + p.fieldList(t) + + case *types.Pointer: + p.tag(pointerTag) + p.typ(t.Elem()) + + case *types.Signature: + p.tag(signatureTag) + p.paramList(t.Params(), t.Variadic()) + p.paramList(t.Results(), false) + + case *types.Interface: + p.tag(interfaceTag) + p.iface(t) + + case *types.Map: + p.tag(mapTag) + p.typ(t.Key()) + p.typ(t.Elem()) + + case *types.Chan: + p.tag(chanTag) + p.int(int(3 - t.Dir())) // hack + p.typ(t.Elem()) + + default: + panic(internalErrorf("unexpected type %T: %s", t, t)) + } +} + +func (p *exporter) assocMethods(named *types.Named) { + // Sort methods (for determinism). 
+ var methods []*types.Func + for i := 0; i < named.NumMethods(); i++ { + methods = append(methods, named.Method(i)) + } + sort.Sort(methodsByName(methods)) + + p.int(len(methods)) + + if trace && methods != nil { + p.tracef("associated methods {>\n") + } + + for i, m := range methods { + if trace && i > 0 { + p.tracef("\n") + } + + p.pos(m) + name := m.Name() + p.string(name) + if !exported(name) { + p.pkg(m.Pkg(), false) + } + + sig := m.Type().(*types.Signature) + p.paramList(types.NewTuple(sig.Recv()), false) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + p.int(0) // dummy value for go:nointerface pragma - ignored by importer + } + + if trace && methods != nil { + p.tracef("<\n} ") + } +} + +type methodsByName []*types.Func + +func (x methodsByName) Len() int { return len(x) } +func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } + +func (p *exporter) fieldList(t *types.Struct) { + if trace && t.NumFields() > 0 { + p.tracef("fields {>\n") + defer p.tracef("<\n} ") + } + + p.int(t.NumFields()) + for i := 0; i < t.NumFields(); i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.field(t.Field(i)) + p.string(t.Tag(i)) + } +} + +func (p *exporter) field(f *types.Var) { + if !f.IsField() { + panic(internalError("field expected")) + } + + p.pos(f) + p.fieldName(f) + p.typ(f.Type()) +} + +func (p *exporter) iface(t *types.Interface) { + // TODO(gri): enable importer to load embedded interfaces, + // then emit Embeddeds and ExplicitMethods separately here. 
+ p.int(0) + + n := t.NumMethods() + if trace && n > 0 { + p.tracef("methods {>\n") + defer p.tracef("<\n} ") + } + p.int(n) + for i := 0; i < n; i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.method(t.Method(i)) + } +} + +func (p *exporter) method(m *types.Func) { + sig := m.Type().(*types.Signature) + if sig.Recv() == nil { + panic(internalError("method expected")) + } + + p.pos(m) + p.string(m.Name()) + if m.Name() != "_" && !ast.IsExported(m.Name()) { + p.pkg(m.Pkg(), false) + } + + // interface method; no need to encode receiver. + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) +} + +func (p *exporter) fieldName(f *types.Var) { + name := f.Name() + + if f.Anonymous() { + // anonymous field - we distinguish between 3 cases: + // 1) field name matches base type name and is exported + // 2) field name matches base type name and is not exported + // 3) field name doesn't match base type name (alias name) + bname := basetypeName(f.Type()) + if name == bname { + if ast.IsExported(name) { + name = "" // 1) we don't need to know the field name or package + } else { + name = "?" // 2) use unexported name "?" 
to force package export + } + } else { + // 3) indicate alias and export name as is + // (this requires an extra "@" but this is a rare case) + p.string("@") + } + } + + p.string(name) + if name != "" && !ast.IsExported(name) { + p.pkg(f.Pkg(), false) + } +} + +func basetypeName(typ types.Type) string { + switch typ := deref(typ).(type) { + case *types.Basic: + return typ.Name() + case *types.Named: + return typ.Obj().Name() + default: + return "" // unnamed type + } +} + +func (p *exporter) paramList(params *types.Tuple, variadic bool) { + // use negative length to indicate unnamed parameters + // (look at the first parameter only since either all + // names are present or all are absent) + n := params.Len() + if n > 0 && params.At(0).Name() == "" { + n = -n + } + p.int(n) + for i := 0; i < params.Len(); i++ { + q := params.At(i) + t := q.Type() + if variadic && i == params.Len()-1 { + t = &dddSlice{t.(*types.Slice).Elem()} + } + p.typ(t) + if n > 0 { + name := q.Name() + p.string(name) + if name != "_" { + p.pkg(q.Pkg(), false) + } + } + p.string("") // no compiler-specific info + } +} + +func (p *exporter) value(x constant.Value) { + if trace { + p.tracef("= ") + } + + switch x.Kind() { + case constant.Bool: + tag := falseTag + if constant.BoolVal(x) { + tag = trueTag + } + p.tag(tag) + + case constant.Int: + if v, exact := constant.Int64Val(x); exact { + // common case: x fits into an int64 - use compact encoding + p.tag(int64Tag) + p.int64(v) + return + } + // uncommon case: large x - use float encoding + // (powers of 2 will be encoded efficiently with exponent) + p.tag(floatTag) + p.float(constant.ToFloat(x)) + + case constant.Float: + p.tag(floatTag) + p.float(x) + + case constant.Complex: + p.tag(complexTag) + p.float(constant.Real(x)) + p.float(constant.Imag(x)) + + case constant.String: + p.tag(stringTag) + p.string(constant.StringVal(x)) + + case constant.Unknown: + // package contains type errors + p.tag(unknownTag) + + default: + 
panic(internalErrorf("unexpected value %v (%T)", x, x)) + } +} + +func (p *exporter) float(x constant.Value) { + if x.Kind() != constant.Float { + panic(internalErrorf("unexpected constant %v, want float", x)) + } + // extract sign (there is no -0) + sign := constant.Sign(x) + if sign == 0 { + // x == 0 + p.int(0) + return + } + // x != 0 + + var f big.Float + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + r := valueToRat(num) + f.SetRat(r.Quo(r, valueToRat(denom))) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + f.SetFloat64(math.MaxFloat64) // FIXME + } + + // extract exponent such that 0.5 <= m < 1.0 + var m big.Float + exp := f.MantExp(&m) + + // extract mantissa as *big.Int + // - set exponent large enough so mant satisfies mant.IsInt() + // - get *big.Int from mant + m.SetMantExp(&m, int(m.MinPrec())) + mant, acc := m.Int(nil) + if acc != big.Exact { + panic(internalError("internal error")) + } + + p.int(sign) + p.int(exp) + p.string(string(mant.Bytes())) +} + +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. + // I can't believe this is necessary. 
+ bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} + +func (p *exporter) bool(b bool) bool { + if trace { + p.tracef("[") + defer p.tracef("= %v] ", b) + } + + x := 0 + if b { + x = 1 + } + p.int(x) + return b +} + +// ---------------------------------------------------------------------------- +// Low-level encoders + +func (p *exporter) index(marker byte, index int) { + if index < 0 { + panic(internalError("invalid index < 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%c%d ", marker, index) + } + p.rawInt64(int64(index)) +} + +func (p *exporter) tag(tag int) { + if tag >= 0 { + panic(internalError("invalid tag >= 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%s ", tagString[-tag]) + } + p.rawInt64(int64(tag)) +} + +func (p *exporter) int(x int) { + p.int64(int64(x)) +} + +func (p *exporter) int64(x int64) { + if debugFormat { + p.marker('i') + } + if trace { + p.tracef("%d ", x) + } + p.rawInt64(x) +} + +func (p *exporter) string(s string) { + if debugFormat { + p.marker('s') + } + if trace { + p.tracef("%q ", s) + } + // if we saw the string before, write its index (>= 0) + // (the empty string is mapped to 0) + if i, ok := p.strIndex[s]; ok { + p.rawInt64(int64(i)) + return + } + // otherwise, remember string and write its negative length and bytes + p.strIndex[s] = len(p.strIndex) + p.rawInt64(-int64(len(s))) + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } +} + +// marker emits a marker byte and position information which makes +// it easy for a reader to detect if it is "out of sync". Used for +// debugFormat format only. +func (p *exporter) marker(m byte) { + p.rawByte(m) + // Enable this for help tracking down the location + // of an incorrect marker when running in debugFormat. 
+ if false && trace { + p.tracef("#%d ", p.written) + } + p.rawInt64(int64(p.written)) +} + +// rawInt64 should only be used by low-level encoders. +func (p *exporter) rawInt64(x int64) { + var tmp [binary.MaxVarintLen64]byte + n := binary.PutVarint(tmp[:], x) + for i := 0; i < n; i++ { + p.rawByte(tmp[i]) + } +} + +// rawStringln should only be used to emit the initial version string. +func (p *exporter) rawStringln(s string) { + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } + p.rawByte('\n') +} + +// rawByte is the bottleneck interface to write to p.out. +// rawByte escapes b as follows (any encoding does that +// hides '$'): +// +// '$' => '|' 'S' +// '|' => '|' '|' +// +// Necessary so other tools can find the end of the +// export data by searching for "$$". +// rawByte should only be used by low-level encoders. +func (p *exporter) rawByte(b byte) { + switch b { + case '$': + // write '$' as '|' 'S' + b = 'S' + fallthrough + case '|': + // write '|' as '|' '|' + p.out.WriteByte('|') + p.written++ + } + p.out.WriteByte(b) + p.written++ +} + +// tracef is like fmt.Printf but it rewrites the format string +// to take care of indentation. +func (p *exporter) tracef(format string, args ...interface{}) { + if strings.ContainsAny(format, "<>\n") { + var buf bytes.Buffer + for i := 0; i < len(format); i++ { + // no need to deal with runes + ch := format[i] + switch ch { + case '>': + p.indent++ + continue + case '<': + p.indent-- + continue + } + buf.WriteByte(ch) + if ch == '\n' { + for j := p.indent; j > 0; j-- { + buf.WriteString(". ") + } + } + } + format = buf.String() + } + fmt.Printf(format, args...) +} + +// Debugging support. 
+// (tagString is only used when tracing is enabled) +var tagString = [...]string{ + // Packages + -packageTag: "package", + + // Types + -namedTag: "named type", + -arrayTag: "array", + -sliceTag: "slice", + -dddTag: "ddd", + -structTag: "struct", + -pointerTag: "pointer", + -signatureTag: "signature", + -interfaceTag: "interface", + -mapTag: "map", + -chanTag: "chan", + + // Values + -falseTag: "false", + -trueTag: "true", + -int64Tag: "int64", + -floatTag: "float", + -fractionTag: "fraction", + -complexTag: "complex", + -stringTag: "string", + -unknownTag: "unknown", + + // Type aliases + -aliasTag: "alias", +} diff --git a/internal/go/gcimporter/bexport_test.go b/internal/go/gcimporter/bexport_test.go new file mode 100644 index 000000000..9e1d1c5d4 --- /dev/null +++ b/internal/go/gcimporter/bexport_test.go @@ -0,0 +1,420 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcimporter_test + +import ( + "fmt" + "go/ast" + "go/build" + "go/constant" + "go/parser" + "go/token" + "go/types" + "reflect" + "runtime" + "strings" + "testing" + + "honnef.co/go/tools/internal/go/gcimporter" + + "golang.org/x/tools/go/buildutil" + "golang.org/x/tools/go/loader" +) + +var isRace = false + +func TestBExportData_stdlib(t *testing.T) { + if runtime.Compiler == "gccgo" { + t.Skip("gccgo standard library is inaccessible") + } + if runtime.GOOS == "android" { + t.Skipf("incomplete std lib on %s", runtime.GOOS) + } + if isRace { + t.Skipf("stdlib tests take too long in race mode and flake on builders") + } + + // Load, parse and type-check the program. 
+ ctxt := build.Default // copy + ctxt.GOPATH = "" // disable GOPATH + conf := loader.Config{ + Build: &ctxt, + AllowErrors: true, + } + for _, path := range buildutil.AllPackages(conf.Build) { + conf.Import(path) + } + + // Create a package containing type and value errors to ensure + // they are properly encoded/decoded. + f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors +const UnknownValue = "" + 0 +type UnknownType undefined +`) + if err != nil { + t.Fatal(err) + } + conf.CreateFromFiles("haserrors", f) + + prog, err := conf.Load() + if err != nil { + t.Fatalf("Load failed: %v", err) + } + + numPkgs := len(prog.AllPackages) + if want := 248; numPkgs < want { + t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) + } + + for pkg, info := range prog.AllPackages { + if info.Files == nil { + continue // empty directory + } + exportdata, err := gcimporter.BExportData(conf.Fset, pkg) + if err != nil { + t.Fatal(err) + } + + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + n, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Errorf("BImportData(%s): %v", pkg.Path(), err) + continue + } + if n != len(exportdata) { + t.Errorf("BImportData(%s) decoded %d bytes, want %d", + pkg.Path(), n, len(exportdata)) + } + + // Compare the packages' corresponding members. 
+ for _, name := range pkg.Scope().Names() { + if !ast.IsExported(name) { + continue + } + obj1 := pkg.Scope().Lookup(name) + obj2 := pkg2.Scope().Lookup(name) + if obj2 == nil { + t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1) + continue + } + + fl1 := fileLine(conf.Fset, obj1) + fl2 := fileLine(fset2, obj2) + if fl1 != fl2 { + t.Errorf("%s.%s: got posn %s, want %s", + pkg.Path(), name, fl2, fl1) + } + + if err := equalObj(obj1, obj2); err != nil { + t.Errorf("%s.%s: %s\ngot: %s\nwant: %s", + pkg.Path(), name, err, obj2, obj1) + } + } + } +} + +func fileLine(fset *token.FileSet, obj types.Object) string { + posn := fset.Position(obj.Pos()) + return fmt.Sprintf("%s:%d", posn.Filename, posn.Line) +} + +// equalObj reports how x and y differ. They are assumed to belong to +// different universes so cannot be compared directly. +func equalObj(x, y types.Object) error { + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return fmt.Errorf("%T vs %T", x, y) + } + xt := x.Type() + yt := y.Type() + switch x.(type) { + case *types.Var, *types.Func: + // ok + case *types.Const: + xval := x.(*types.Const).Val() + yval := y.(*types.Const).Val() + // Use string comparison for floating-point values since rounding is permitted. + if constant.Compare(xval, token.NEQ, yval) && + !(xval.Kind() == constant.Float && xval.String() == yval.String()) { + return fmt.Errorf("unequal constants %s vs %s", xval, yval) + } + case *types.TypeName: + xt = xt.Underlying() + yt = yt.Underlying() + default: + return fmt.Errorf("unexpected %T", x) + } + return equalType(xt, yt) +} + +func equalType(x, y types.Type) error { + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return fmt.Errorf("unequal kinds: %T vs %T", x, y) + } + switch x := x.(type) { + case *types.Interface: + y := y.(*types.Interface) + // TODO(gri): enable separate emission of Embedded interfaces + // and ExplicitMethods then use this logic. 
+ // if x.NumEmbeddeds() != y.NumEmbeddeds() { + // return fmt.Errorf("unequal number of embedded interfaces: %d vs %d", + // x.NumEmbeddeds(), y.NumEmbeddeds()) + // } + // for i := 0; i < x.NumEmbeddeds(); i++ { + // xi := x.Embedded(i) + // yi := y.Embedded(i) + // if xi.String() != yi.String() { + // return fmt.Errorf("mismatched %th embedded interface: %s vs %s", + // i, xi, yi) + // } + // } + // if x.NumExplicitMethods() != y.NumExplicitMethods() { + // return fmt.Errorf("unequal methods: %d vs %d", + // x.NumExplicitMethods(), y.NumExplicitMethods()) + // } + // for i := 0; i < x.NumExplicitMethods(); i++ { + // xm := x.ExplicitMethod(i) + // ym := y.ExplicitMethod(i) + // if xm.Name() != ym.Name() { + // return fmt.Errorf("mismatched %th method: %s vs %s", i, xm, ym) + // } + // if err := equalType(xm.Type(), ym.Type()); err != nil { + // return fmt.Errorf("mismatched %s method: %s", xm.Name(), err) + // } + // } + if x.NumMethods() != y.NumMethods() { + return fmt.Errorf("unequal methods: %d vs %d", + x.NumMethods(), y.NumMethods()) + } + for i := 0; i < x.NumMethods(); i++ { + xm := x.Method(i) + ym := y.Method(i) + if xm.Name() != ym.Name() { + return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym) + } + if err := equalType(xm.Type(), ym.Type()); err != nil { + return fmt.Errorf("mismatched %s method: %s", xm.Name(), err) + } + } + case *types.Array: + y := y.(*types.Array) + if x.Len() != y.Len() { + return fmt.Errorf("unequal array lengths: %d vs %d", x.Len(), y.Len()) + } + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("array elements: %s", err) + } + case *types.Basic: + y := y.(*types.Basic) + if x.Kind() != y.Kind() { + return fmt.Errorf("unequal basic types: %s vs %s", x, y) + } + case *types.Chan: + y := y.(*types.Chan) + if x.Dir() != y.Dir() { + return fmt.Errorf("unequal channel directions: %d vs %d", x.Dir(), y.Dir()) + } + if err := equalType(x.Elem(), y.Elem()); err != nil { + return 
fmt.Errorf("channel elements: %s", err) + } + case *types.Map: + y := y.(*types.Map) + if err := equalType(x.Key(), y.Key()); err != nil { + return fmt.Errorf("map keys: %s", err) + } + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("map values: %s", err) + } + case *types.Named: + y := y.(*types.Named) + if x.String() != y.String() { + return fmt.Errorf("unequal named types: %s vs %s", x, y) + } + case *types.Pointer: + y := y.(*types.Pointer) + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("pointer elements: %s", err) + } + case *types.Signature: + y := y.(*types.Signature) + if err := equalType(x.Params(), y.Params()); err != nil { + return fmt.Errorf("parameters: %s", err) + } + if err := equalType(x.Results(), y.Results()); err != nil { + return fmt.Errorf("results: %s", err) + } + if x.Variadic() != y.Variadic() { + return fmt.Errorf("unequal variadicity: %t vs %t", + x.Variadic(), y.Variadic()) + } + if (x.Recv() != nil) != (y.Recv() != nil) { + return fmt.Errorf("unequal receivers: %s vs %s", x.Recv(), y.Recv()) + } + if x.Recv() != nil { + // TODO(adonovan): fix: this assertion fires for interface methods. + // The type of the receiver of an interface method is a named type + // if the Package was loaded from export data, or an unnamed (interface) + // type if the Package was produced by type-checking ASTs. 
+ // if err := equalType(x.Recv().Type(), y.Recv().Type()); err != nil { + // return fmt.Errorf("receiver: %s", err) + // } + } + case *types.Slice: + y := y.(*types.Slice) + if err := equalType(x.Elem(), y.Elem()); err != nil { + return fmt.Errorf("slice elements: %s", err) + } + case *types.Struct: + y := y.(*types.Struct) + if x.NumFields() != y.NumFields() { + return fmt.Errorf("unequal struct fields: %d vs %d", + x.NumFields(), y.NumFields()) + } + for i := 0; i < x.NumFields(); i++ { + xf := x.Field(i) + yf := y.Field(i) + if xf.Name() != yf.Name() { + return fmt.Errorf("mismatched fields: %s vs %s", xf, yf) + } + if err := equalType(xf.Type(), yf.Type()); err != nil { + return fmt.Errorf("struct field %s: %s", xf.Name(), err) + } + if x.Tag(i) != y.Tag(i) { + return fmt.Errorf("struct field %s has unequal tags: %q vs %q", + xf.Name(), x.Tag(i), y.Tag(i)) + } + } + case *types.Tuple: + y := y.(*types.Tuple) + if x.Len() != y.Len() { + return fmt.Errorf("unequal tuple lengths: %d vs %d", x.Len(), y.Len()) + } + for i := 0; i < x.Len(); i++ { + if err := equalType(x.At(i).Type(), y.At(i).Type()); err != nil { + return fmt.Errorf("tuple element %d: %s", i, err) + } + } + } + return nil +} + +// TestVeryLongFile tests the position of an import object declared in +// a very long input file. Line numbers greater than maxlines are +// reported as line 1, not garbage or token.NoPos. 
+func TestVeryLongFile(t *testing.T) { + // parse and typecheck + longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" + fset1 := token.NewFileSet() + f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) + if err != nil { + t.Fatal(err) + } + var conf types.Config + pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + // export + exportdata, err := gcimporter.BExportData(fset1, pkg) + if err != nil { + t.Fatal(err) + } + + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Fatalf("BImportData(%s): %v", pkg.Path(), err) + } + + // compare + posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) + posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) + if want := "foo.go:1:1"; posn2.String() != want { + t.Errorf("X position = %s, want %s (orig was %s)", + posn2, want, posn1) + } +} + +const src = ` +package p + +type ( + T0 = int32 + T1 = struct{} + T2 = struct{ T1 } + Invalid = foo // foo is undeclared +) +` + +func checkPkg(t *testing.T, pkg *types.Package, label string) { + T1 := types.NewStruct(nil, nil) + T2 := types.NewStruct([]*types.Var{types.NewField(0, pkg, "T1", T1, true)}, nil) + + for _, test := range []struct { + name string + typ types.Type + }{ + {"T0", types.Typ[types.Int32]}, + {"T1", T1}, + {"T2", T2}, + {"Invalid", types.Typ[types.Invalid]}, + } { + obj := pkg.Scope().Lookup(test.name) + if obj == nil { + t.Errorf("%s: %s not found", label, test.name) + continue + } + tname, _ := obj.(*types.TypeName) + if tname == nil { + t.Errorf("%s: %v not a type name", label, obj) + continue + } + if !tname.IsAlias() { + t.Errorf("%s: %v: not marked as alias", label, tname) + continue + } + if got := tname.Type(); !types.Identical(got, test.typ) { + t.Errorf("%s: %v: got %v; want %v", label, tname, got, test.typ) + } + } +} + +func TestTypeAliases(t 
*testing.T) {
+ // parse and typecheck
+ fset1 := token.NewFileSet()
+ f, err := parser.ParseFile(fset1, "p.go", src, 0)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var conf types.Config
+ pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil)
+ if err == nil {
+ // foo is undeclared in src; we should see an error
+ t.Fatal("invalid source type-checked without error")
+ }
+ if pkg1 == nil {
+ // despite incorrect src we should see a (partially) type-checked package
+ t.Fatal("nil package returned")
+ }
+ checkPkg(t, pkg1, "export")
+
+ // export
+ exportdata, err := gcimporter.BExportData(fset1, pkg1)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ // import
+ imports := make(map[string]*types.Package)
+ fset2 := token.NewFileSet()
+ _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg1.Path())
+ if err != nil {
+ t.Fatalf("BImportData(%s): %v", pkg1.Path(), err)
+ }
+ checkPkg(t, pkg2, "import")
+}
diff --git a/internal/go/gcimporter/bimport.go b/internal/go/gcimporter/bimport.go
new file mode 100644
index 000000000..e9f73d14a
--- /dev/null
+++ b/internal/go/gcimporter/bimport.go
@@ -0,0 +1,1039 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. 
+ +package gcimporter + +import ( + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type importer struct { + imports map[string]*types.Package + data []byte + importpath string + buf []byte // for reading strings + version int // export format version + + // object lists + strList []string // in order of appearance + pathList []string // in order of appearance + pkgList []*types.Package // in order of appearance + typList []types.Type // in order of appearance + interfaceList []*types.Interface // for delayed completion only + trackAllTypes bool + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + fake fakeFileSet + + // debugging support + debugFormat bool + read int // bytes read +} + +// BImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. +func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + // catch panics and return them as errors + const currentVersion = 6 + version := -1 // unknown version + defer func() { + if e := recover(); e != nil { + // Return a (possibly nil or incomplete) package unchanged (see #16088). 
+ if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + p := importer{ + imports: imports, + data: data, + importpath: path, + version: version, + strList: []string{""}, // empty string is mapped to 0 + pathList: []string{""}, // empty string is mapped to 0 + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + // read version info + var versionstr string + if b := p.rawByte(); b == 'c' || b == 'd' { + // Go1.7 encoding; first byte encodes low-level + // encoding format (compact vs debug). + // For backward-compatibility only (avoid problems with + // old installed packages). Newly compiled packages use + // the extensible format string. + // TODO(gri) Remove this support eventually; after Go1.8. + if b == 'd' { + p.debugFormat = true + } + p.trackAllTypes = p.rawByte() == 'a' + p.posInfoFormat = p.int() != 0 + versionstr = p.string() + if versionstr == "v1" { + version = 0 + } + } else { + // Go1.8 extensible encoding + // read version string and extract version number (ignore anything after the version number) + versionstr = p.rawStringln(b) + if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { + if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { + version = v + } + } + } + p.version = version + + // read version specific flags - extend as necessary + switch p.version { + // case currentVersion: + // ... 
+ // fallthrough + case currentVersion, 5, 4, 3, 2, 1: + p.debugFormat = p.rawStringln(p.rawByte()) == "debug" + p.trackAllTypes = p.int() != 0 + p.posInfoFormat = p.int() != 0 + case 0: + // Go1.7 encoding format - nothing to do here + default: + errorf("unknown bexport format version %d (%q)", p.version, versionstr) + } + + // --- generic export data --- + + // populate typList with predeclared "known" types + p.typList = append(p.typList, predeclared()...) + + // read package data + pkg = p.pkg() + + // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go) + objcount := 0 + for { + tag := p.tagOrIndex() + if tag == endTag { + break + } + p.obj(tag) + objcount++ + } + + // self-verification + if count := p.int(); count != objcount { + errorf("got %d objects; want %d", objcount, count) + } + + // ignore compiler-specific import data + + // complete interfaces + // TODO(gri) re-investigate if we still need to do this in a delayed fashion + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), p.pkgList[1:]...) 
+ sort.Sort(byPath(list)) + pkg.SetImports(list) + + // package was imported completely and without errors + pkg.MarkComplete() + + return p.read, pkg, nil +} + +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + +func (p *importer) pkg() *types.Package { + // if the package was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.pkgList[i] + } + + // otherwise, i is the package tag (< 0) + if i != packageTag { + errorf("unexpected package tag %d version %d", i, p.version) + } + + // read package data + name := p.string() + var path string + if p.version >= 5 { + path = p.path() + } else { + path = p.string() + } + if p.version >= 6 { + p.int() // package height; unused by go/types + } + + // we should never see an empty package name + if name == "" { + errorf("empty package name in import") + } + + // an empty path denotes the package we are currently importing; + // it must be the first package we see + if (path == "") != (len(p.pkgList) == 0) { + errorf("package path %q for pkg index %d", path, len(p.pkgList)) + } + + // if the package was imported before, use that one; otherwise create a new one + if path == "" { + path = p.importpath + } + pkg := p.imports[path] + if pkg == nil { + pkg = types.NewPackage(path, name) + p.imports[path] = pkg + } else if pkg.Name() != name { + errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) + } + p.pkgList = append(p.pkgList, pkg) + + return pkg +} + +// objTag returns the tag value for each object kind. 
+func objTag(obj types.Object) int { + switch obj.(type) { + case *types.Const: + return constTag + case *types.TypeName: + return typeTag + case *types.Var: + return varTag + case *types.Func: + return funcTag + default: + errorf("unexpected object: %v (%T)", obj, obj) // panics + panic("unreachable") + } +} + +func sameObj(a, b types.Object) bool { + // Because unnamed types are not canonicalized, we cannot simply compare types for + // (pointer) identity. + // Ideally we'd check equality of constant values as well, but this is good enough. + return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) +} + +func (p *importer) declare(obj types.Object) { + pkg := obj.Pkg() + if alt := pkg.Scope().Insert(obj); alt != nil { + // This can only trigger if we import a (non-type) object a second time. + // Excluding type aliases, this cannot happen because 1) we only import a package + // once; and b) we ignore compiler-specific export data which may contain + // functions whose inlined function bodies refer to other functions that + // were already imported. + // However, type aliases require reexporting the original type, so we need + // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, + // method importer.obj, switch case importing functions). + // TODO(gri) review/update this comment once the gc compiler handles type aliases. 
+ if !sameObj(obj, alt) { + errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) + } + } +} + +func (p *importer) obj(tag int) { + switch tag { + case constTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + val := p.value() + p.declare(types.NewConst(pos, pkg, name, typ, val)) + + case aliasTag: + // TODO(gri) verify type alias hookup is correct + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewTypeName(pos, pkg, name, typ)) + + case typeTag: + p.typ(nil, nil) + + case varTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewVar(pos, pkg, name, typ)) + + case funcTag: + pos := p.pos() + pkg, name := p.qualifiedName() + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(nil, params, result, isddd) + p.declare(types.NewFunc(pos, pkg, name, sig)) + + default: + errorf("unexpected object tag %d", tag) + } +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +func (p *importer) pos() token.Pos { + if !p.posInfoFormat { + return token.NoPos + } + + file := p.prevFile + line := p.prevLine + delta := p.int() + line += delta + if p.version >= 5 { + if delta == deltaNewFile { + if n := p.int(); n >= 0 { + // file changed + file = p.path() + line = n + } + } + } else { + if delta == 0 { + if n := p.int(); n >= 0 { + // file changed + file = p.prevFile[:n] + p.string() + line = p.int() + } + } + } + p.prevFile = file + p.prevLine = line + + return p.fake.pos(file, line, 0) +} + +// Synthesize a token.Pos +type fakeFileSet struct { + fset *token.FileSet + files map[string]*token.File +} + +func (s *fakeFileSet) pos(file string, line, column int) token.Pos { + // TODO(mdempsky): Make use of column. + + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. 
+ const maxlines = 64 * 1024 + f := s.files[file] + if f == nil { + f = s.fset.AddFile(file, -1, maxlines) + s.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + f.SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. + return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func (p *importer) qualifiedName() (pkg *types.Package, name string) { + name = p.string() + pkg = p.pkg() + return +} + +func (p *importer) record(t types.Type) { + p.typList = append(p.typList, t) +} + +// A dddSlice is a types.Type representing ...T parameters. +// It only appears for parameter types and does not escape +// the importer. +type dddSlice struct { + elem types.Type +} + +func (t *dddSlice) Underlying() types.Type { return t } +func (t *dddSlice) String() string { return "..." + t.elem.String() } + +// parent is the package which declared the type; parent == nil means +// the package currently imported. The parent package is needed for +// exported struct fields and interface methods which don't contain +// explicit package information in the export data. +// +// A non-nil tname is used as the "owner" of the result type; i.e., +// the result type is the underlying type of tname. tname is used +// to give interface methods a named receiver type where possible. 
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type { + // if the type was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.typList[i] + } + + // otherwise, i is the type tag (< 0) + switch i { + case namedTag: + // read type object + pos := p.pos() + parent, name := p.qualifiedName() + scope := parent.Scope() + obj := scope.Lookup(name) + + // if the object doesn't exist yet, create and insert it + if obj == nil { + obj = types.NewTypeName(pos, parent, name, nil) + scope.Insert(obj) + } + + if _, ok := obj.(*types.TypeName); !ok { + errorf("pkg = %s, name = %s => %s", parent, name, obj) + } + + // associate new named type with obj if it doesn't exist yet + t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) + + // but record the existing type, if any + tname := obj.Type().(*types.Named) // tname is either t0 or the existing type + p.record(tname) + + // read underlying type + t0.SetUnderlying(p.typ(parent, t0)) + + // interfaces don't have associated methods + if types.IsInterface(t0) { + return tname + } + + // read associated methods + for i := p.int(); i > 0; i-- { + // TODO(gri) replace this with something closer to fieldName + pos := p.pos() + name := p.string() + if !exported(name) { + p.pkg() + } + + recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? 
+ params, isddd := p.paramList() + result, _ := p.paramList() + p.int() // go:nointerface pragma - discarded + + sig := types.NewSignature(recv.At(0), params, result, isddd) + t0.AddMethod(types.NewFunc(pos, parent, name, sig)) + } + + return tname + + case arrayTag: + t := new(types.Array) + if p.trackAllTypes { + p.record(t) + } + + n := p.int64() + *t = *types.NewArray(p.typ(parent, nil), n) + return t + + case sliceTag: + t := new(types.Slice) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewSlice(p.typ(parent, nil)) + return t + + case dddTag: + t := new(dddSlice) + if p.trackAllTypes { + p.record(t) + } + + t.elem = p.typ(parent, nil) + return t + + case structTag: + t := new(types.Struct) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewStruct(p.fieldList(parent)) + return t + + case pointerTag: + t := new(types.Pointer) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewPointer(p.typ(parent, nil)) + return t + + case signatureTag: + t := new(types.Signature) + if p.trackAllTypes { + p.record(t) + } + + params, isddd := p.paramList() + result, _ := p.paramList() + *t = *types.NewSignature(nil, params, result, isddd) + return t + + case interfaceTag: + // Create a dummy entry in the type list. This is safe because we + // cannot expect the interface type to appear in a cycle, as any + // such cycle must contain a named type which would have been + // first defined earlier. + // TODO(gri) Is this still true now that we have type aliases? + // See issue #23225. 
+ n := len(p.typList) + if p.trackAllTypes { + p.record(nil) + } + + var embeddeds []types.Type + for n := p.int(); n > 0; n-- { + p.pos() + embeddeds = append(embeddeds, p.typ(parent, nil)) + } + + t := newInterface(p.methodList(parent, tname), embeddeds) + p.interfaceList = append(p.interfaceList, t) + if p.trackAllTypes { + p.typList[n] = t + } + return t + + case mapTag: + t := new(types.Map) + if p.trackAllTypes { + p.record(t) + } + + key := p.typ(parent, nil) + val := p.typ(parent, nil) + *t = *types.NewMap(key, val) + return t + + case chanTag: + t := new(types.Chan) + if p.trackAllTypes { + p.record(t) + } + + dir := chanDir(p.int()) + val := p.typ(parent, nil) + *t = *types.NewChan(dir, val) + return t + + default: + errorf("unexpected type tag %d", i) // panics + panic("unreachable") + } +} + +func chanDir(d int) types.ChanDir { + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d { + case 1 /* Crecv */ : + return types.RecvOnly + case 2 /* Csend */ : + return types.SendOnly + case 3 /* Cboth */ : + return types.SendRecv + default: + errorf("unexpected channel dir %d", d) + return 0 + } +} + +func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { + if n := p.int(); n > 0 { + fields = make([]*types.Var, n) + tags = make([]string, n) + for i := range fields { + fields[i], tags[i] = p.field(parent) + } + } + return +} + +func (p *importer) field(parent *types.Package) (*types.Var, string) { + pos := p.pos() + pkg, name, alias := p.fieldName(parent) + typ := p.typ(parent, nil) + tag := p.string() + + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + errorf("named base type expected") + } + anonymous = 
true + } else if alias { + // anonymous field: we have an explicit name because it's an alias + anonymous = true + } + + return types.NewField(pos, pkg, name, typ, anonymous), tag +} + +func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) { + if n := p.int(); n > 0 { + methods = make([]*types.Func, n) + for i := range methods { + methods[i] = p.method(parent, baseType) + } + } + return +} + +func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func { + pos := p.pos() + pkg, name, _ := p.fieldName(parent) + // If we don't have a baseType, use a nil receiver. + // A receiver using the actual interface type (which + // we don't know yet) will be filled in when we call + // types.Interface.Complete. + var recv *types.Var + if baseType != nil { + recv = types.NewVar(token.NoPos, parent, "", baseType) + } + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(recv, params, result, isddd) + return types.NewFunc(pos, pkg, name, sig) +} + +func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { + name = p.string() + pkg = parent + if pkg == nil { + // use the imported package instead + pkg = p.pkgList[0] + } + if p.version == 0 && name == "_" { + // version 0 didn't export a package for _ fields + return + } + switch name { + case "": + // 1) field name matches base type name and is exported: nothing to do + case "?": + // 2) field name matches base type name and is not exported: need package + name = "" + pkg = p.pkg() + case "@": + // 3) field name doesn't match type name (alias) + name = p.string() + alias = true + fallthrough + default: + if !exported(name) { + pkg = p.pkg() + } + } + return +} + +func (p *importer) paramList() (*types.Tuple, bool) { + n := p.int() + if n == 0 { + return nil, false + } + // negative length indicates unnamed parameters + named := true + if n < 0 { + n = -n + named = false + } + // n > 0 + 
params := make([]*types.Var, n) + isddd := false + for i := range params { + params[i], isddd = p.param(named) + } + return types.NewTuple(params...), isddd +} + +func (p *importer) param(named bool) (*types.Var, bool) { + t := p.typ(nil, nil) + td, isddd := t.(*dddSlice) + if isddd { + t = types.NewSlice(td.elem) + } + + var pkg *types.Package + var name string + if named { + name = p.string() + if name == "" { + errorf("expected named parameter") + } + if name != "_" { + pkg = p.pkg() + } + if i := strings.Index(name, "·"); i > 0 { + name = name[:i] // cut off gc-specific parameter numbering + } + } + + // read and discard compiler-specific info + p.string() + + return types.NewVar(token.NoPos, pkg, name, t), isddd +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +func (p *importer) value() constant.Value { + switch tag := p.tagOrIndex(); tag { + case falseTag: + return constant.MakeBool(false) + case trueTag: + return constant.MakeBool(true) + case int64Tag: + return constant.MakeInt64(p.int64()) + case floatTag: + return p.float() + case complexTag: + re := p.float() + im := p.float() + return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + case stringTag: + return constant.MakeString(p.string()) + case unknownTag: + return constant.MakeUnknown() + default: + errorf("unexpected value tag %d", tag) // panics + panic("unreachable") + } +} + +func (p *importer) float() constant.Value { + sign := p.int() + if sign == 0 { + return constant.MakeInt64(0) + } + + exp := p.int() + mant := []byte(p.string()) // big endian + + // remove leading 0's if any + for len(mant) > 0 && mant[0] == 0 { + mant = mant[1:] + } + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { + mant[i], mant[j] = mant[j], mant[i] + } + + // adjust exponent 
(constant.MakeFromBytes creates an integer value, + // but mant represents the mantissa bits such that 0.5 <= mant < 1.0) + exp -= len(mant) << 3 + if len(mant) > 0 { + for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { + exp++ + } + } + + x := constant.MakeFromBytes(mant) + switch { + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + } + + if sign < 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +// ---------------------------------------------------------------------------- +// Low-level decoders + +func (p *importer) tagOrIndex() int { + if p.debugFormat { + p.marker('t') + } + + return int(p.rawInt64()) +} + +func (p *importer) int() int { + x := p.int64() + if int64(int(x)) != x { + errorf("exported integer too large") + } + return int(x) +} + +func (p *importer) int64() int64 { + if p.debugFormat { + p.marker('i') + } + + return p.rawInt64() +} + +func (p *importer) path() string { + if p.debugFormat { + p.marker('p') + } + // if the path was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.pathList[i] + } + // otherwise, i is the negative path length (< 0) + a := make([]string, -i) + for n := range a { + a[n] = p.string() + } + s := strings.Join(a, "/") + p.pathList = append(p.pathList, s) + return s +} + +func (p *importer) string() string { + if p.debugFormat { + p.marker('s') + } + // if the string was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.strList[i] + } + // otherwise, i is the negative string length (< 0) + if n := int(-i); n <= cap(p.buf) { + p.buf = p.buf[:n] + } else { + p.buf = make([]byte, n) + } + for i := range p.buf { + p.buf[i] = p.rawByte() + } + s := string(p.buf) + p.strList = append(p.strList, s) + return s +} + +func (p *importer) 
marker(want byte) { + if got := p.rawByte(); got != want { + errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) + } + + pos := p.read + if n := int(p.rawInt64()); n != pos { + errorf("incorrect position: got %d; want %d", n, pos) + } +} + +// rawInt64 should only be used by low-level decoders. +func (p *importer) rawInt64() int64 { + i, err := binary.ReadVarint(p) + if err != nil { + errorf("read error: %v", err) + } + return i +} + +// rawStringln should only be used to read the initial version string. +func (p *importer) rawStringln(b byte) string { + p.buf = p.buf[:0] + for b != '\n' { + p.buf = append(p.buf, b) + b = p.rawByte() + } + return string(p.buf) +} + +// needed for binary.ReadVarint in rawInt64 +func (p *importer) ReadByte() (byte, error) { + return p.rawByte(), nil +} + +// byte is the bottleneck interface for reading p.data. +// It unescapes '|' 'S' to '$' and '|' '|' to '|'. +// rawByte should only be used by low-level decoders. +func (p *importer) rawByte() byte { + b := p.data[0] + r := 1 + if b == '|' { + b = p.data[1] + r = 2 + switch b { + case 'S': + b = '$' + case '|': + // nothing to do + default: + errorf("unexpected escape sequence in export data") + } + } + p.data = p.data[r:] + p.read += r + return b + +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. 
+const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag +) + +var predeclOnce sync.Once +var predecl []types.Type // initialized lazily + +func predeclared() []types.Type { + predeclOnce.Do(func() { + // initialize lazily to be sure that all + // elements have been initialized before + predecl = []types.Type{ // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + anyType{}, + } + }) + return predecl +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t 
anyType) String() string { return "any" } diff --git a/internal/go/gcimporter/exportdata.go b/internal/go/gcimporter/exportdata.go new file mode 100644 index 000000000..f33dc5613 --- /dev/null +++ b/internal/go/gcimporter/exportdata.go @@ -0,0 +1,93 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. + +// This file implements FindExportData. + +package gcimporter + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" +) + +func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { + // See $GOROOT/include/ar.h. + hdr := make([]byte, 16+12+6+6+8+10+2) + _, err = io.ReadFull(r, hdr) + if err != nil { + return + } + // leave for debugging + if false { + fmt.Printf("header: %s", hdr) + } + s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) + size, err = strconv.Atoi(s) + if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { + err = fmt.Errorf("invalid archive header") + return + } + name = strings.TrimSpace(string(hdr[:16])) + return +} + +// FindExportData positions the reader r at the beginning of the +// export data section of an underlying GC-created object/archive +// file by reading from it. The reader must be positioned at the +// start of the file before calling this function. The hdr result +// is the string before the export data, either "$$" or "$$B". +// +func FindExportData(r *bufio.Reader) (hdr string, err error) { + // Read first line to make sure this is an object file. + line, err := r.ReadSlice('\n') + if err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + + if string(line) == "!\n" { + // Archive file. Scan to __.PKGDEF. + var name string + if name, _, err = readGopackHeader(r); err != nil { + return + } + + // First entry should be __.PKGDEF. 
+ if name != "__.PKGDEF" { + err = fmt.Errorf("go archive is missing __.PKGDEF") + return + } + + // Read first line of __.PKGDEF data, so that line + // is once again the first line of the input. + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + + // Now at __.PKGDEF in archive or still at beginning of file. + // Either way, line should begin with "go object ". + if !strings.HasPrefix(string(line), "go object ") { + err = fmt.Errorf("not a Go object file") + return + } + + // Skip over object header to export data. + // Begins after first line starting with $$. + for line[0] != '$' { + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + hdr = string(line) + + return +} diff --git a/internal/go/gcimporter/gcimporter.go b/internal/go/gcimporter/gcimporter.go new file mode 100644 index 000000000..981320890 --- /dev/null +++ b/internal/go/gcimporter/gcimporter.go @@ -0,0 +1,1078 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go, +// but it also contains the original source-based importer code for Go1.6. +// Once we stop supporting 1.6, we can remove that code. + +// Package gcimporter provides various functions for reading +// gc-generated object files that can be used to implement the +// Importer interface defined by the Go 1.5 standard library package. 
+package gcimporter + +import ( + "bufio" + "errors" + "fmt" + "go/build" + "go/constant" + "go/token" + "go/types" + "io" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "text/scanner" +) + +// debugging/development support +const debug = false + +var pkgExts = [...]string{".a", ".o"} + +// FindPkg returns the filename and unique package id for an import +// path based on package information provided by build.Import (using +// the build.Default build.Context). A relative srcDir is interpreted +// relative to the current working directory. +// If no file was found, an empty filename is returned. +// +func FindPkg(path, srcDir string) (filename, id string) { + if path == "" { + return + } + + var noext string + switch { + default: + // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" + // Don't require the source files to be present. + if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 + srcDir = abs + } + bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) + if bp.PkgObj == "" { + id = path // make sure we have an id to print in error message + return + } + noext = strings.TrimSuffix(bp.PkgObj, ".a") + id = bp.ImportPath + + case build.IsLocalImport(path): + // "./x" -> "/this/directory/x.ext", "/this/directory/x" + noext = filepath.Join(srcDir, path) + id = noext + + case filepath.IsAbs(path): + // for completeness only - go/build.Import + // does not support absolute imports + // "/x" -> "/x.ext", "/x" + noext = path + id = path + } + + if false { // for debugging + if path != id { + fmt.Printf("%s -> %s\n", path, id) + } + } + + // try extensions + for _, ext := range pkgExts { + filename = noext + ext + if f, err := os.Stat(filename); err == nil && !f.IsDir() { + return + } + } + + filename = "" // not found + return +} + +// ImportData imports a package by reading the gc-generated export data, +// adds the corresponding package object to the packages map indexed by id, +// and returns the object. 
+// +// The packages map must contains all packages already imported. The data +// reader position must be the beginning of the export data section. The +// filename is only used in error messages. +// +// If packages[id] contains the completely imported package, that package +// can be used directly, and there is no need to call this function (but +// there is also no harm but for extra time used). +// +func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) { + // support for parser error handling + defer func() { + switch r := recover().(type) { + case nil: + // nothing to do + case importError: + err = r + default: + panic(r) // internal error + } + }() + + var p parser + p.init(filename, id, data, packages) + pkg = p.parseExport() + + return +} + +// Import imports a gc-generated package given its import path and srcDir, adds +// the corresponding package object to the packages map, and returns the object. +// The packages map must contain all packages already imported. +// +func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) { + var rc io.ReadCloser + var filename, id string + if lookup != nil { + // With custom lookup specified, assume that caller has + // converted path to a canonical import path for use in the map. + if path == "unsafe" { + return types.Unsafe, nil + } + id = path + + // No need to re-import if the package was imported completely before. 
+ if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + f, err := lookup(path) + if err != nil { + return nil, err + } + rc = f + } else { + filename, id = FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %q", id) + } + + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if err != nil { + // add file name to error + err = fmt.Errorf("%s: %v", filename, err) + } + }() + rc = f + } + defer rc.Close() + + var hdr string + buf := bufio.NewReader(rc) + if hdr, err = FindExportData(buf); err != nil { + return + } + + switch hdr { + case "$$\n": + // Work-around if we don't have a filename; happens only if lookup != nil. + // Either way, the filename is only needed for importer error messages, so + // this is fine. + if filename == "" { + filename = path + } + return ImportData(packages, filename, id, buf) + + case "$$B\n": + var data []byte + data, err = ioutil.ReadAll(buf) + if err != nil { + break + } + + // TODO(gri): allow clients of go/importer to provide a FileSet. + // Or, define a new standard go/types/gcexportdata package. + fset := token.NewFileSet() + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err = IImportData(fset, packages, data[1:], id) + } else { + _, pkg, err = BImportData(fset, packages, data, id) + } + + default: + err = fmt.Errorf("unknown export data header: %q", hdr) + } + + return +} + +// ---------------------------------------------------------------------------- +// Parser + +// TODO(gri) Imported objects don't have position information. 
+// Ideally use the debug table line info; alternatively +// create some fake position (or the position of the +// import). That way error messages referring to imported +// objects can print meaningful information. + +// parser parses the exports inside a gc compiler-produced +// object/archive file and populates its scope with the results. +type parser struct { + scanner scanner.Scanner + tok rune // current token + lit string // literal string; only valid for Ident, Int, String tokens + id string // package id of imported package + sharedPkgs map[string]*types.Package // package id -> package object (across importer) + localPkgs map[string]*types.Package // package id -> package object (just this package) +} + +func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { + p.scanner.Init(src) + p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } + p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments + p.scanner.Whitespace = 1<<'\t' | 1<<' ' + p.scanner.Filename = filename // for good error messages + p.next() + p.id = id + p.sharedPkgs = packages + if debug { + // check consistency of packages map + for _, pkg := range packages { + if pkg.Name() == "" { + fmt.Printf("no package name for %s\n", pkg.Path()) + } + } + } +} + +func (p *parser) next() { + p.tok = p.scanner.Scan() + switch p.tok { + case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': + p.lit = p.scanner.TokenText() + default: + p.lit = "" + } + if debug { + fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) + } +} + +func declTypeName(pkg *types.Package, name string) *types.TypeName { + scope := pkg.Scope() + if obj := scope.Lookup(name); obj != nil { + return obj.(*types.TypeName) + } + obj := types.NewTypeName(token.NoPos, pkg, name, nil) + // a named type may be referred to before the underlying type + // 
is known - set it up + types.NewNamed(obj, nil, nil) + scope.Insert(obj) + return obj +} + +// ---------------------------------------------------------------------------- +// Error handling + +// Internal errors are boxed as importErrors. +type importError struct { + pos scanner.Position + err error +} + +func (e importError) Error() string { + return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) +} + +func (p *parser) error(err interface{}) { + if s, ok := err.(string); ok { + err = errors.New(s) + } + // panic with a runtime.Error if err is not an error + panic(importError{p.scanner.Pos(), err.(error)}) +} + +func (p *parser) errorf(format string, args ...interface{}) { + p.error(fmt.Sprintf(format, args...)) +} + +func (p *parser) expect(tok rune) string { + lit := p.lit + if p.tok != tok { + p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) + } + p.next() + return lit +} + +func (p *parser) expectSpecial(tok string) { + sep := 'x' // not white space + i := 0 + for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + i++ + } + if i < len(tok) { + p.errorf("expected %q, got %q", tok, tok[0:i]) + } +} + +func (p *parser) expectKeyword(keyword string) { + lit := p.expect(scanner.Ident) + if lit != keyword { + p.errorf("expected keyword %s, got %q", keyword, lit) + } +} + +// ---------------------------------------------------------------------------- +// Qualified and unqualified names + +// PackageId = string_lit . +// +func (p *parser) parsePackageID() string { + id, err := strconv.Unquote(p.expect(scanner.String)) + if err != nil { + p.error(err) + } + // id == "" stands for the imported package id + // (only known at time of package installation) + if id == "" { + id = p.id + } + return id +} + +// PackageName = ident . 
+// +func (p *parser) parsePackageName() string { + return p.expect(scanner.Ident) +} + +// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . +func (p *parser) parseDotIdent() string { + ident := "" + if p.tok != scanner.Int { + sep := 'x' // not white space + for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { + ident += p.lit + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + } + } + if ident == "" { + p.expect(scanner.Ident) // use expect() for error handling + } + return ident +} + +// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . +// +func (p *parser) parseQualifiedName() (id, name string) { + p.expect('@') + id = p.parsePackageID() + p.expect('.') + // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. + if p.tok == '?' { + p.next() + } else { + name = p.parseDotIdent() + } + return +} + +// getPkg returns the package for a given id. If the package is +// not found, create the package and add it to the p.localPkgs +// and p.sharedPkgs maps. name is the (expected) name of the +// package. If name == "", the package name is expected to be +// set later via an import clause in the export data. +// +// id identifies a package, usually by a canonical package path like +// "encoding/json" but possibly by a non-canonical import path like +// "./json". 
+// +func (p *parser) getPkg(id, name string) *types.Package { + // package unsafe is not in the packages maps - handle explicitly + if id == "unsafe" { + return types.Unsafe + } + + pkg := p.localPkgs[id] + if pkg == nil { + // first import of id from this package + pkg = p.sharedPkgs[id] + if pkg == nil { + // first import of id by this importer; + // add (possibly unnamed) pkg to shared packages + pkg = types.NewPackage(id, name) + p.sharedPkgs[id] = pkg + } + // add (possibly unnamed) pkg to local packages + if p.localPkgs == nil { + p.localPkgs = make(map[string]*types.Package) + } + p.localPkgs[id] = pkg + } else if name != "" { + // package exists already and we have an expected package name; + // make sure names match or set package name if necessary + if pname := pkg.Name(); pname == "" { + pkg.SetName(name) + } else if pname != name { + p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) + } + } + return pkg +} + +// parseExportedName is like parseQualifiedName, but +// the package id is resolved to an imported *types.Package. +// +func (p *parser) parseExportedName() (pkg *types.Package, name string) { + id, name := p.parseQualifiedName() + pkg = p.getPkg(id, "") + return +} + +// ---------------------------------------------------------------------------- +// Types + +// BasicType = identifier . +// +func (p *parser) parseBasicType() types.Type { + id := p.expect(scanner.Ident) + obj := types.Universe.Lookup(id) + if obj, ok := obj.(*types.TypeName); ok { + return obj.Type() + } + p.errorf("not a basic type: %s", id) + return nil +} + +// ArrayType = "[" int_lit "]" Type . 
+// +func (p *parser) parseArrayType(parent *types.Package) types.Type { + // "[" already consumed and lookahead known not to be "]" + lit := p.expect(scanner.Int) + p.expect(']') + elem := p.parseType(parent) + n, err := strconv.ParseInt(lit, 10, 64) + if err != nil { + p.error(err) + } + return types.NewArray(elem, n) +} + +// MapType = "map" "[" Type "]" Type . +// +func (p *parser) parseMapType(parent *types.Package) types.Type { + p.expectKeyword("map") + p.expect('[') + key := p.parseType(parent) + p.expect(']') + elem := p.parseType(parent) + return types.NewMap(key, elem) +} + +// Name = identifier | "?" | QualifiedName . +// +// For unqualified and anonymous names, the returned package is the parent +// package unless parent == nil, in which case the returned package is the +// package being imported. (The parent package is not nil if the the name +// is an unqualified struct field or interface method name belonging to a +// type declared in another package.) +// +// For qualified names, the returned package is nil (and not created if +// it doesn't exist yet) unless materializePkg is set (which creates an +// unnamed package with valid package path). In the latter case, a +// subsequent import clause is expected to provide a name for the package. +// +func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) { + pkg = parent + if pkg == nil { + pkg = p.sharedPkgs[p.id] + } + switch p.tok { + case scanner.Ident: + name = p.lit + p.next() + case '?': + // anonymous + p.next() + case '@': + // exported name prefixed with package path + pkg = nil + var id string + id, name = p.parseQualifiedName() + if materializePkg { + pkg = p.getPkg(id, "") + } + default: + p.error("name expected") + } + return +} + +func deref(typ types.Type) types.Type { + if p, _ := typ.(*types.Pointer); p != nil { + return p.Elem() + } + return typ +} + +// Field = Name Type [ string_lit ] . 
+// +func (p *parser) parseField(parent *types.Package) (*types.Var, string) { + pkg, name := p.parseName(parent, true) + + if name == "_" { + // Blank fields should be package-qualified because they + // are unexported identifiers, but gc does not qualify them. + // Assuming that the ident belongs to the current package + // causes types to change during re-exporting, leading + // to spurious "can't assign A to B" errors from go/types. + // As a workaround, pretend all blank fields belong + // to the same unique dummy package. + const blankpkg = "<_>" + pkg = p.getPkg(blankpkg, blankpkg) + } + + typ := p.parseType(parent) + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + p.errorf("anonymous field expected") + } + anonymous = true + } + tag := "" + if p.tok == scanner.String { + s := p.expect(scanner.String) + var err error + tag, err = strconv.Unquote(s) + if err != nil { + p.errorf("invalid struct tag %s: %s", s, err) + } + } + return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag +} + +// StructType = "struct" "{" [ FieldList ] "}" . +// FieldList = Field { ";" Field } . +// +func (p *parser) parseStructType(parent *types.Package) types.Type { + var fields []*types.Var + var tags []string + + p.expectKeyword("struct") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + fld, tag := p.parseField(parent) + if tag != "" && tags == nil { + tags = make([]string, i) + } + if tags != nil { + tags = append(tags, tag) + } + fields = append(fields, fld) + } + p.expect('}') + + return types.NewStruct(fields, tags) +} + +// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . 
+// +func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { + _, name := p.parseName(nil, false) + // remove gc-specific parameter numbering + if i := strings.Index(name, "·"); i >= 0 { + name = name[:i] + } + if p.tok == '.' { + p.expectSpecial("...") + isVariadic = true + } + typ := p.parseType(nil) + if isVariadic { + typ = types.NewSlice(typ) + } + // ignore argument tag (e.g. "noescape") + if p.tok == scanner.String { + p.next() + } + // TODO(gri) should we provide a package? + par = types.NewVar(token.NoPos, nil, name, typ) + return +} + +// Parameters = "(" [ ParameterList ] ")" . +// ParameterList = { Parameter "," } Parameter . +// +func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { + p.expect('(') + for p.tok != ')' && p.tok != scanner.EOF { + if len(list) > 0 { + p.expect(',') + } + par, variadic := p.parseParameter() + list = append(list, par) + if variadic { + if isVariadic { + p.error("... not on final argument") + } + isVariadic = true + } + } + p.expect(')') + + return +} + +// Signature = Parameters [ Result ] . +// Result = Type | Parameters . +// +func (p *parser) parseSignature(recv *types.Var) *types.Signature { + params, isVariadic := p.parseParameters() + + // optional result type + var results []*types.Var + if p.tok == '(' { + var variadic bool + results, variadic = p.parseParameters() + if variadic { + p.error("... not permitted on result type") + } + } + + return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) +} + +// InterfaceType = "interface" "{" [ MethodList ] "}" . +// MethodList = Method { ";" Method } . +// Method = Name Signature . +// +// The methods of embedded interfaces are always "inlined" +// by the compiler and thus embedded interfaces are never +// visible in the export data. 
+// +func (p *parser) parseInterfaceType(parent *types.Package) types.Type { + var methods []*types.Func + + p.expectKeyword("interface") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + pkg, name := p.parseName(parent, true) + sig := p.parseSignature(nil) + methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) + } + p.expect('}') + + // Complete requires the type's embedded interfaces to be fully defined, + // but we do not define any + return newInterface(methods, nil).Complete() +} + +// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . +// +func (p *parser) parseChanType(parent *types.Package) types.Type { + dir := types.SendRecv + if p.tok == scanner.Ident { + p.expectKeyword("chan") + if p.tok == '<' { + p.expectSpecial("<-") + dir = types.SendOnly + } + } else { + p.expectSpecial("<-") + p.expectKeyword("chan") + dir = types.RecvOnly + } + elem := p.parseType(parent) + return types.NewChan(dir, elem) +} + +// Type = +// BasicType | TypeName | ArrayType | SliceType | StructType | +// PointerType | FuncType | InterfaceType | MapType | ChanType | +// "(" Type ")" . +// +// BasicType = ident . +// TypeName = ExportedName . +// SliceType = "[" "]" Type . +// PointerType = "*" Type . +// FuncType = "func" Signature . 
+// +func (p *parser) parseType(parent *types.Package) types.Type { + switch p.tok { + case scanner.Ident: + switch p.lit { + default: + return p.parseBasicType() + case "struct": + return p.parseStructType(parent) + case "func": + // FuncType + p.next() + return p.parseSignature(nil) + case "interface": + return p.parseInterfaceType(parent) + case "map": + return p.parseMapType(parent) + case "chan": + return p.parseChanType(parent) + } + case '@': + // TypeName + pkg, name := p.parseExportedName() + return declTypeName(pkg, name).Type() + case '[': + p.next() // look ahead + if p.tok == ']' { + // SliceType + p.next() + return types.NewSlice(p.parseType(parent)) + } + return p.parseArrayType(parent) + case '*': + // PointerType + p.next() + return types.NewPointer(p.parseType(parent)) + case '<': + return p.parseChanType(parent) + case '(': + // "(" Type ")" + p.next() + typ := p.parseType(parent) + p.expect(')') + return typ + } + p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) + return nil +} + +// ---------------------------------------------------------------------------- +// Declarations + +// ImportDecl = "import" PackageName PackageId . +// +func (p *parser) parseImportDecl() { + p.expectKeyword("import") + name := p.parsePackageName() + p.getPkg(p.parsePackageID(), name) +} + +// int_lit = [ "+" | "-" ] { "0" ... "9" } . +// +func (p *parser) parseInt() string { + s := "" + switch p.tok { + case '-': + s = "-" + p.next() + case '+': + p.next() + } + return s + p.expect(scanner.Int) +} + +// number = int_lit [ "p" int_lit ] . 
+// +func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) { + // mantissa + mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0) + if mant == nil { + panic("invalid mantissa") + } + + if p.lit == "p" { + // exponent (base 2) + p.next() + exp, err := strconv.ParseInt(p.parseInt(), 10, 0) + if err != nil { + p.error(err) + } + if exp < 0 { + denom := constant.MakeInt64(1) + denom = constant.Shift(denom, token.SHL, uint(-exp)) + typ = types.Typ[types.UntypedFloat] + val = constant.BinaryOp(mant, token.QUO, denom) + return + } + if exp > 0 { + mant = constant.Shift(mant, token.SHL, uint(exp)) + } + typ = types.Typ[types.UntypedFloat] + val = mant + return + } + + typ = types.Typ[types.UntypedInt] + val = mant + return +} + +// ConstDecl = "const" ExportedName [ Type ] "=" Literal . +// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . +// bool_lit = "true" | "false" . +// complex_lit = "(" float_lit "+" float_lit "i" ")" . +// rune_lit = "(" int_lit "+" int_lit ")" . +// string_lit = `"` { unicode_char } `"` . 
+// +func (p *parser) parseConstDecl() { + p.expectKeyword("const") + pkg, name := p.parseExportedName() + + var typ0 types.Type + if p.tok != '=' { + // constant types are never structured - no need for parent type + typ0 = p.parseType(nil) + } + + p.expect('=') + var typ types.Type + var val constant.Value + switch p.tok { + case scanner.Ident: + // bool_lit + if p.lit != "true" && p.lit != "false" { + p.error("expected true or false") + } + typ = types.Typ[types.UntypedBool] + val = constant.MakeBool(p.lit == "true") + p.next() + + case '-', scanner.Int: + // int_lit + typ, val = p.parseNumber() + + case '(': + // complex_lit or rune_lit + p.next() + if p.tok == scanner.Char { + p.next() + p.expect('+') + typ = types.Typ[types.UntypedRune] + _, val = p.parseNumber() + p.expect(')') + break + } + _, re := p.parseNumber() + p.expect('+') + _, im := p.parseNumber() + p.expectKeyword("i") + p.expect(')') + typ = types.Typ[types.UntypedComplex] + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + case scanner.Char: + // rune_lit + typ = types.Typ[types.UntypedRune] + val = constant.MakeFromLiteral(p.lit, token.CHAR, 0) + p.next() + + case scanner.String: + // string_lit + typ = types.Typ[types.UntypedString] + val = constant.MakeFromLiteral(p.lit, token.STRING, 0) + p.next() + + default: + p.errorf("expected literal got %s", scanner.TokenString(p.tok)) + } + + if typ0 == nil { + typ0 = typ + } + + pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) +} + +// TypeDecl = "type" ExportedName Type . +// +func (p *parser) parseTypeDecl() { + p.expectKeyword("type") + pkg, name := p.parseExportedName() + obj := declTypeName(pkg, name) + + // The type object may have been imported before and thus already + // have a type associated with it. We still need to parse the type + // structure, but throw it away if the object already has a type. + // This ensures that all imports refer to the same type object for + // a given type declaration. 
+ typ := p.parseType(pkg) + + if name := obj.Type().(*types.Named); name.Underlying() == nil { + name.SetUnderlying(typ) + } +} + +// VarDecl = "var" ExportedName Type . +// +func (p *parser) parseVarDecl() { + p.expectKeyword("var") + pkg, name := p.parseExportedName() + typ := p.parseType(pkg) + pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) +} + +// Func = Signature [ Body ] . +// Body = "{" ... "}" . +// +func (p *parser) parseFunc(recv *types.Var) *types.Signature { + sig := p.parseSignature(recv) + if p.tok == '{' { + p.next() + for i := 1; i > 0; p.next() { + switch p.tok { + case '{': + i++ + case '}': + i-- + } + } + } + return sig +} + +// MethodDecl = "func" Receiver Name Func . +// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . +// +func (p *parser) parseMethodDecl() { + // "func" already consumed + p.expect('(') + recv, _ := p.parseParameter() // receiver + p.expect(')') + + // determine receiver base type object + base := deref(recv.Type()).(*types.Named) + + // parse method name, signature, and possibly inlined body + _, name := p.parseName(nil, false) + sig := p.parseFunc(recv) + + // methods always belong to the same package as the base type object + pkg := base.Obj().Pkg() + + // add method to type unless type was imported before + // and method exists already + // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. + base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) +} + +// FuncDecl = "func" ExportedName Func . +// +func (p *parser) parseFuncDecl() { + // "func" already consumed + pkg, name := p.parseExportedName() + typ := p.parseFunc(nil) + pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) +} + +// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . 
+// +func (p *parser) parseDecl() { + if p.tok == scanner.Ident { + switch p.lit { + case "import": + p.parseImportDecl() + case "const": + p.parseConstDecl() + case "type": + p.parseTypeDecl() + case "var": + p.parseVarDecl() + case "func": + p.next() // look ahead + if p.tok == '(' { + p.parseMethodDecl() + } else { + p.parseFuncDecl() + } + } + } + p.expect('\n') +} + +// ---------------------------------------------------------------------------- +// Export + +// Export = "PackageClause { Decl } "$$" . +// PackageClause = "package" PackageName [ "safe" ] "\n" . +// +func (p *parser) parseExport() *types.Package { + p.expectKeyword("package") + name := p.parsePackageName() + if p.tok == scanner.Ident && p.lit == "safe" { + // package was compiled with -u option - ignore + p.next() + } + p.expect('\n') + + pkg := p.getPkg(p.id, name) + + for p.tok != '$' && p.tok != scanner.EOF { + p.parseDecl() + } + + if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' { + // don't call next()/expect() since reading past the + // export data may cause scanner errors (e.g. NUL chars) + p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch) + } + + if n := p.scanner.ErrorCount; n != 0 { + p.errorf("expected no scanner errors, got %d", n) + } + + // Record all locally referenced packages as imports. 
+ var imports []*types.Package + for id, pkg2 := range p.localPkgs { + if pkg2.Name() == "" { + p.errorf("%s package has no name", id) + } + if id == p.id { + continue // avoid self-edge + } + imports = append(imports, pkg2) + } + sort.Sort(byPath(imports)) + pkg.SetImports(imports) + + // package was imported completely and without errors + pkg.MarkComplete() + + return pkg +} + +type byPath []*types.Package + +func (a byPath) Len() int { return len(a) } +func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/internal/go/gcimporter/gcimporter11_test.go b/internal/go/gcimporter/gcimporter11_test.go new file mode 100644 index 000000000..627300d08 --- /dev/null +++ b/internal/go/gcimporter/gcimporter11_test.go @@ -0,0 +1,129 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.11 + +package gcimporter + +import ( + "go/types" + "runtime" + "strings" + "testing" +) + +var importedObjectTests = []struct { + name string + want string +}{ + // non-interfaces + {"crypto.Hash", "type Hash uint"}, + {"go/ast.ObjKind", "type ObjKind int"}, + {"go/types.Qualifier", "type Qualifier func(*Package) string"}, + {"go/types.Comparable", "func Comparable(T Type) bool"}, + {"math.Pi", "const Pi untyped float"}, + {"math.Sin", "func Sin(x float64) float64"}, + {"go/ast.NotNilFilter", "func NotNilFilter(_ string, v reflect.Value) bool"}, + {"go/internal/gcimporter.FindPkg", "func FindPkg(path string, srcDir string) (filename string, id string)"}, + + // interfaces + {"context.Context", "type Context interface{Deadline() (deadline time.Time, ok bool); Done() <-chan struct{}; Err() error; Value(key interface{}) interface{}}"}, + {"crypto.Decrypter", "type Decrypter interface{Decrypt(rand io.Reader, msg []byte, opts DecrypterOpts) (plaintext []byte, err error); Public() 
PublicKey}"}, + {"encoding.BinaryMarshaler", "type BinaryMarshaler interface{MarshalBinary() (data []byte, err error)}"}, + {"io.Reader", "type Reader interface{Read(p []byte) (n int, err error)}"}, + {"io.ReadWriter", "type ReadWriter interface{Reader; Writer}"}, + {"go/ast.Node", "type Node interface{End() go/token.Pos; Pos() go/token.Pos}"}, + {"go/types.Type", "type Type interface{String() string; Underlying() Type}"}, +} + +func TestImportedTypes(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + for _, test := range importedObjectTests { + s := strings.Split(test.name, ".") + if len(s) != 2 { + t.Fatal("inconsistent test data") + } + importPath := s[0] + objName := s[1] + + pkg, err := Import(make(map[string]*types.Package), importPath, ".", nil) + if err != nil { + t.Error(err) + continue + } + + obj := pkg.Scope().Lookup(objName) + if obj == nil { + t.Errorf("%s: object not found", test.name) + continue + } + + got := types.ObjectString(obj, types.RelativeTo(pkg)) + if got != test.want { + t.Errorf("%s: got %q; want %q", test.name, got, test.want) + } + + if named, _ := obj.Type().(*types.Named); named != nil { + verifyInterfaceMethodRecvs(t, named, 0) + } + } +} + +// verifyInterfaceMethodRecvs verifies that method receiver types +// are named if the methods belong to a named interface type. 
+func verifyInterfaceMethodRecvs(t *testing.T, named *types.Named, level int) { + // avoid endless recursion in case of an embedding bug that lead to a cycle + if level > 10 { + t.Errorf("%s: embeds itself", named) + return + } + + iface, _ := named.Underlying().(*types.Interface) + if iface == nil { + return // not an interface + } + + // check explicitly declared methods + for i := 0; i < iface.NumExplicitMethods(); i++ { + m := iface.ExplicitMethod(i) + recv := m.Type().(*types.Signature).Recv() + if recv == nil { + t.Errorf("%s: missing receiver type", m) + continue + } + if recv.Type() != named { + t.Errorf("%s: got recv type %s; want %s", m, recv.Type(), named) + } + } + + // check embedded interfaces (if they are named, too) + for i := 0; i < iface.NumEmbeddeds(); i++ { + // embedding of interfaces cannot have cycles; recursion will terminate + if etype, _ := iface.EmbeddedType(i).(*types.Named); etype != nil { + verifyInterfaceMethodRecvs(t, etype, level+1) + } + } +} +func TestIssue25301(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + compileAndImportPkg(t, "issue25301") +} diff --git a/internal/go/gcimporter/gcimporter_test.go b/internal/go/gcimporter/gcimporter_test.go new file mode 100644 index 000000000..66a332ce9 --- /dev/null +++ b/internal/go/gcimporter/gcimporter_test.go @@ -0,0 +1,525 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// This file is a copy of $GOROOT/src/go/internal/gcimporter/gcimporter_test.go, +// adjusted to make it build with code from (std lib) internal/testenv copied. + +package gcimporter + +import ( + "bytes" + "fmt" + "go/types" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" + "testing" + "time" + + "honnef.co/go/tools/internal/testenv" +) + +func TestMain(m *testing.M) { + testenv.ExitIfSmallMachine() + os.Exit(m.Run()) +} + +// ---------------------------------------------------------------------------- +// The following three functions (Builder, HasGoBuild, MustHaveGoBuild) were +// copied from $GOROOT/src/internal/testenv since that package is not available +// in x/tools. + +// Builder reports the name of the builder running this test +// (for example, "linux-amd64" or "windows-386-gce"). +// If the test is not running on the build infrastructure, +// Builder returns the empty string. +func Builder() string { + return os.Getenv("GO_BUILDER_NAME") +} + +// HasGoBuild reports whether the current system can build programs with ``go build'' +// and then run them with os.StartProcess or exec.Command. +func HasGoBuild() bool { + switch runtime.GOOS { + case "android", "nacl": + return false + case "darwin": + if strings.HasPrefix(runtime.GOARCH, "arm") { + return false + } + } + return true +} + +// MustHaveGoBuild checks that the current system can build programs with ``go build'' +// and then run them with os.StartProcess or exec.Command. +// If not, MustHaveGoBuild calls t.Skip with an explanation. +func MustHaveGoBuild(t *testing.T) { + testenv.NeedsTool(t, "go") + if !HasGoBuild() { + t.Skipf("skipping test: 'go build' not available on %s/%s", runtime.GOOS, runtime.GOARCH) + } +} + +// ---------------------------------------------------------------------------- + +// skipSpecialPlatforms causes the test to be skipped for platforms where +// builders (build.golang.org) don't have access to compiled packages for +// import. 
+func skipSpecialPlatforms(t *testing.T) { + switch platform := runtime.GOOS + "-" + runtime.GOARCH; platform { + case "nacl-amd64p32", + "nacl-386", + "nacl-arm", + "darwin-arm", + "darwin-arm64": + t.Skipf("no compiled packages available for import on %s", platform) + } +} + +// compile runs the compiler on filename, with dirname as the working directory, +// and writes the output file to outdirname. +func compile(t *testing.T, dirname, filename, outdirname string) string { + /* testenv. */ MustHaveGoBuild(t) + // filename must end with ".go" + if !strings.HasSuffix(filename, ".go") { + t.Fatalf("filename doesn't end in .go: %s", filename) + } + basename := filepath.Base(filename) + outname := filepath.Join(outdirname, basename[:len(basename)-2]+"o") + cmd := exec.Command("go", "tool", "compile", "-o", outname, filename) + cmd.Dir = dirname + out, err := cmd.CombinedOutput() + if err != nil { + t.Logf("%s", out) + t.Fatalf("go tool compile %s failed: %s", filename, err) + } + return outname +} + +func testPath(t *testing.T, path, srcDir string) *types.Package { + t0 := time.Now() + pkg, err := Import(make(map[string]*types.Package), path, srcDir, nil) + if err != nil { + t.Errorf("testPath(%s): %s", path, err) + return nil + } + t.Logf("testPath(%s): %v", path, time.Since(t0)) + return pkg +} + +const maxTime = 30 * time.Second + +func testDir(t *testing.T, dir string, endTime time.Time) (nimports int) { + dirname := filepath.Join(runtime.GOROOT(), "pkg", runtime.GOOS+"_"+runtime.GOARCH, dir) + list, err := ioutil.ReadDir(dirname) + if err != nil { + t.Fatalf("testDir(%s): %s", dirname, err) + } + for _, f := range list { + if time.Now().After(endTime) { + t.Log("testing time used up") + return + } + switch { + case !f.IsDir(): + // try extensions + for _, ext := range pkgExts { + if strings.HasSuffix(f.Name(), ext) { + name := f.Name()[0 : len(f.Name())-len(ext)] // remove extension + if testPath(t, filepath.Join(dir, name), dir) != nil { + nimports++ + } + } + 
} + case f.IsDir(): + nimports += testDir(t, filepath.Join(dir, f.Name()), endTime) + } + } + return +} + +func mktmpdir(t *testing.T) string { + tmpdir, err := ioutil.TempDir("", "gcimporter_test") + if err != nil { + t.Fatal("mktmpdir:", err) + } + if err := os.Mkdir(filepath.Join(tmpdir, "testdata"), 0700); err != nil { + os.RemoveAll(tmpdir) + t.Fatal("mktmpdir:", err) + } + return tmpdir +} + +const testfile = "exports.go" + +func TestImportTestdata(t *testing.T) { + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + + compile(t, "testdata", testfile, filepath.Join(tmpdir, "testdata")) + + // filename should end with ".go" + filename := testfile[:len(testfile)-3] + if pkg := testPath(t, "./testdata/"+filename, tmpdir); pkg != nil { + // The package's Imports list must include all packages + // explicitly imported by testfile, plus all packages + // referenced indirectly via exported objects in testfile. + // With the textual export format (when run against Go1.6), + // the list may also include additional packages that are + // not strictly required for import processing alone (they + // are exported to err "on the safe side"). + // For now, we just test the presence of a few packages + // that we know are there for sure. + got := fmt.Sprint(pkg.Imports()) + for _, want := range []string{"go/ast", "go/token"} { + if !strings.Contains(got, want) { + t.Errorf(`Package("exports").Imports() = %s, does not contain %s`, got, want) + } + } + } +} + +func TestVersionHandling(t *testing.T) { + skipSpecialPlatforms(t) // we really only need to exclude nacl platforms, but this is fine + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + const dir = "./testdata/versions" + list, err := ioutil.ReadDir(dir) + if err != nil { + t.Fatal(err) + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + corruptdir := filepath.Join(tmpdir, "testdata", "versions") + if err := os.Mkdir(corruptdir, 0700); err != nil { + t.Fatal(err) + } + + for _, f := range list { + name := f.Name() + if !strings.HasSuffix(name, ".a") { + continue // not a package file + } + if strings.Contains(name, "corrupted") { + continue // don't process a leftover corrupted file + } + pkgpath := "./" + name[:len(name)-2] + + if testing.Verbose() { + t.Logf("importing %s", name) + } + + // test that export data can be imported + _, err := Import(make(map[string]*types.Package), pkgpath, dir, nil) + if err != nil { + // ok to fail if it fails with a newer version error for select files + if strings.Contains(err.Error(), "newer version") { + switch name { + case "test_go1.11_999b.a", "test_go1.11_999i.a": + continue + } + // fall through + } + t.Errorf("import %q failed: %v", pkgpath, err) + continue + } + + // create file with corrupted export data + // 1) read file + data, err := ioutil.ReadFile(filepath.Join(dir, name)) + if err != nil { + t.Fatal(err) + } + // 2) find export data + i := bytes.Index(data, []byte("\n$$B\n")) + 5 + j := bytes.Index(data[i:], []byte("\n$$\n")) + i + if i < 0 || j < 0 || i > j { + t.Fatalf("export data section not found (i = %d, j = %d)", i, j) + } + // 3) corrupt the data (increment every 7th byte) + for k := j - 13; k >= i; k -= 7 { + data[k]++ + } + // 4) write the file + pkgpath += "_corrupted" + filename := filepath.Join(corruptdir, pkgpath) + ".a" + ioutil.WriteFile(filename, data, 0666) + + // test that importing the corrupted file results in an error + _, err = Import(make(map[string]*types.Package), pkgpath, corruptdir, nil) + if err == nil { + t.Errorf("import corrupted %q 
succeeded", pkgpath) + } else if msg := err.Error(); !strings.Contains(msg, "version skew") { + t.Errorf("import %q error incorrect (%s)", pkgpath, msg) + } + } +} + +func TestImportStdLib(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + dt := maxTime + if testing.Short() && /* testenv. */ Builder() == "" { + dt = 10 * time.Millisecond + } + nimports := testDir(t, "", time.Now().Add(dt)) // installed packages + t.Logf("tested %d imports", nimports) +} + +func TestIssue5815(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + pkg := importPkg(t, "strings", ".") + + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + if obj.Pkg() == nil { + t.Errorf("no pkg for %s", obj) + } + if tname, _ := obj.(*types.TypeName); tname != nil { + named := tname.Type().(*types.Named) + for i := 0; i < named.NumMethods(); i++ { + m := named.Method(i) + if m.Pkg() == nil { + t.Errorf("no pkg for %s", m) + } + } + } + } +} + +// Smoke test to ensure that imported methods get the correct package. +func TestCorrectMethodPackage(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + imports := make(map[string]*types.Package) + _, err := Import(imports, "net/http", ".", nil) + if err != nil { + t.Fatal(err) + } + + mutex := imports["sync"].Scope().Lookup("Mutex").(*types.TypeName).Type() + mset := types.NewMethodSet(types.NewPointer(mutex)) // methods of *sync.Mutex + sel := mset.Lookup(nil, "Lock") + lock := sel.Obj().(*types.Func) + if got, want := lock.Pkg().Path(), "sync"; got != want { + t.Errorf("got package path %q; want %q", got, want) + } +} + +func TestIssue13566(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + testoutdir := filepath.Join(tmpdir, "testdata") + + // b.go needs to be compiled from the output directory so that the compiler can + // find the compiled package a. We pass the full path to compile() so that we + // don't have to copy the file to that directory. 
+ bpath, err := filepath.Abs(filepath.Join("testdata", "b.go")) + if err != nil { + t.Fatal(err) + } + compile(t, "testdata", "a.go", testoutdir) + compile(t, testoutdir, bpath, testoutdir) + + // import must succeed (test for issue at hand) + pkg := importPkg(t, "./testdata/b", tmpdir) + + // make sure all indirectly imported packages have names + for _, imp := range pkg.Imports() { + if imp.Name() == "" { + t.Errorf("no name for %s package", imp.Path()) + } + } +} + +func TestIssue13898(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // import go/internal/gcimporter which imports go/types partially + imports := make(map[string]*types.Package) + _, err := Import(imports, "go/internal/gcimporter", ".", nil) + if err != nil { + t.Fatal(err) + } + + // look for go/types package + var goTypesPkg *types.Package + for path, pkg := range imports { + if path == "go/types" { + goTypesPkg = pkg + break + } + } + if goTypesPkg == nil { + t.Fatal("go/types not found") + } + + // look for go/types.Object type + obj := lookupObj(t, goTypesPkg.Scope(), "Object") + typ, ok := obj.Type().(*types.Named) + if !ok { + t.Fatalf("go/types.Object type is %v; wanted named type", typ) + } + + // lookup go/types.Object.Pkg method + m, index, indirect := types.LookupFieldOrMethod(typ, false, nil, "Pkg") + if m == nil { + t.Fatalf("go/types.Object.Pkg not found (index = %v, indirect = %v)", index, indirect) + } + + // the method must belong to go/types + if m.Pkg().Path() != "go/types" { + t.Fatalf("found %v; want go/types", m.Pkg()) + } +} + +func TestIssue15517(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. 
+ if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + + compile(t, "testdata", "p.go", filepath.Join(tmpdir, "testdata")) + + // Multiple imports of p must succeed without redeclaration errors. + // We use an import path that's not cleaned up so that the eventual + // file path for the package is different from the package path; this + // will expose the error if it is present. + // + // (Issue: Both the textual and the binary importer used the file path + // of the package to be imported as key into the shared packages map. + // However, the binary importer then used the package path to identify + // the imported package to mark it as complete; effectively marking the + // wrong package as complete. By using an "unclean" package path, the + // file and package path are different, exposing the problem if present. + // The same issue occurs with vendoring.) + imports := make(map[string]*types.Package) + for i := 0; i < 3; i++ { + if _, err := Import(imports, "./././testdata/p", tmpdir, nil); err != nil { + t.Fatal(err) + } + } +} + +func TestIssue15920(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). 
+ if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + compileAndImportPkg(t, "issue15920") +} + +func TestIssue20046(t *testing.T) { + skipSpecialPlatforms(t) + + // This package only handles gc export data. + if runtime.Compiler != "gc" { + t.Skipf("gc-built packages not available (compiler = %s)", runtime.Compiler) + } + + // On windows, we have to set the -D option for the compiler to avoid having a drive + // letter and an illegal ':' in the import path - just skip it (see also issue #3483). + if runtime.GOOS == "windows" { + t.Skip("avoid dealing with relative paths/drive letters on windows") + } + + // "./issue20046".V.M must exist + pkg := compileAndImportPkg(t, "issue20046") + obj := lookupObj(t, pkg.Scope(), "V") + if m, index, indirect := types.LookupFieldOrMethod(obj.Type(), false, nil, "M"); m == nil { + t.Fatalf("V.M not found (index = %v, indirect = %v)", index, indirect) + } +} + +func importPkg(t *testing.T, path, srcDir string) *types.Package { + pkg, err := Import(make(map[string]*types.Package), path, srcDir, nil) + if err != nil { + t.Fatal(err) + } + return pkg +} + +func compileAndImportPkg(t *testing.T, name string) *types.Package { + tmpdir := mktmpdir(t) + defer os.RemoveAll(tmpdir) + compile(t, "testdata", name+".go", filepath.Join(tmpdir, "testdata")) + return importPkg(t, "./testdata/"+name, tmpdir) +} + +func lookupObj(t *testing.T, scope *types.Scope, name string) types.Object { + if obj := scope.Lookup(name); obj != nil { + return obj + } + t.Fatalf("%s not found", name) + return nil +} diff --git a/internal/go/gcimporter/iexport.go b/internal/go/gcimporter/iexport.go new file mode 100644 index 000000000..4be32a2e5 --- /dev/null +++ b/internal/go/gcimporter/iexport.go @@ -0,0 +1,739 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Indexed binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; +// see that file for specification of the format. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "go/ast" + "go/constant" + "go/token" + "go/types" + "io" + "math/big" + "reflect" + "sort" +) + +// Current indexed export format version. Increase with each format change. +// 0: Go1.11 encoding +const iexportVersion = 0 + +// IExportData returns the binary export data for pkg. +// +// If no file set is provided, position info will be missing. +// The package path of the top-level package will not be recorded, +// so that calls to IImportData can override with a provided package path. +func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. + panic(e) + } + }() + + p := iexporter{ + out: bytes.NewBuffer(nil), + fset: fset, + allPkgs: map[*types.Package]bool{}, + stringIndex: map[string]uint64{}, + declIndex: map[types.Object]uint64{}, + typIndex: map[types.Type]uint64{}, + localpkg: pkg, + } + + for i, pt := range predeclared() { + p.typIndex[pt] = uint64(i) + } + if len(p.typIndex) > predeclReserved { + panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)) + } + + // Initialize work queue with exported declarations. + scope := pkg.Scope() + for _, name := range scope.Names() { + if ast.IsExported(name) { + p.pushDecl(scope.Lookup(name)) + } + } + + // Loop until no more work. + for !p.declTodo.empty() { + p.doDecl(p.declTodo.popHead()) + } + + // Append indices to data0 section. + dataLen := uint64(p.data0.Len()) + w := p.newWriter() + w.writeIndex(p.declIndex) + w.flush() + + // Assemble header. 
+ var hdr intWriter + hdr.WriteByte('i') + hdr.uint64(iexportVersion) + hdr.uint64(uint64(p.strings.Len())) + hdr.uint64(dataLen) + + // Flush output. + io.Copy(p.out, &hdr) + io.Copy(p.out, &p.strings) + io.Copy(p.out, &p.data0) + + return p.out.Bytes(), nil +} + +// writeIndex writes out an object index. mainIndex indicates whether +// we're writing out the main index, which is also read by +// non-compiler tools and includes a complete package description +// (i.e., name and height). +func (w *exportWriter) writeIndex(index map[types.Object]uint64) { + // Build a map from packages to objects from that package. + pkgObjs := map[*types.Package][]types.Object{} + + // For the main index, make sure to include every package that + // we reference, even if we're not exporting (or reexporting) + // any symbols from it. + pkgObjs[w.p.localpkg] = nil + for pkg := range w.p.allPkgs { + pkgObjs[pkg] = nil + } + + for obj := range index { + pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj) + } + + var pkgs []*types.Package + for pkg, objs := range pkgObjs { + pkgs = append(pkgs, pkg) + + sort.Slice(objs, func(i, j int) bool { + return objs[i].Name() < objs[j].Name() + }) + } + + sort.Slice(pkgs, func(i, j int) bool { + return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j]) + }) + + w.uint64(uint64(len(pkgs))) + for _, pkg := range pkgs { + w.string(w.exportPath(pkg)) + w.string(pkg.Name()) + w.uint64(uint64(0)) // package height is not needed for go/types + + objs := pkgObjs[pkg] + w.uint64(uint64(len(objs))) + for _, obj := range objs { + w.string(obj.Name()) + w.uint64(index[obj]) + } + } +} + +type iexporter struct { + fset *token.FileSet + out *bytes.Buffer + + localpkg *types.Package + + // allPkgs tracks all packages that have been referenced by + // the export data, so we can ensure to include them in the + // main index. 
+ allPkgs map[*types.Package]bool + + declTodo objQueue + + strings intWriter + stringIndex map[string]uint64 + + data0 intWriter + declIndex map[types.Object]uint64 + typIndex map[types.Type]uint64 +} + +// stringOff returns the offset of s within the string section. +// If not already present, it's added to the end. +func (p *iexporter) stringOff(s string) uint64 { + off, ok := p.stringIndex[s] + if !ok { + off = uint64(p.strings.Len()) + p.stringIndex[s] = off + + p.strings.uint64(uint64(len(s))) + p.strings.WriteString(s) + } + return off +} + +// pushDecl adds n to the declaration work queue, if not already present. +func (p *iexporter) pushDecl(obj types.Object) { + // Package unsafe is known to the compiler and predeclared. + assert(obj.Pkg() != types.Unsafe) + + if _, ok := p.declIndex[obj]; ok { + return + } + + p.declIndex[obj] = ^uint64(0) // mark n present in work queue + p.declTodo.pushTail(obj) +} + +// exportWriter handles writing out individual data section chunks. +type exportWriter struct { + p *iexporter + + data intWriter + currPkg *types.Package + prevFile string + prevLine int64 +} + +func (w *exportWriter) exportPath(pkg *types.Package) string { + if pkg == w.p.localpkg { + return "" + } + return pkg.Path() +} + +func (p *iexporter) doDecl(obj types.Object) { + w := p.newWriter() + w.setPkg(obj.Pkg(), false) + + switch obj := obj.(type) { + case *types.Var: + w.tag('V') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + + case *types.Func: + sig, _ := obj.Type().(*types.Signature) + if sig.Recv() != nil { + panic(internalErrorf("unexpected method: %v", sig)) + } + w.tag('F') + w.pos(obj.Pos()) + w.signature(sig) + + case *types.Const: + w.tag('C') + w.pos(obj.Pos()) + w.value(obj.Type(), obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + w.tag('A') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + break + } + + // Defined type. 
+ w.tag('T') + w.pos(obj.Pos()) + + underlying := obj.Type().Underlying() + w.typ(underlying, obj.Pkg()) + + t := obj.Type() + if types.IsInterface(t) { + break + } + + named, ok := t.(*types.Named) + if !ok { + panic(internalErrorf("%s is not a defined type", t)) + } + + n := named.NumMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := named.Method(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.param(sig.Recv()) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected object: %v", obj)) + } + + p.declIndex[obj] = w.flush() +} + +func (w *exportWriter) tag(tag byte) { + w.data.WriteByte(tag) +} + +func (w *exportWriter) pos(pos token.Pos) { + if w.p.fset == nil { + w.int64(0) + return + } + + p := w.p.fset.Position(pos) + file := p.Filename + line := int64(p.Line) + + // When file is the same as the last position (common case), + // we can save a few bytes by delta encoding just the line + // number. + // + // Note: Because data objects may be read out of order (or not + // at all), we can only apply delta encoding within a single + // object. This is handled implicitly by tracking prevFile and + // prevLine as fields of exportWriter. + + if file == w.prevFile { + delta := line - w.prevLine + w.int64(delta) + if delta == deltaNewFile { + w.int64(-1) + } + } else { + w.int64(deltaNewFile) + w.int64(line) // line >= 0 + w.string(file) + w.prevFile = file + } + w.prevLine = line +} + +func (w *exportWriter) pkg(pkg *types.Package) { + // Ensure any referenced packages are declared in the main index. + w.p.allPkgs[pkg] = true + + w.string(w.exportPath(pkg)) +} + +func (w *exportWriter) qualifiedIdent(obj types.Object) { + // Ensure any referenced declarations are written out too. 
+ w.p.pushDecl(obj) + + w.string(obj.Name()) + w.pkg(obj.Pkg()) +} + +func (w *exportWriter) typ(t types.Type, pkg *types.Package) { + w.data.uint64(w.p.typOff(t, pkg)) +} + +func (p *iexporter) newWriter() *exportWriter { + return &exportWriter{p: p} +} + +func (w *exportWriter) flush() uint64 { + off := uint64(w.p.data0.Len()) + io.Copy(&w.p.data0, &w.data) + return off +} + +func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 { + off, ok := p.typIndex[t] + if !ok { + w := p.newWriter() + w.doTyp(t, pkg) + off = predeclReserved + w.flush() + p.typIndex[t] = off + } + return off +} + +func (w *exportWriter) startType(k itag) { + w.data.uint64(uint64(k)) +} + +func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { + switch t := t.(type) { + case *types.Named: + w.startType(definedType) + w.qualifiedIdent(t.Obj()) + + case *types.Pointer: + w.startType(pointerType) + w.typ(t.Elem(), pkg) + + case *types.Slice: + w.startType(sliceType) + w.typ(t.Elem(), pkg) + + case *types.Array: + w.startType(arrayType) + w.uint64(uint64(t.Len())) + w.typ(t.Elem(), pkg) + + case *types.Chan: + w.startType(chanType) + // 1 RecvOnly; 2 SendOnly; 3 SendRecv + var dir uint64 + switch t.Dir() { + case types.RecvOnly: + dir = 1 + case types.SendOnly: + dir = 2 + case types.SendRecv: + dir = 3 + } + w.uint64(dir) + w.typ(t.Elem(), pkg) + + case *types.Map: + w.startType(mapType) + w.typ(t.Key(), pkg) + w.typ(t.Elem(), pkg) + + case *types.Signature: + w.startType(signatureType) + w.setPkg(pkg, true) + w.signature(t) + + case *types.Struct: + w.startType(structType) + w.setPkg(pkg, true) + + n := t.NumFields() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := t.Field(i) + w.pos(f.Pos()) + w.string(f.Name()) + w.typ(f.Type(), pkg) + w.bool(f.Anonymous()) + w.string(t.Tag(i)) // note (or tag) + } + + case *types.Interface: + w.startType(interfaceType) + w.setPkg(pkg, true) + + n := t.NumEmbeddeds() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := 
t.Embedded(i) + w.pos(f.Obj().Pos()) + w.typ(f.Obj().Type(), f.Obj().Pkg()) + } + + n = t.NumExplicitMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := t.ExplicitMethod(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t))) + } +} + +func (w *exportWriter) setPkg(pkg *types.Package, write bool) { + if write { + w.pkg(pkg) + } + + w.currPkg = pkg +} + +func (w *exportWriter) signature(sig *types.Signature) { + w.paramList(sig.Params()) + w.paramList(sig.Results()) + if sig.Params().Len() > 0 { + w.bool(sig.Variadic()) + } +} + +func (w *exportWriter) paramList(tup *types.Tuple) { + n := tup.Len() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + w.param(tup.At(i)) + } +} + +func (w *exportWriter) param(obj types.Object) { + w.pos(obj.Pos()) + w.localIdent(obj) + w.typ(obj.Type(), obj.Pkg()) +} + +func (w *exportWriter) value(typ types.Type, v constant.Value) { + w.typ(typ, nil) + + switch v.Kind() { + case constant.Bool: + w.bool(constant.BoolVal(v)) + case constant.Int: + var i big.Int + if i64, exact := constant.Int64Val(v); exact { + i.SetInt64(i64) + } else if ui64, exact := constant.Uint64Val(v); exact { + i.SetUint64(ui64) + } else { + i.SetString(v.ExactString(), 10) + } + w.mpint(&i, typ) + case constant.Float: + f := constantToFloat(v) + w.mpfloat(f, typ) + case constant.Complex: + w.mpfloat(constantToFloat(constant.Real(v)), typ) + w.mpfloat(constantToFloat(constant.Imag(v)), typ) + case constant.String: + w.string(constant.StringVal(v)) + case constant.Unknown: + // package contains type errors + default: + panic(internalErrorf("unexpected value %v (%T)", v, v)) + } +} + +// constantToFloat converts a constant.Value with kind constant.Float to a +// big.Float. 
+func constantToFloat(x constant.Value) *big.Float { + assert(x.Kind() == constant.Float) + // Use the same floating-point precision (512) as cmd/compile + // (see Mpprec in cmd/compile/internal/gc/mpfloat.go). + const mpprec = 512 + var f big.Float + f.SetPrec(mpprec) + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + n := valueToRat(num) + d := valueToRat(denom) + f.SetRat(n.Quo(n, d)) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + _, ok := f.SetString(x.ExactString()) + assert(ok) + } + return &f +} + +// mpint exports a multi-precision integer. +// +// For unsigned types, small values are written out as a single +// byte. Larger values are written out as a length-prefixed big-endian +// byte string, where the length prefix is encoded as its complement. +// For example, bytes 0, 1, and 2 directly represent the integer +// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-, +// 2-, and 3-byte big-endian string follow. +// +// Encoding for signed types use the same general approach as for +// unsigned types, except small values use zig-zag encoding and the +// bottom bit of length prefix byte for large values is reserved as a +// sign bit. +// +// The exact boundary between small and large encodings varies +// according to the maximum number of bytes needed to encode a value +// of type typ. As a special case, 8-bit types are always encoded as a +// single byte. +// +// TODO(mdempsky): Is this level of complexity really worthwhile? 
+func (w *exportWriter) mpint(x *big.Int, typ types.Type) { + basic, ok := typ.Underlying().(*types.Basic) + if !ok { + panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying())) + } + + signed, maxBytes := intSize(basic) + + negative := x.Sign() < 0 + if !signed && negative { + panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x)) + } + + b := x.Bytes() + if len(b) > 0 && b[0] == 0 { + panic(internalErrorf("leading zeros")) + } + if uint(len(b)) > maxBytes { + panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)) + } + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + // Check if x can use small value encoding. + if len(b) <= 1 { + var ux uint + if len(b) == 1 { + ux = uint(b[0]) + } + if signed { + ux <<= 1 + if negative { + ux-- + } + } + if ux < maxSmall { + w.data.WriteByte(byte(ux)) + return + } + } + + n := 256 - uint(len(b)) + if signed { + n = 256 - 2*uint(len(b)) + if negative { + n |= 1 + } + } + if n < maxSmall || n >= 256 { + panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)) + } + + w.data.WriteByte(byte(n)) + w.data.Write(b) +} + +// mpfloat exports a multi-precision floating point number. +// +// The number's value is decomposed into mantissa × 2**exponent, where +// mantissa is an integer. The value is written out as mantissa (as a +// multi-precision integer) and then the exponent, except exponent is +// omitted if mantissa is zero. +func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) { + if f.IsInf() { + panic("infinite constant") + } + + // Break into f = mant × 2**exp, with 0.5 <= mant < 1. + var mant big.Float + exp := int64(f.MantExp(&mant)) + + // Scale so that mant is an integer. 
+ prec := mant.MinPrec() + mant.SetMantExp(&mant, int(prec)) + exp -= int64(prec) + + manti, acc := mant.Int(nil) + if acc != big.Exact { + panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc)) + } + w.mpint(manti, typ) + if manti.Sign() != 0 { + w.int64(exp) + } +} + +func (w *exportWriter) bool(b bool) bool { + var x uint64 + if b { + x = 1 + } + w.uint64(x) + return b +} + +func (w *exportWriter) int64(x int64) { w.data.int64(x) } +func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) } +func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) } + +func (w *exportWriter) localIdent(obj types.Object) { + // Anonymous parameters. + if obj == nil { + w.string("") + return + } + + name := obj.Name() + if name == "_" { + w.string("_") + return + } + + w.string(name) +} + +type intWriter struct { + bytes.Buffer +} + +func (w *intWriter) int64(x int64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutVarint(buf[:], x) + w.Write(buf[:n]) +} + +func (w *intWriter) uint64(x uint64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutUvarint(buf[:], x) + w.Write(buf[:n]) +} + +func assert(cond bool) { + if !cond { + panic("internal error: assertion failed") + } +} + +// The below is copied from go/src/cmd/compile/internal/gc/syntax.go. + +// objQueue is a FIFO queue of types.Object. The zero value of objQueue is +// a ready-to-use empty queue. +type objQueue struct { + ring []types.Object + head, tail int +} + +// empty returns true if q contains no Nodes. +func (q *objQueue) empty() bool { + return q.head == q.tail +} + +// pushTail appends n to the tail of the queue. +func (q *objQueue) pushTail(obj types.Object) { + if len(q.ring) == 0 { + q.ring = make([]types.Object, 16) + } else if q.head+len(q.ring) == q.tail { + // Grow the ring. + nring := make([]types.Object, len(q.ring)*2) + // Copy the old elements. 
+ part := q.ring[q.head%len(q.ring):] + if q.tail-q.head <= len(part) { + part = part[:q.tail-q.head] + copy(nring, part) + } else { + pos := copy(nring, part) + copy(nring[pos:], q.ring[:q.tail%len(q.ring)]) + } + q.ring, q.head, q.tail = nring, 0, q.tail-q.head + } + + q.ring[q.tail%len(q.ring)] = obj + q.tail++ +} + +// popHead pops a node from the head of the queue. It panics if q is empty. +func (q *objQueue) popHead() types.Object { + if q.empty() { + panic("dequeue empty") + } + obj := q.ring[q.head%len(q.ring)] + q.head++ + return obj +} diff --git a/internal/go/gcimporter/iexport_test.go b/internal/go/gcimporter/iexport_test.go new file mode 100644 index 000000000..2145f994c --- /dev/null +++ b/internal/go/gcimporter/iexport_test.go @@ -0,0 +1,310 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a copy of bexport_test.go for iexport.go. + +// +build go1.11 + +package gcimporter_test + +import ( + "fmt" + "go/ast" + "go/build" + "go/constant" + "go/parser" + "go/token" + "go/types" + "math/big" + "reflect" + "runtime" + "sort" + "strings" + "testing" + + "honnef.co/go/tools/internal/go/gcimporter" + + "golang.org/x/tools/go/buildutil" + "golang.org/x/tools/go/loader" +) + +func TestIExportData_stdlib(t *testing.T) { + if runtime.Compiler == "gccgo" { + t.Skip("gccgo standard library is inaccessible") + } + if runtime.GOOS == "android" { + t.Skipf("incomplete std lib on %s", runtime.GOOS) + } + if isRace { + t.Skipf("stdlib tests take too long in race mode and flake on builders") + } + + // Load, parse and type-check the program. 
+ ctxt := build.Default // copy + ctxt.GOPATH = "" // disable GOPATH + conf := loader.Config{ + Build: &ctxt, + AllowErrors: true, + } + for _, path := range buildutil.AllPackages(conf.Build) { + conf.Import(path) + } + + // Create a package containing type and value errors to ensure + // they are properly encoded/decoded. + f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors +const UnknownValue = "" + 0 +type UnknownType undefined +`) + if err != nil { + t.Fatal(err) + } + conf.CreateFromFiles("haserrors", f) + + prog, err := conf.Load() + if err != nil { + t.Fatalf("Load failed: %v", err) + } + + numPkgs := len(prog.AllPackages) + if want := 248; numPkgs < want { + t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) + } + + var sorted []*types.Package + for pkg := range prog.AllPackages { + sorted = append(sorted, pkg) + } + sort.Slice(sorted, func(i, j int) bool { + return sorted[i].Path() < sorted[j].Path() + }) + + for _, pkg := range sorted { + info := prog.AllPackages[pkg] + if info.Files == nil { + continue // empty directory + } + exportdata, err := gcimporter.IExportData(conf.Fset, pkg) + if err != nil { + t.Fatal(err) + } + if exportdata[0] == 'i' { + exportdata = exportdata[1:] // trim the 'i' in the header + } else { + t.Fatalf("unexpected first character of export data: %v", exportdata[0]) + } + + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + n, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Errorf("IImportData(%s): %v", pkg.Path(), err) + continue + } + if n != len(exportdata) { + t.Errorf("IImportData(%s) decoded %d bytes, want %d", + pkg.Path(), n, len(exportdata)) + } + + // Compare the packages' corresponding members. 
+ for _, name := range pkg.Scope().Names() { + if !ast.IsExported(name) { + continue + } + obj1 := pkg.Scope().Lookup(name) + obj2 := pkg2.Scope().Lookup(name) + if obj2 == nil { + t.Fatalf("%s.%s not found, want %s", pkg.Path(), name, obj1) + continue + } + + fl1 := fileLine(conf.Fset, obj1) + fl2 := fileLine(fset2, obj2) + if fl1 != fl2 { + t.Errorf("%s.%s: got posn %s, want %s", + pkg.Path(), name, fl2, fl1) + } + + if err := cmpObj(obj1, obj2); err != nil { + t.Errorf("%s.%s: %s\ngot: %s\nwant: %s", + pkg.Path(), name, err, obj2, obj1) + } + } + } +} + +// TestVeryLongFile tests the position of an import object declared in +// a very long input file. Line numbers greater than maxlines are +// reported as line 1, not garbage or token.NoPos. +func TestIExportData_long(t *testing.T) { + // parse and typecheck + longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" + fset1 := token.NewFileSet() + f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) + if err != nil { + t.Fatal(err) + } + var conf types.Config + pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + + // export + exportdata, err := gcimporter.IExportData(fset1, pkg) + if err != nil { + t.Fatal(err) + } + if exportdata[0] == 'i' { + exportdata = exportdata[1:] // trim the 'i' in the header + } else { + t.Fatalf("unexpected first character of export data: %v", exportdata[0]) + } + + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path()) + if err != nil { + t.Fatalf("IImportData(%s): %v", pkg.Path(), err) + } + + // compare + posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) + posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) + if want := "foo.go:1:1"; posn2.String() != want { + t.Errorf("X position = %s, want %s (orig was %s)", + posn2, want, posn1) + } +} + +func TestIExportData_typealiases(t *testing.T) { + // parse 
and typecheck + fset1 := token.NewFileSet() + f, err := parser.ParseFile(fset1, "p.go", src, 0) + if err != nil { + t.Fatal(err) + } + var conf types.Config + pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil) + if err == nil { + // foo in undeclared in src; we should see an error + t.Fatal("invalid source type-checked without error") + } + if pkg1 == nil { + // despite incorrect src we should see a (partially) type-checked package + t.Fatal("nil package returned") + } + checkPkg(t, pkg1, "export") + + // export + // use a nil fileset here to confirm that it doesn't panic + exportdata, err := gcimporter.IExportData(nil, pkg1) + if err != nil { + t.Fatal(err) + } + if exportdata[0] == 'i' { + exportdata = exportdata[1:] // trim the 'i' in the header + } else { + t.Fatalf("unexpected first character of export data: %v", exportdata[0]) + } + + // import + imports := make(map[string]*types.Package) + fset2 := token.NewFileSet() + _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path()) + if err != nil { + t.Fatalf("IImportData(%s): %v", pkg1.Path(), err) + } + checkPkg(t, pkg2, "import") +} + +// cmpObj reports how x and y differ. They are assumed to belong to different +// universes so cannot be compared directly. It is an adapted version of +// equalObj in bexport_test.go. +func cmpObj(x, y types.Object) error { + if reflect.TypeOf(x) != reflect.TypeOf(y) { + return fmt.Errorf("%T vs %T", x, y) + } + xt := x.Type() + yt := y.Type() + switch x.(type) { + case *types.Var, *types.Func: + // ok + case *types.Const: + xval := x.(*types.Const).Val() + yval := y.(*types.Const).Val() + equal := constant.Compare(xval, token.EQL, yval) + if !equal { + // try approx. 
comparison + xkind := xval.Kind() + ykind := yval.Kind() + if xkind == constant.Complex || ykind == constant.Complex { + equal = same(constant.Real(xval), constant.Real(yval)) && + same(constant.Imag(xval), constant.Imag(yval)) + } else if xkind == constant.Float || ykind == constant.Float { + equal = same(xval, yval) + } else if xkind == constant.Unknown && ykind == constant.Unknown { + equal = true + } + } + if !equal { + return fmt.Errorf("unequal constants %s vs %s", xval, yval) + } + case *types.TypeName: + xt = xt.Underlying() + yt = yt.Underlying() + default: + return fmt.Errorf("unexpected %T", x) + } + return equalType(xt, yt) +} + +// Use the same floating-point precision (512) as cmd/compile +// (see Mpprec in cmd/compile/internal/gc/mpfloat.go). +const mpprec = 512 + +// same compares non-complex numeric values and reports if they are approximately equal. +func same(x, y constant.Value) bool { + xf := constantToFloat(x) + yf := constantToFloat(y) + d := new(big.Float).Sub(xf, yf) + d.Abs(d) + eps := big.NewFloat(1.0 / (1 << (mpprec - 1))) // allow for 1 bit of error + return d.Cmp(eps) < 0 +} + +// copy of the function with the same name in iexport.go. +func constantToFloat(x constant.Value) *big.Float { + var f big.Float + f.SetPrec(mpprec) + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + n := valueToRat(num) + d := valueToRat(denom) + f.SetRat(n.Quo(n, d)) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + _, ok := f.SetString(x.ExactString()) + if !ok { + panic("should not reach here") + } + } + return &f +} + +// copy of the function with the same name in iexport.go. +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. 
+ // I can't believe this is necessary. + bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} diff --git a/internal/go/gcimporter/iimport.go b/internal/go/gcimporter/iimport.go new file mode 100644 index 000000000..a31a88026 --- /dev/null +++ b/internal/go/gcimporter/iimport.go @@ -0,0 +1,630 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed package import. +// See cmd/compile/internal/gc/iexport.go for the export data format. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "io" + "sort" +) + +type intReader struct { + *bytes.Reader + path string +} + +func (r *intReader) int64() int64 { + i, err := binary.ReadVarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +func (r *intReader) uint64() uint64 { + i, err := binary.ReadUvarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +const predeclReserved = 32 + +type itag uint64 + +const ( + // Types + definedType itag = iota + pointerType + sliceType + arrayType + chanType + mapType + signatureType + structType + interfaceType +) + +// IImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. 
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + const currentVersion = 1 + version := int64(-1) + defer func() { + if e := recover(); e != nil { + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + r := &intReader{bytes.NewReader(data), path} + + version = int64(r.uint64()) + switch version { + case currentVersion, 0: + default: + errorf("unknown iexport format version %d", version) + } + + sLen := int64(r.uint64()) + dLen := int64(r.uint64()) + + whence, _ := r.Seek(0, io.SeekCurrent) + stringData := data[whence : whence+sLen] + declData := data[whence+sLen : whence+sLen+dLen] + r.Seek(sLen+dLen, io.SeekCurrent) + + p := iimporter{ + ipath: path, + version: int(version), + + stringData: stringData, + stringCache: make(map[uint64]string), + pkgCache: make(map[uint64]*types.Package), + + declData: declData, + pkgIndex: make(map[*types.Package]map[string]uint64), + typCache: make(map[uint64]types.Type), + + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + for i, pt := range predeclared() { + p.typCache[uint64(i)] = pt + } + + pkgList := make([]*types.Package, r.uint64()) + for i := range pkgList { + pkgPathOff := r.uint64() + pkgPath := p.stringAt(pkgPathOff) + pkgName := p.stringAt(r.uint64()) + _ = r.uint64() // package height; unused by go/types + + if pkgPath == "" { + pkgPath = path + } + pkg := imports[pkgPath] + if pkg == nil { + pkg = types.NewPackage(pkgPath, pkgName) + imports[pkgPath] = pkg + } else if pkg.Name() != pkgName { + errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path) + } + + p.pkgCache[pkgPathOff] = pkg + + nameIndex := make(map[string]uint64) + for nSyms := r.uint64(); nSyms > 0; nSyms-- { 
+ name := p.stringAt(r.uint64()) + nameIndex[name] = r.uint64() + } + + p.pkgIndex[pkg] = nameIndex + pkgList[i] = pkg + } + if len(pkgList) == 0 { + errorf("no packages found for %s", path) + panic("unreachable") + } + p.ipkg = pkgList[0] + names := make([]string, 0, len(p.pkgIndex[p.ipkg])) + for name := range p.pkgIndex[p.ipkg] { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + p.doDecl(p.ipkg, name) + } + + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), pkgList[1:]...) + sort.Sort(byPath(list)) + p.ipkg.SetImports(list) + + // package was imported completely and without errors + p.ipkg.MarkComplete() + + consumed, _ := r.Seek(0, io.SeekCurrent) + return int(consumed), p.ipkg, nil +} + +type iimporter struct { + ipath string + ipkg *types.Package + version int + + stringData []byte + stringCache map[uint64]string + pkgCache map[uint64]*types.Package + + declData []byte + pkgIndex map[*types.Package]map[string]uint64 + typCache map[uint64]types.Type + + fake fakeFileSet + interfaceList []*types.Interface +} + +func (p *iimporter) doDecl(pkg *types.Package, name string) { + // See if we've already imported this declaration. 
+ if obj := pkg.Scope().Lookup(name); obj != nil { + return + } + + off, ok := p.pkgIndex[pkg][name] + if !ok { + errorf("%v.%v not in index", pkg, name) + } + + r := &importReader{p: p, currPkg: pkg} + r.declReader.Reset(p.declData[off:]) + + r.obj(name) +} + +func (p *iimporter) stringAt(off uint64) string { + if s, ok := p.stringCache[off]; ok { + return s + } + + slen, n := binary.Uvarint(p.stringData[off:]) + if n <= 0 { + errorf("varint failed") + } + spos := off + uint64(n) + s := string(p.stringData[spos : spos+slen]) + p.stringCache[off] = s + return s +} + +func (p *iimporter) pkgAt(off uint64) *types.Package { + if pkg, ok := p.pkgCache[off]; ok { + return pkg + } + path := p.stringAt(off) + if path == p.ipath { + return p.ipkg + } + errorf("missing package %q in %q", path, p.ipath) + return nil +} + +func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { + if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) { + return t + } + + if off < predeclReserved { + errorf("predeclared type missing from cache: %v", off) + } + + r := &importReader{p: p} + r.declReader.Reset(p.declData[off-predeclReserved:]) + t := r.doType(base) + + if base == nil || !isInterface(t) { + p.typCache[off] = t + } + return t +} + +type importReader struct { + p *iimporter + declReader bytes.Reader + currPkg *types.Package + prevFile string + prevLine int64 + prevColumn int64 +} + +func (r *importReader) obj(name string) { + tag := r.byte() + pos := r.pos() + + switch tag { + case 'A': + typ := r.typ() + + r.declare(types.NewTypeName(pos, r.currPkg, name, typ)) + + case 'C': + typ, val := r.value() + + r.declare(types.NewConst(pos, r.currPkg, name, typ, val)) + + case 'F': + sig := r.signature(nil) + + r.declare(types.NewFunc(pos, r.currPkg, name, sig)) + + case 'T': + // Types can be recursive. We need to setup a stub + // declaration before recursing. 
+ obj := types.NewTypeName(pos, r.currPkg, name, nil) + named := types.NewNamed(obj, nil, nil) + r.declare(obj) + + underlying := r.p.typAt(r.uint64(), named).Underlying() + named.SetUnderlying(underlying) + + if !isInterface(underlying) { + for n := r.uint64(); n > 0; n-- { + mpos := r.pos() + mname := r.ident() + recv := r.param() + msig := r.signature(recv) + + named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig)) + } + } + + case 'V': + typ := r.typ() + + r.declare(types.NewVar(pos, r.currPkg, name, typ)) + + default: + errorf("unexpected tag: %v", tag) + } +} + +func (r *importReader) declare(obj types.Object) { + obj.Pkg().Scope().Insert(obj) +} + +func (r *importReader) value() (typ types.Type, val constant.Value) { + typ = r.typ() + + switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { + case types.IsBoolean: + val = constant.MakeBool(r.bool()) + + case types.IsString: + val = constant.MakeString(r.string()) + + case types.IsInteger: + val = r.mpint(b) + + case types.IsFloat: + val = r.mpfloat(b) + + case types.IsComplex: + re := r.mpfloat(b) + im := r.mpfloat(b) + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + default: + if b.Kind() == types.Invalid { + val = constant.MakeUnknown() + return + } + errorf("unexpected type %v", typ) // panics + panic("unreachable") + } + + return +} + +func intSize(b *types.Basic) (signed bool, maxBytes uint) { + if (b.Info() & types.IsUntyped) != 0 { + return true, 64 + } + + switch b.Kind() { + case types.Float32, types.Complex64: + return true, 3 + case types.Float64, types.Complex128: + return true, 7 + } + + signed = (b.Info() & types.IsUnsigned) == 0 + switch b.Kind() { + case types.Int8, types.Uint8: + maxBytes = 1 + case types.Int16, types.Uint16: + maxBytes = 2 + case types.Int32, types.Uint32: + maxBytes = 4 + default: + maxBytes = 8 + } + + return +} + +func (r *importReader) mpint(b *types.Basic) constant.Value { + signed, maxBytes := intSize(b) + + maxSmall := 256 
- maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + n, _ := r.declReader.ReadByte() + if uint(n) < maxSmall { + v := int64(n) + if signed { + v >>= 1 + if n&1 != 0 { + v = ^v + } + } + return constant.MakeInt64(v) + } + + v := -n + if signed { + v = -(n &^ 1) >> 1 + } + if v < 1 || uint(v) > maxBytes { + errorf("weird decoding: %v, %v => %v", n, signed, v) + } + + buf := make([]byte, v) + io.ReadFull(&r.declReader, buf) + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 { + buf[i], buf[j] = buf[j], buf[i] + } + + x := constant.MakeFromBytes(buf) + if signed && n&1 != 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +func (r *importReader) mpfloat(b *types.Basic) constant.Value { + x := r.mpint(b) + if constant.Sign(x) == 0 { + return x + } + + exp := r.int64() + switch { + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + } + return x +} + +func (r *importReader) ident() string { + return r.string() +} + +func (r *importReader) qualifiedIdent() (*types.Package, string) { + name := r.string() + pkg := r.pkg() + return pkg, name +} + +func (r *importReader) pos() token.Pos { + if r.p.version >= 1 { + r.posv1() + } else { + r.posv0() + } + + if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 { + return token.NoPos + } + return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn)) +} + +func (r *importReader) posv0() { + delta := r.int64() + if delta != deltaNewFile { + r.prevLine += delta + } else if l := r.int64(); l == -1 { + r.prevLine += deltaNewFile + } else { + r.prevFile = r.string() + r.prevLine = l + } +} + +func (r *importReader) posv1() { + delta := r.int64() + r.prevColumn 
+= delta >> 1 + if delta&1 != 0 { + delta = r.int64() + r.prevLine += delta >> 1 + if delta&1 != 0 { + r.prevFile = r.string() + } + } +} + +func (r *importReader) typ() types.Type { + return r.p.typAt(r.uint64(), nil) +} + +func isInterface(t types.Type) bool { + _, ok := t.(*types.Interface) + return ok +} + +func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) } +func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } + +func (r *importReader) doType(base *types.Named) types.Type { + switch k := r.kind(); k { + default: + errorf("unexpected kind tag in %q: %v", r.p.ipath, k) + return nil + + case definedType: + pkg, name := r.qualifiedIdent() + r.p.doDecl(pkg, name) + return pkg.Scope().Lookup(name).(*types.TypeName).Type() + case pointerType: + return types.NewPointer(r.typ()) + case sliceType: + return types.NewSlice(r.typ()) + case arrayType: + n := r.uint64() + return types.NewArray(r.typ(), int64(n)) + case chanType: + dir := chanDir(int(r.uint64())) + return types.NewChan(dir, r.typ()) + case mapType: + return types.NewMap(r.typ(), r.typ()) + case signatureType: + r.currPkg = r.pkg() + return r.signature(nil) + + case structType: + r.currPkg = r.pkg() + + fields := make([]*types.Var, r.uint64()) + tags := make([]string, len(fields)) + for i := range fields { + fpos := r.pos() + fname := r.ident() + ftyp := r.typ() + emb := r.bool() + tag := r.string() + + fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb) + tags[i] = tag + } + return types.NewStruct(fields, tags) + + case interfaceType: + r.currPkg = r.pkg() + + embeddeds := make([]types.Type, r.uint64()) + for i := range embeddeds { + _ = r.pos() + embeddeds[i] = r.typ() + } + + methods := make([]*types.Func, r.uint64()) + for i := range methods { + mpos := r.pos() + mname := r.ident() + + // TODO(mdempsky): Matches bimport.go, but I + // don't agree with this. 
+ var recv *types.Var + if base != nil { + recv = types.NewVar(token.NoPos, r.currPkg, "", base) + } + + msig := r.signature(recv) + methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig) + } + + typ := newInterface(methods, embeddeds) + r.p.interfaceList = append(r.p.interfaceList, typ) + return typ + } +} + +func (r *importReader) kind() itag { + return itag(r.uint64()) +} + +func (r *importReader) signature(recv *types.Var) *types.Signature { + params := r.paramList() + results := r.paramList() + variadic := params.Len() > 0 && r.bool() + return types.NewSignature(recv, params, results, variadic) +} + +func (r *importReader) paramList() *types.Tuple { + xs := make([]*types.Var, r.uint64()) + for i := range xs { + xs[i] = r.param() + } + return types.NewTuple(xs...) +} + +func (r *importReader) param() *types.Var { + pos := r.pos() + name := r.ident() + typ := r.typ() + return types.NewParam(pos, r.currPkg, name, typ) +} + +func (r *importReader) bool() bool { + return r.uint64() != 0 +} + +func (r *importReader) int64() int64 { + n, err := binary.ReadVarint(&r.declReader) + if err != nil { + errorf("readVarint: %v", err) + } + return n +} + +func (r *importReader) uint64() uint64 { + n, err := binary.ReadUvarint(&r.declReader) + if err != nil { + errorf("readUvarint: %v", err) + } + return n +} + +func (r *importReader) byte() byte { + x, err := r.declReader.ReadByte() + if err != nil { + errorf("declReader.ReadByte: %v", err) + } + return x +} diff --git a/internal/go/gcimporter/israce_test.go b/internal/go/gcimporter/israce_test.go new file mode 100644 index 000000000..af8e52b2e --- /dev/null +++ b/internal/go/gcimporter/israce_test.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build race + +package gcimporter_test + +func init() { + isRace = true +} diff --git a/internal/go/gcimporter/newInterface10.go b/internal/go/gcimporter/newInterface10.go new file mode 100644 index 000000000..463f25227 --- /dev/null +++ b/internal/go/gcimporter/newInterface10.go @@ -0,0 +1,21 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + named := make([]*types.Named, len(embeddeds)) + for i, e := range embeddeds { + var ok bool + named[i], ok = e.(*types.Named) + if !ok { + panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11") + } + } + return types.NewInterface(methods, named) +} diff --git a/internal/go/gcimporter/newInterface11.go b/internal/go/gcimporter/newInterface11.go new file mode 100644 index 000000000..ab28b95cb --- /dev/null +++ b/internal/go/gcimporter/newInterface11.go @@ -0,0 +1,13 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + return types.NewInterfaceType(methods, embeddeds) +} diff --git a/internal/go/gcimporter/testdata/a.go b/internal/go/gcimporter/testdata/a.go new file mode 100644 index 000000000..56e4292cd --- /dev/null +++ b/internal/go/gcimporter/testdata/a.go @@ -0,0 +1,14 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Input for TestIssue13566 + +package a + +import "encoding/json" + +type A struct { + a *A + json json.RawMessage +} diff --git a/internal/go/gcimporter/testdata/b.go b/internal/go/gcimporter/testdata/b.go new file mode 100644 index 000000000..419667820 --- /dev/null +++ b/internal/go/gcimporter/testdata/b.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue13566 + +package b + +import "./a" + +type A a.A diff --git a/internal/go/gcimporter/testdata/exports.go b/internal/go/gcimporter/testdata/exports.go new file mode 100644 index 000000000..8ee28b094 --- /dev/null +++ b/internal/go/gcimporter/testdata/exports.go @@ -0,0 +1,89 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is used to generate an object file which +// serves as test file for gcimporter_test.go. + +package exports + +import ( + "go/ast" +) + +// Issue 3682: Correctly read dotted identifiers from export data. 
+const init1 = 0 + +func init() {} + +const ( + C0 int = 0 + C1 = 3.14159265 + C2 = 2.718281828i + C3 = -123.456e-789 + C4 = +123.456E+789 + C5 = 1234i + C6 = "foo\n" + C7 = `bar\n` +) + +type ( + T1 int + T2 [10]int + T3 []int + T4 *int + T5 chan int + T6a chan<- int + T6b chan (<-chan int) + T6c chan<- (chan int) + T7 <-chan *ast.File + T8 struct{} + T9 struct { + a int + b, c float32 + d []string `go:"tag"` + } + T10 struct { + T8 + T9 + _ *T10 + } + T11 map[int]string + T12 interface{} + T13 interface { + m1() + m2(int) float32 + } + T14 interface { + T12 + T13 + m3(x ...struct{}) []T9 + } + T15 func() + T16 func(int) + T17 func(x int) + T18 func() float32 + T19 func() (x float32) + T20 func(...interface{}) + T21 struct{ next *T21 } + T22 struct{ link *T23 } + T23 struct{ link *T22 } + T24 *T24 + T25 *T26 + T26 *T27 + T27 *T25 + T28 func(T28) T28 +) + +var ( + V0 int + V1 = -991.0 +) + +func F1() {} +func F2(x int) {} +func F3() int { return 0 } +func F4() float32 { return 0 } +func F5(a, b, c int, u, v, w struct{ x, y T1 }, more ...interface{}) (p, q, r chan<- T10) + +func (p *T1) M1() diff --git a/internal/go/gcimporter/testdata/issue15920.go b/internal/go/gcimporter/testdata/issue15920.go new file mode 100644 index 000000000..c70f7d826 --- /dev/null +++ b/internal/go/gcimporter/testdata/issue15920.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package p + +// The underlying type of Error is the underlying type of error. +// Make sure we can import this again without problems. +type Error error + +func F() Error { return nil } diff --git a/internal/go/gcimporter/testdata/issue20046.go b/internal/go/gcimporter/testdata/issue20046.go new file mode 100644 index 000000000..c63ee821c --- /dev/null +++ b/internal/go/gcimporter/testdata/issue20046.go @@ -0,0 +1,9 @@ +// Copyright 2017 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package p + +var V interface { + M() +} diff --git a/internal/go/gcimporter/testdata/issue25301.go b/internal/go/gcimporter/testdata/issue25301.go new file mode 100644 index 000000000..e3dc98b4e --- /dev/null +++ b/internal/go/gcimporter/testdata/issue25301.go @@ -0,0 +1,17 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package issue25301 + +type ( + A = interface { + M() + } + T interface { + A + } + S struct{} +) + +func (S) M() { println("m") } diff --git a/internal/go/gcimporter/testdata/p.go b/internal/go/gcimporter/testdata/p.go new file mode 100644 index 000000000..9e2e70576 --- /dev/null +++ b/internal/go/gcimporter/testdata/p.go @@ -0,0 +1,13 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Input for TestIssue15517 + +package p + +const C = 0 + +var V int + +func F() {} diff --git a/internal/go/gcimporter/testdata/versions/test.go b/internal/go/gcimporter/testdata/versions/test.go new file mode 100644 index 000000000..6362adc21 --- /dev/null +++ b/internal/go/gcimporter/testdata/versions/test.go @@ -0,0 +1,30 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/testdata/versions.test.go. + +// To create a test case for a new export format version, +// build this package with the latest compiler and store +// the resulting .a file appropriately named in the versions +// directory. The VersionHandling test will pick it up. 
+// +// In the testdata/versions: +// +// go build -o test_go1.$X_$Y.a test.go +// +// with $X = Go version and $Y = export format version +// (add 'b' or 'i' to distinguish between binary and +// indexed format starting with 1.11 as long as both +// formats are supported). +// +// Make sure this source is extended such that it exercises +// whatever export format change has taken place. + +package test + +// Any release before and including Go 1.7 didn't encode +// the package for a blank struct field. +type BlankField struct { + _ int +} diff --git a/internal/go/gcimporter/testdata/versions/test_go1.11_0i.a b/internal/go/gcimporter/testdata/versions/test_go1.11_0i.a new file mode 100644 index 0000000000000000000000000000000000000000..b00fefed0462172f5f5370f9769ed19342fc0c16 GIT binary patch literal 2420 zcmd5--)q}e6h69muBw_KLdZ-RK{Y0XwvlDomSrzBA-GAqvNg0Rgub}4e66UFB}bN1 zGRnHwvDZCpu!ju#KkT)%u;;z(|JYw}yK^PWah)GyZ@WnM+1DqefAUVHk@lW)sAU!R=Z-ziAz*d$3R7Pll9 zcIA!Y&f_PKAMce$p6nHiH}FUCis}R)bTZs2 zZF!b|xb0G}ckQ0=MuV0B_x9eXpn!016NLK`U_uExPS8<$Mh9x4pq?oLj0}OT#_rqmo&#X{*fU1qhfxSD`(3C5+jDF$ zuq|)&{&(G~RkWJbrCP0AH}tATjdIsCbd8o9mRYWS5?ra+J!)g4U219-z0oiCdtFNF z{hC>?+FfAy*!r%Am2Oz$C~&AxLo1@a7O=a?4(QRj`j3Du#?EdKc%S6!>snnm>iurN zVOUzZZc?*eGi`%bsAf~$1lDo_6}O@$7Vyes0pbClCe#!zDr7zrrnKiC z)Ep?mkht`l%9?7!(MVim;V9&%Z~u} z&`C3<^pF0^eSS?8e@W;6X4CzqHjQ6P2bWkLA7fG=w}b~Xfskcb1`VgV488`UoRM-$ zM#>1;tSDrq9NyWuK~`LlLFOp3h`=S1gWMT#SHf54q z+T#odoZ&U^e&Pgo*>!b7pd|!ECs+{S7h$A2oh&h85CxPB_vkWqRYuG#M&X~SNmgMM zZB`t@P{fp{!UpC9b61q0`2t5RMmF&rp{ityyoT3k+e8ErZ43Su@LJ@R8XUD5qxKP+ q<vTbv3ss< z^@i{M(5+HID_KpdmGWgnFKX1tH%&v=Xue{Z`O*i*g@WCpHa6O%rdH4^?R>k{q_o^F zndPG01cq;^-R)tdo7O0B9Xg=C70}ieu#M;h^z>T%r@$6NWz%(gANXtQT3I*B?Pj}T zSX#bpQnOq#ZG#r5W>eh+w&l1g9z~7FZ%WTLT670(_YRYd*kn3^%dz4ze>=G2TLXvQ zL099b;V|e2cNkUCXcGAKtZc@vZ%bPyHi)vu6LB@L>x^L{u7-8p#GItDJsQ~4Z>ku_ za+7j=R;%Ys!?ONp5On)gb?jz07^+$x*mguUz^cQqy1E`-@yTbc0#jlbQd+)my#}@t zqNcv`<=Ada4{R+Wj|z%s1>uzemYwq1cmhlg_waVW#XGT@%E*cRpK2@e{*Sel%zLaX 
z*b}o@WcUR8pSUf&CrQAbNCJctzKp1E`2Jw33BU0TVcx?bRLyrTXZ&so(JmxrgiG3M zFKUVtu9KeLMmnaSX1KD(2;}M4A&&L2{JofNgD4tTV7E*^Yvf)G@qdtW{BF*1b6EGe zi0giCQulLl-Ot5!AJ5Q;J;wCU|4w~!PZWQR*Z(Es^Tsl^UyCQVNNyiuVj!3Jy8&PV zA!$g1hRaj}KLb%tNGT;DC4^*B6p~U3pJdn|DbC9va}-%b;1WqeY648li6jCu9GJ!j zftgF-!z)1P)5x>9Ox_6aMo1N90;DFBG&vTlu-Fq#P<{|_%>ELN?EXrGm|aCF0h89o z4C~}vfO8?GA;Z$POj1jA&d}iuXMFgX6Kt^i>X<-_2#8KFAi_IgqO4 E0@X}F`Tzg` literal 0 HcmV?d00001 diff --git a/internal/go/gcimporter/testdata/versions/test_go1.11_999b.a b/internal/go/gcimporter/testdata/versions/test_go1.11_999b.a new file mode 100644 index 0000000000000000000000000000000000000000..c35d22dce691e67127e04ea74ddd8c97a57e22ff GIT binary patch literal 2600 zcmd5-QE%c#5MFNy=W|eEYCj zofNe{t4~#>{-OSb&aBx$f#kU>+p|0K%{Q~N-rZLxPV5d&m2OwR`tYv({kfJlV9tK4GN1>%$9hg#UXWp1K*PW)>y)|`=NRW*-&W<%|Dk5#Om*JxNw>r9#U!*r~b zP0ci$Mx$A+X{J%OwbuqT?WWm}oe_(|V4Sd6=}m*7rv;u?yl%MV_s08v?Wf6Q6m@>S zx_JAs)U_|7*t!_KtKHs)x1H;^ei#;&H_#MC+23y|pIJN!!m-w9G!%tU=~==i2_YNG z@A|fKfBo+A$IHvk#w2#X^22Q&a_kIW_--(Y!Wa_RM!e@F&i0IHqM%0i!xycgGydf~ zU_+05O{vr+l#orjNhAW1m98Il^>AK5QK?63X_$73{$JMel|qOgmK#G)BLDpLr;&V# zn63|;$v`g_^@UP^kwyqP;Ej138~ZV%zGZv9{g znV&e*B=p&s#ZJP!Q?wnYb7FUEW^{*u9S-P);czK(-!$r`Rqyxu4a+ggb(`7sn(bPw z!VH(0Ho{N+P=|@>X^{1WI|EEG?uSo;jsdh=}V&d^_VEymMv z5{#JcyS*Tp=tdb=^(-YE!-<85gnq;ihxvBU5u^!MAa>%|`GU%Q?zyr%_5^ko2BC*( zkn~83oP=*VG!&Pjci*5^9g-`C$oe6cpk{Da~(`lDJVy;zog4 zbVW=-{{e#MzIi6gvuXw^X8{AKl@uyEI2~bJx6@$%1PEUGwmQYvIB5<$qk2Z$*50 zk75>_FIQD5p>37s>5beZJ44wf8d^aX7v%GO;k|hPJq4YWL!pz878ng++&xoiZnuMs z17y5kpgjiH=els+Gz1+rDI`qsK{i@_81CZGq8iaMXf;*%R8ePV0gI{7COseryqA?D z63LJRsHGFggVXp7#Hd+yYxGLd9RSj+|6dE7!@>Xn literal 0 HcmV?d00001 diff --git a/internal/go/gcimporter/testdata/versions/test_go1.11_999i.a b/internal/go/gcimporter/testdata/versions/test_go1.11_999i.a new file mode 100644 index 0000000000000000000000000000000000000000..99401d7c37ca48fbca32f0448b71606b7f853ce4 GIT binary patch literal 2420 zcmd5-&uimG6n+|9N2{11LU7Geg4UQ2(nOZ!Uy7R;W4zgI15L=fG&ip-j}>(l%aP@5 
z1EJe%>9vOzdPrgahhCRm=((5v4ZZd+nDmWgC0^&p(pv{<-psuBeecaDjr3^C^sS@1 z)aj`EyH8q=ca*4U6U$9Bj`~We6ngYZWEJJPWP6I&{f1hB(lh;+&OkAbd+6@bQ|c<8 zmWzf_EK|}RnwHiTHiO1>Ckfq0rjQs*l~M` z(^Im$U%veM#jyKq|CxPI9T!}uqv`px)3g4;aP;{oXzzd9(!9g7QZ*}WV3Q;%mupJS z-*fx&R_@8(!QNgwKk}`-?d3!2f$8RL%Q+r;J|@ZsbQJW=z|5agf8=dW>OKs7V9D=%9a!78T+cGy z(c9m3vs~25W|wN^LbaloG+HTijf$?(!lr2y%I^gii&l?X*l3p;T2bHZ7y7*}rPY4f zsFtiQFnnr#$Hhvw%yHn^bU=MGpuIY<^=Jq5^7VDCs#mK0Zhy04 zYK5vnjcVDjDzr#7i|Pijy6vgB6*VHiO`ZluID@`-Psn;4GArP6?6}gei7UQ2u<1Q? zEsh$Eg5%(xpc)2U7k*QftvK~vvSs3cNHs3RbYV9cLm{Tix*1|z%h(?etl1}3ifg&e zIoYe}+Ox2%KOO|mF;#7=>jWcJD*$UmR0I6p@T0G;@pC@)TC2d67>1Ol@0(}9av^Hw zEnkk~CiK8oBl742Gc<$n$NpzUBRcnI`(tQxc$`tI5;b)!vS0ntc=WXBNKY;)8AHY3y z;*2rW<9?>Dw_{AxV7!}9nL69c&=JeUcDq#+F&PE!ed4MaI1rIdt} z5RyqzNJ=TZlVO9TxFCbfQDhN;OC$xUYhYSVBoUb5z%1wjXNQEe%B)CVHu`4lRBryvAOf|9sD`=DA z5QZYAyb!i9Cm8#p1hwZlsx#8SbA+mrCOL<5v@Ifnh_(*@2zV{>N-G@I2vLU!O>$^+ nD3dor{X)Qn<{JT5T7U9>hmi$T8}%kKFB9#&rSuczE%^q0%WFuW&wc6l=qK3gNV1LV2CkISWwo<2znR(DnOWWbk`=+lSITs1 zJbduY`PcXKW|%8lk!IhlE~PbMkKL8VV{4ha}V4EOeHssAj^ZGh&GQ1o9aT0V zE7|B7>L+=&s3k_6(@*cFEPHY<;b{n;kv(Zj0QL#&g9Lc0GL7@8k*`v;$zj8PET0xV z^BVO3%BON$0l*rK(@+9ome1$hFfQ2Q!sz#nt;>lH@;??fh+eI;#5~Q*0)&k6i-ycW z9M$0Qj_tQ`kmdnP7w_MizUT7DnM6*+L(e&jEb6nfh=$|9wIhc9x0JC3LLT7YKKD$= zoW5pom1AG}q=&cQ38gfS>1SbHk^94bS&&D_~y4t#C474yO~F zx{e=?EsI(n1K~A}t9(YwEZB8V*XMo=G#Zel@vd3FSY~B1<3=3#Nx3jg3&fp9JHtm- zb*gb(_3Tyw4G{X;3|+FKV9!AeYO9@U(KxTqPLu43v0KW+wUgD^AcoD!&10FfvhExZ z{hffO7=c>Luya7T4gB{`d;c~9VK?Y*K`QL>muo}l$gTT7pAXdgKb;SHAILu7Z0*CZ z$Vnea8*sMT_y(I%(LMYsy53j(RYQ_M-OZ!CGFsC&v>SmeyI1*Y@NSULW-JR&F}M7b z74dS$vvPr9P1g?BjvoBp`TV*pzgqKG(e2A_>khY>f3O#XkS=t=MBR2vZ?$E$)mEU@ zX}2+{H&g(k335O>(Ans8dupq*(Ye{^Xev=b1)<#_J?J65xuUl!IjJTQ+Tn&d5SmVO z&|4L9OU30-X#F;7q3wVi!4c99%waB@DtRp(-9eT9Ls@~rW84jeb__De1W?iu+DW^* ztk4W%1O8PslOTw62!BX8HS_^`<>I7v@u;$@{1|hUypx8nB@`I%B$SxHRjVyDx``D7 NcQ?$zihvOW!*Feif1)VE2;9JR0_2dO%^`=P@{(NAY$Q^q zq@)3YqSvA?&;mKQK%XFo=67nQwk}c6N1r4`=@I z4QVjY?%la{@Y$hS41G>HDZY!Vw@d!y__I<77%Q$(zN)K#c>n 
z721~V^(@nOdp7a9*e2NS6U%b@y`XRQecUyDrXek9hYjgpSxW8pP3aNICUKgmh9m(J zE>Fp13M=mcJbwDct-GJ!y?ehi$^1@~cJd4pJnTe%JQ}AN%eTYP0iNQ{+{+}FXi;i^ zaC3;0hlep42Jj)eBn$untfEyAfQ6M{5TyexofmUa?SenA@`l@+AI|@+^3rt)AWDti zFc5(1(gGoT`q9~Wc{bR$=8Rc!A^{xJ3N zaPs=4?zs*Lt$t{QByg>6XxJX^hIY{N9W%tt{}R};{DAmu5RbUJW%ffO3_L=5VV5l6*ZhCesyzp>E=A;JYzhJs%EWh zmQ3Rj(IVfArxQ&#*ggwa33~Y@Drfz+^W(fHroT2598#a z_Et?wC-c}qH;c%{Wgg>c-mpMhm4pT?138zmh6U;r>bKA1^~(^bS!k>6z;$OoihIxy zYqx(mXv??1KWMMNVb1}ZwN?0)al#u^2R3VU_C^$g(@X5fDY__b0bN>!l`#m;?xy<~7Z;B9V zLKAd`t=H6AU6gBe32Kdcoi)N0nVpA1(Hd$%V}WU|%eBS=bG5)IGLk_Cs$4ycBR~f-YN8|AfotChY*l~BSE^i{gInmi0M7+UXW+Gc88z0vVt#QqU%$7~6Y}XlzC8MDY*kH5)V;GCf`>CKa_pp6Wlt4RT zUnW&-5J5!1Bn_%KbzU{9xX1+dk}JE2DH%244}ndEXSl^(IJt`l+=}~g$1-{)n5P00 dxUU2z+5L^LdsMlx$dx6&zhKtR&Wb;+KLH~_A*28R literal 0 HcmV?d00001 diff --git a/internal/go/gcimporter/testdata/versions/test_go1.8_4.a b/internal/go/gcimporter/testdata/versions/test_go1.8_4.a new file mode 100644 index 0000000000000000000000000000000000000000..26b8531650ad8f85885fcf62f8306e5f686a1dca GIT binary patch literal 1658 zcmah}L2KMb6n-<6*FxTgsq1b{DMnqE#W=23(#pGQ6G|FP8|cBM^&y0UGb@eOYGp~* zY7+vX^ji803OTsYV^2Ax*U&)Ez4U+d7mWL)-LVtJCc`jq=6m1!<~_|rlhjPd-wVGm z1D-#M5|f_<1J4Y^LlK)dcO2Vs`;O=Cn1?fAexBtfZk^$t>+IPh)3u$UdDF&W-rEav zej>6oN(zx1pW-kKqtm%#JQVpfN)yvF44`DW5Yqyd-UWE{`imOH%WU{ zWWwX##E(w0G)J9Y$qsnIdkd9mg;#~el#(N$04sC_2;jAb@reO2 zNuAv|ARz7HaV9M5m`{(bcH4R-RMHG&w{maQJ;xx|E6)s5%ZeOiq3bOJu&;eg||9}IQ_ZoB=#&>Ou*?)Uvb_&CH^jO@O<8#-Yy7GfCgjE0^+ z28o4*QH+`1;L-z1-V)|$7vdayp7)`O2X8d2V7P1wxZN4 z-cXKntI_>QA9B$jbiawL;MK!F3v!I5|== z3j6Bc=7-C={;N8_&5{K{S;gN}n`>;zf2Z`RKuIZWK?`jB&onfAL7izv)6|Rx4$y8e z0J;j2uF)nmFIg?7HITLbI_v&N^f`g&#PApey3c5fK42sG_$Pwv#_!cC@0;`)foH_9 zk+gQ3(dOEBCFx1IoRyrmWF5)?-KC6V-HXGTWSi6kQ!70khD%qntp>8NW4CD+x@a4E z2APiPXT(Bz*!o&GVEX~?Y{^_~Q9zPMb5$~UJ9Z7ZrKMZ~v%mQBri;N$8`F8p1bGlny&5i&D%B(^TFPz z;3p!_;8<196I3*1d$M-+mg4B!qV3?+X{?VH6}; zz?1Qxf9!a!=Lo0ognYn9KF4h@^mjvUyFI_}4PGPndO;`x9AYR2cF)}%IiqkWM1Qn1 
z=zGBsB-S;G6U_7upOjf7Qc>_ygdc<4o>m}E-^l+I`eFz0rHAA0H@Xfz|BPke|1)`+iXcrZt?40oFTZL8Nb(Rod^G~7A$jbiewML-p{&gCc zI5|==3j6$@=6j2}{);-l&5{{GS;gN}n`>;vzft*Aprn+xpanMmXBry5pw2X-X=+9T z2WU4L09^%1*Ju-(m#h}k8pv9Io%Qg2`hvgr@ zU9=57hg?VXGh(4UY<-~{u>A;kwq!20C?LtBI_PL^_?O^Zt&-WS*lx{!fFX@4#LpRh hN$fL%fd7&p;{9E%1IcdUhrv&SSzxYkem?yz{|7p9b&dc4 literal 0 HcmV?d00001 diff --git a/internal/testenv/testenv.go b/internal/testenv/testenv.go new file mode 100644 index 000000000..0cc90d26a --- /dev/null +++ b/internal/testenv/testenv.go @@ -0,0 +1,185 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package testenv contains helper functions for skipping tests +// based on which tools are present in the environment. +package testenv + +import ( + "fmt" + "io/ioutil" + "os" + "os/exec" + "runtime" + "strings" + "sync" +) + +// Testing is an abstraction of a *testing.T. +type Testing interface { + Skipf(format string, args ...interface{}) + Fatalf(format string, args ...interface{}) +} + +type helperer interface { + Helper() +} + +// packageMainIsDevel reports whether the module containing package main +// is a development version (if module information is available). +// +// Builds in GOPATH mode and builds that lack module information are assumed to +// be development versions. 
+var packageMainIsDevel = func() bool { return true } + +var checkGoGoroot struct { + once sync.Once + err error +} + +func hasTool(tool string) error { + _, err := exec.LookPath(tool) + if err != nil { + return err + } + + switch tool { + case "patch": + // check that the patch tools supports the -o argument + temp, err := ioutil.TempFile("", "patch-test") + if err != nil { + return err + } + temp.Close() + defer os.Remove(temp.Name()) + cmd := exec.Command(tool, "-o", temp.Name()) + if err := cmd.Run(); err != nil { + return err + } + + case "go": + checkGoGoroot.once.Do(func() { + // Ensure that the 'go' command found by exec.LookPath is from the correct + // GOROOT. Otherwise, 'some/path/go test ./...' will test against some + // version of the 'go' binary other than 'some/path/go', which is almost + // certainly not what the user intended. + out, err := exec.Command(tool, "env", "GOROOT").CombinedOutput() + if err != nil { + checkGoGoroot.err = err + return + } + GOROOT := strings.TrimSpace(string(out)) + if GOROOT != runtime.GOROOT() { + checkGoGoroot.err = fmt.Errorf("'go env GOROOT' does not match runtime.GOROOT:\n\tgo env: %s\n\tGOROOT: %s", GOROOT, runtime.GOROOT()) + } + }) + if checkGoGoroot.err != nil { + return checkGoGoroot.err + } + } + + return nil +} + +func allowMissingTool(tool string) bool { + if runtime.GOOS == "android" { + // Android builds generally run tests on a separate machine from the build, + // so don't expect any external tools to be available. + return true + } + + switch tool { + case "go": + if os.Getenv("GO_BUILDER_NAME") == "illumos-amd64-joyent" { + // Work around a misconfigured builder (see https://golang.org/issue/33950). 
+ return true + } + case "diff": + if os.Getenv("GO_BUILDER_NAME") != "" { + return true + } + case "patch": + if os.Getenv("GO_BUILDER_NAME") != "" { + return true + } + } + + // If a developer is actively working on this test, we expect them to have all + // of its dependencies installed. However, if it's just a dependency of some + // other module (for example, being run via 'go test all'), we should be more + // tolerant of unusual environments. + return !packageMainIsDevel() +} + +// NeedsTool skips t if the named tool is not present in the path. +func NeedsTool(t Testing, tool string) { + if t, ok := t.(helperer); ok { + t.Helper() + } + err := hasTool(tool) + if err == nil { + return + } + if allowMissingTool(tool) { + t.Skipf("skipping because %s tool not available: %v", tool, err) + } else { + t.Fatalf("%s tool not available: %v", tool, err) + } +} + +// NeedsGoPackages skips t if the go/packages driver (or 'go' tool) implied by +// the current process environment is not present in the path. +func NeedsGoPackages(t Testing) { + if t, ok := t.(helperer); ok { + t.Helper() + } + + tool := os.Getenv("GOPACKAGESDRIVER") + switch tool { + case "off": + // "off" forces go/packages to use the go command. + tool = "go" + case "": + if _, err := exec.LookPath("gopackagesdriver"); err == nil { + tool = "gopackagesdriver" + } else { + tool = "go" + } + } + + NeedsTool(t, tool) +} + +// NeedsGoPackagesEnv skips t if the go/packages driver (or 'go' tool) implied +// by env is not present in the path. 
+func NeedsGoPackagesEnv(t Testing, env []string) { + if t, ok := t.(helperer); ok { + t.Helper() + } + + for _, v := range env { + if strings.HasPrefix(v, "GOPACKAGESDRIVER=") { + tool := strings.TrimPrefix(v, "GOPACKAGESDRIVER=") + if tool == "off" { + NeedsTool(t, "go") + } else { + NeedsTool(t, tool) + } + return + } + } + + NeedsGoPackages(t) +} + +// ExitIfSmallMachine emits a helpful diagnostic and calls os.Exit(0) if the +// current machine is a builder known to have scarce resources. +// +// It should be called from within a TestMain function. +func ExitIfSmallMachine() { + if os.Getenv("GO_BUILDER_NAME") == "linux-arm" { + fmt.Fprintln(os.Stderr, "skipping test: linux-arm builder lacks sufficient memory (https://golang.org/issue/32834)") + os.Exit(0) + } +} diff --git a/internal/testenv/testenv_112.go b/internal/testenv/testenv_112.go new file mode 100644 index 000000000..b25846c20 --- /dev/null +++ b/internal/testenv/testenv_112.go @@ -0,0 +1,27 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.12 + +package testenv + +import "runtime/debug" + +func packageMainIsDevelModule() bool { + info, ok := debug.ReadBuildInfo() + if !ok { + // Most test binaries currently lack build info, but this should become more + // permissive once https://golang.org/issue/33976 is fixed. + return true + } + + // Note: info.Main.Version describes the version of the module containing + // package main, not the version of “the main module”. + // See https://golang.org/issue/33975. 
+ return info.Main.Version == "(devel)" +} + +func init() { + packageMainIsDevel = packageMainIsDevelModule +} From 45f326d34e7ae09439d72cffcac68449d9361d8e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 15 Apr 2020 10:49:33 +0200 Subject: [PATCH 016/111] internal/go/gcexportdata: import copy of golang.org/x/tools/go/gcexportdata Import from commit 33e937220d8f91f1d242ad15aebc3e245aca5515 --- internal/go/gcexportdata/example_test.go | 126 ++++++++++++++++++ internal/go/gcexportdata/gcexportdata.go | 109 +++++++++++++++ internal/go/gcexportdata/gcexportdata_test.go | 41 ++++++ internal/go/gcexportdata/importer.go | 73 ++++++++++ internal/go/gcexportdata/main.go | 100 ++++++++++++++ .../go/gcexportdata/testdata/errors-ae16.a | Bin 0 -> 5494 bytes 6 files changed, 449 insertions(+) create mode 100644 internal/go/gcexportdata/example_test.go create mode 100644 internal/go/gcexportdata/gcexportdata.go create mode 100644 internal/go/gcexportdata/gcexportdata_test.go create mode 100644 internal/go/gcexportdata/importer.go create mode 100644 internal/go/gcexportdata/main.go create mode 100644 internal/go/gcexportdata/testdata/errors-ae16.a diff --git a/internal/go/gcexportdata/example_test.go b/internal/go/gcexportdata/example_test.go new file mode 100644 index 000000000..0b517b5ad --- /dev/null +++ b/internal/go/gcexportdata/example_test.go @@ -0,0 +1,126 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.7 +// +build gc + +package gcexportdata_test + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "log" + "os" + "path/filepath" + + "honnef.co/go/tools/internal/go/gcexportdata" +) + +// ExampleRead uses gcexportdata.Read to load type information for the +// "fmt" package from the fmt.a file produced by the gc compiler. +func ExampleRead() { + // Find the export data file. 
+ filename, path := gcexportdata.Find("fmt", "") + if filename == "" { + log.Fatalf("can't find export data for fmt") + } + fmt.Printf("Package path: %s\n", path) + fmt.Printf("Export data: %s\n", filepath.Base(filename)) + + // Open and read the file. + f, err := os.Open(filename) + if err != nil { + log.Fatal(err) + } + defer f.Close() + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("reading export data %s: %v", filename, err) + } + + // Decode the export data. + fset := token.NewFileSet() + imports := make(map[string]*types.Package) + pkg, err := gcexportdata.Read(r, fset, imports, path) + if err != nil { + log.Fatal(err) + } + + // Print package information. + members := pkg.Scope().Names() + if members[0] == ".inittask" { + // An improvement to init handling in 1.13 added ".inittask". Remove so go >= 1.13 and go < 1.13 both pass. + members = members[1:] + } + fmt.Printf("Package members: %s...\n", members[:5]) + println := pkg.Scope().Lookup("Println") + posn := fset.Position(println.Pos()) + posn.Line = 123 // make example deterministic + fmt.Printf("Println type: %s\n", println.Type()) + fmt.Printf("Println location: %s\n", slashify(posn)) + + // Output: + // + // Package path: fmt + // Export data: fmt.a + // Package members: [Errorf Formatter Fprint Fprintf Fprintln]... + // Println type: func(a ...interface{}) (n int, err error) + // Println location: $GOROOT/src/fmt/print.go:123:1 +} + +// ExampleNewImporter demonstrates usage of NewImporter to provide type +// information for dependencies when type-checking Go source code. 
+func ExampleNewImporter() { + const src = `package myrpc + +// choosing a package that doesn't change across releases +import "net/rpc" + +const serverError rpc.ServerError = "" +` + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "myrpc.go", src, 0) + if err != nil { + log.Fatal(err) + } + + packages := make(map[string]*types.Package) + imp := gcexportdata.NewImporter(fset, packages) + conf := types.Config{Importer: imp} + pkg, err := conf.Check("myrpc", fset, []*ast.File{f}, nil) + if err != nil { + log.Fatal(err) + } + + // object from imported package + pi := packages["net/rpc"].Scope().Lookup("ServerError") + fmt.Printf("type %s.%s %s // %s\n", + pi.Pkg().Path(), + pi.Name(), + pi.Type().Underlying(), + slashify(fset.Position(pi.Pos())), + ) + + // object in source package + twopi := pkg.Scope().Lookup("serverError") + fmt.Printf("const %s %s = %s // %s\n", + twopi.Name(), + twopi.Type(), + twopi.(*types.Const).Val(), + slashify(fset.Position(twopi.Pos())), + ) + + // Output: + // + // type net/rpc.ServerError string // $GOROOT/src/net/rpc/client.go:20:1 + // const serverError net/rpc.ServerError = "" // myrpc.go:6:7 +} + +func slashify(posn token.Position) token.Position { + posn.Filename = filepath.ToSlash(posn.Filename) // for MS Windows portability + return posn +} diff --git a/internal/go/gcexportdata/gcexportdata.go b/internal/go/gcexportdata/gcexportdata.go new file mode 100644 index 000000000..13e757f47 --- /dev/null +++ b/internal/go/gcexportdata/gcexportdata.go @@ -0,0 +1,109 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gcexportdata provides functions for locating, reading, and +// writing export data files containing type information produced by the +// gc compiler. This package supports go1.7 export data format and all +// later versions. 
+// +// Although it might seem convenient for this package to live alongside +// go/types in the standard library, this would cause version skew +// problems for developer tools that use it, since they must be able to +// consume the outputs of the gc compiler both before and after a Go +// update such as from Go 1.7 to Go 1.8. Because this package lives in +// golang.org/x/tools, sites can update their version of this repo some +// time before the Go 1.8 release and rebuild and redeploy their +// developer tools, which will then be able to consume both Go 1.7 and +// Go 1.8 export data files, so they will work before and after the +// Go update. (See discussion at https://golang.org/issue/15651.) +// +package gcexportdata + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "go/types" + "io" + "io/ioutil" + + "honnef.co/go/tools/internal/go/gcimporter" +) + +// Find returns the name of an object (.o) or archive (.a) file +// containing type information for the specified import path, +// using the workspace layout conventions of go/build. +// If no file was found, an empty filename is returned. +// +// A relative srcDir is interpreted relative to the current working directory. +// +// Find also returns the package's resolved (canonical) import path, +// reflecting the effects of srcDir and vendoring on importPath. +func Find(importPath, srcDir string) (filename, path string) { + return gcimporter.FindPkg(importPath, srcDir) +} + +// NewReader returns a reader for the export data section of an object +// (.o) or archive (.a) file read from r. The new reader may provide +// additional trailing data beyond the end of the export data. 
+func NewReader(r io.Reader) (io.Reader, error) { + buf := bufio.NewReader(r) + _, err := gcimporter.FindExportData(buf) + // If we ever switch to a zip-like archive format with the ToC + // at the end, we can return the correct portion of export data, + // but for now we must return the entire rest of the file. + return buf, err +} + +// Read reads export data from in, decodes it, and returns type +// information for the package. +// The package name is specified by path. +// File position information is added to fset. +// +// Read may inspect and add to the imports map to ensure that references +// within the export data to other packages are consistent. The caller +// must ensure that imports[path] does not exist, or exists but is +// incomplete (see types.Package.Complete), and Read inserts the +// resulting package into this map entry. +// +// On return, the state of the reader is undefined. +func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { + data, err := ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export data for %q: %v", path, err) + } + + if bytes.HasPrefix(data, []byte("!")) { + return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) + } + + // The App Engine Go runtime v1.6 uses the old export data format. + // TODO(adonovan): delete once v1.7 has been around for a while. + if bytes.HasPrefix(data, []byte("package ")) { + return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) + } + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. 
+ if len(data) > 0 && data[0] == 'i' { + _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) + return pkg, err + } + + _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + return pkg, err +} + +// Write writes encoded type information for the specified package to out. +// The FileSet provides file position information for named objects. +func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { + b, err := gcimporter.IExportData(fset, pkg) + if err != nil { + return err + } + _, err = out.Write(b) + return err +} diff --git a/internal/go/gcexportdata/gcexportdata_test.go b/internal/go/gcexportdata/gcexportdata_test.go new file mode 100644 index 000000000..17b3e6f3d --- /dev/null +++ b/internal/go/gcexportdata/gcexportdata_test.go @@ -0,0 +1,41 @@ +package gcexportdata_test + +import ( + "go/token" + "go/types" + "log" + "os" + "testing" + + "honnef.co/go/tools/internal/go/gcexportdata" +) + +// Test to ensure that gcexportdata can read files produced by App +// Engine Go runtime v1.6. +func TestAppEngine16(t *testing.T) { + // Open and read the file. + f, err := os.Open("testdata/errors-ae16.a") + if err != nil { + t.Fatal(err) + } + defer f.Close() + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("reading export data: %v", err) + } + + // Decode the export data. + fset := token.NewFileSet() + imports := make(map[string]*types.Package) + pkg, err := gcexportdata.Read(r, fset, imports, "errors") + if err != nil { + log.Fatal(err) + } + + // Print package information. + got := pkg.Scope().Lookup("New").Type().String() + want := "func(text string) error" + if got != want { + t.Errorf("New.Type = %s, want %s", got, want) + } +} diff --git a/internal/go/gcexportdata/importer.go b/internal/go/gcexportdata/importer.go new file mode 100644 index 000000000..efe221e7e --- /dev/null +++ b/internal/go/gcexportdata/importer.go @@ -0,0 +1,73 @@ +// Copyright 2016 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcexportdata + +import ( + "fmt" + "go/token" + "go/types" + "os" +) + +// NewImporter returns a new instance of the types.Importer interface +// that reads type information from export data files written by gc. +// The Importer also satisfies types.ImporterFrom. +// +// Export data files are located using "go build" workspace conventions +// and the build.Default context. +// +// Use this importer instead of go/importer.For("gc", ...) to avoid the +// version-skew problems described in the documentation of this package, +// or to control the FileSet or access the imports map populated during +// package loading. +// +func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { + return importer{fset, imports} +} + +type importer struct { + fset *token.FileSet + imports map[string]*types.Package +} + +func (imp importer) Import(importPath string) (*types.Package, error) { + return imp.ImportFrom(importPath, "", 0) +} + +func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { + filename, path := Find(importPath, srcDir) + if filename == "" { + if importPath == "unsafe" { + // Even for unsafe, call Find first in case + // the package was vendored. 
+ return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %s", importPath) + } + + if pkg, ok := imp.imports[path]; ok && pkg.Complete() { + return pkg, nil // cache hit + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + f.Close() + if err != nil { + // add file name to error + err = fmt.Errorf("reading export data: %s: %v", filename, err) + } + }() + + r, err := NewReader(f) + if err != nil { + return nil, err + } + + return Read(r, imp.fset, imp.imports, path) +} diff --git a/internal/go/gcexportdata/main.go b/internal/go/gcexportdata/main.go new file mode 100644 index 000000000..a84a7ca4c --- /dev/null +++ b/internal/go/gcexportdata/main.go @@ -0,0 +1,100 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// The gcexportdata command is a diagnostic tool that displays the +// contents of gc export data files. +package main + +import ( + "flag" + "fmt" + "go/token" + "go/types" + "log" + "os" + + "honnef.co/go/tools/internal/go/gcexportdata" + + "golang.org/x/tools/go/types/typeutil" +) + +var packageFlag = flag.String("package", "", "alternative package to print") + +func main() { + log.SetPrefix("gcexportdata: ") + log.SetFlags(0) + flag.Usage = func() { + fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a") + } + flag.Parse() + if flag.NArg() != 1 { + flag.Usage() + os.Exit(2) + } + filename := flag.Args()[0] + + f, err := os.Open(filename) + if err != nil { + log.Fatal(err) + } + + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Decode the package. 
+ const primary = "" + imports := make(map[string]*types.Package) + fset := token.NewFileSet() + pkg, err := gcexportdata.Read(r, fset, imports, primary) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Optionally select an indirectly mentioned package. + if *packageFlag != "" { + pkg = imports[*packageFlag] + if pkg == nil { + fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n", + filename, *packageFlag) + for p := range imports { + if p != primary { + fmt.Fprintf(os.Stderr, "\t%s\n", p) + } + } + os.Exit(1) + } + } + + // Print all package-level declarations, including non-exported ones. + fmt.Printf("package %s\n", pkg.Name()) + for _, imp := range pkg.Imports() { + fmt.Printf("import %q\n", imp.Path()) + } + qual := func(p *types.Package) string { + if pkg == p { + return "" + } + return p.Name() + } + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + fmt.Printf("%s: %s\n", + fset.Position(obj.Pos()), + types.ObjectString(obj, qual)) + + // For types, print each method. 
+ if _, ok := obj.(*types.TypeName); ok { + for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { + fmt.Printf("%s: %s\n", + fset.Position(method.Obj().Pos()), + types.SelectionString(method, qual)) + } + } + } +} diff --git a/internal/go/gcexportdata/testdata/errors-ae16.a b/internal/go/gcexportdata/testdata/errors-ae16.a new file mode 100644 index 0000000000000000000000000000000000000000..3f1dad54f074503fb14d9ddf702553f41e696a4c GIT binary patch literal 5494 zcmb^#&2QUO{KYL*7D{K0YLsbAUdXDnfM@4BWo*^d4I|K2X*+CEMe8_r8k#1N9(8^@tYXqg z<}w8}V_7*Pl`7=ZYQj*}yro*%v>J^>VzEeFH9%B>&Wc@?;^T-&`COw|w<(Lpl$ZX_gmfoz z#JN>BDbRzBtXIHrm&%od*i;*;g^4Om`ou_4vtdjrYl%d3Lb`-zNO7tl*R`b~B?J~S zZW_QE9YE$wwRySbhM*NA_B|+GseGOVPI{SmMNUTufxsp!#SqyQhMBVoxj%saMQvmF z41q^`Ul)(w`w@3{|BS1@Y0bIVG=6B>S8q{-=E>Moy4jeCg|+5POwu+gv7FXi4C6*w z{{W$6<)+q*yWm&wK>bV0KXpd6jX9v841so9yZbo)$iup|odf1v;#^E@i5z?Olhz!J zY0XM3+&Yw=eM9%Zr}p;teud|FBzzKu67os-1R>*?0CH!x0PNH7Fdi@I%>O&g#;1C)@dQcW&9dO@D-WXw_$(7q55Ely?oZ~*!( zg^p4vO-nf2qGvrBKNe7NiE9Te5+8vgz~FBRkvW9wU0{0G9aR0^0Fo*kVYz0T4F^hc zzFIS!H3u-JX9)?P0+aL=r{*lz=M_a!$x$KL4j-aINEbv*xX*2sNP}LJN8l7ooshxf z)Dx_R$Z_F~S?g+OcIN5`2@5+RB96W-d>;yp5MhMKLPI!92CtWlO3lzKjfW4cR8~o6 zlj)Rcq%#>Mld*D{Tux8rvx;F^X`Og*6(gtXIWwoIc~ws;rfwvXmZ~SzoT|Vk11=Ec zQtfQRHqO$xG~5Lag<>)O)gh>tew)vsOBnO&OR}2e{*Wk6aZ*peMVc zSGivP7bM7S0mwo~3lCj8c=U{Si^rf#zn@1>!%=sU>)^AYmxF~dLdK95x+~ZJ7=#n% z8)Pe?tN}tFfrwyjt09A=RF@p-@$i-fak?lp=>KG>0INl+yxfdO`0ScQCq)j30`kSv8Zcd#c z;c$}23WVOwAc(9$qHT((i;d<491lTn9Cqj4u82dx z+efIy+ybA(*W8q15)*AoR+4Qhlzadf+tSGnK}Zm5R~F|)FxF&!Un%{t{L87ADJ9pA zeG4xkzH)?-7cQY5?se-W1FrPfOUnnX0ts>us|cu`U?3}jeMx==DOR_zC6QrLACzVKWR4?Iwlw)wu3c)C2A`TfGo= z#hG?np{0^SK&H$Ik1~LX z417n}j0YPCc(;w~vD?RfsKeG+o5Z!1$p|_}K_kp2Zbx~!UbCIL?kr-&2)%c^{7HX; zuy1*X2j$OhLq5e{?NA(E$2c8RoYSCcU4b|@#ylbrIk;AKAendA`wPhB7qID>JrSDD zo7~2JR~}Qo_Dk0<9yXut7rLPJQ!8%o>DD)} Date: Thu, 16 Apr 2020 05:35:21 +0200 Subject: [PATCH 017/111] internal/go/gcimporter: avoid expensive call to (*token.File).SetLines SetLines verifies 
the lines argument, which gets expensive for our 64K lines slices. Circumvent the check by unsafely setting the field directly. --- internal/go/gcimporter/bimport.go | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/internal/go/gcimporter/bimport.go b/internal/go/gcimporter/bimport.go index e9f73d14a..3176441c4 100644 --- a/internal/go/gcimporter/bimport.go +++ b/internal/go/gcimporter/bimport.go @@ -18,6 +18,7 @@ import ( "sync" "unicode" "unicode/utf8" + "unsafe" ) type importer struct { @@ -341,6 +342,22 @@ type fakeFileSet struct { files map[string]*token.File } +type unsafeFile struct { + _ uintptr + _ string + _ int + _ int + mutex sync.Mutex + lines []int +} + +func (f *unsafeFile) SetLines(lines []int) bool { + f.mutex.Lock() + f.lines = lines + f.mutex.Unlock() + return true +} + func (s *fakeFileSet) pos(file string, line, column int) token.Pos { // TODO(mdempsky): Make use of column. @@ -359,7 +376,7 @@ func (s *fakeFileSet) pos(file string, line, column int) token.Pos { fakeLines[i] = i } }) - f.SetLines(fakeLines) + (*unsafeFile)(unsafe.Pointer(f)).SetLines(fakeLines) } if line > maxlines { From ff23a1406b4cdeb36e17fd58ad8b91af44d8a7b3 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 16 Apr 2020 05:44:05 +0200 Subject: [PATCH 018/111] internal/go/gcexportdata: remove code we don't need We don't need to find object files, go/packages (via go list) take care of that for us. We also don't need to write export data. 
--- internal/go/gcexportdata/example_test.go | 126 ----------------------- internal/go/gcexportdata/gcexportdata.go | 24 ----- internal/go/gcexportdata/importer.go | 73 ------------- 3 files changed, 223 deletions(-) delete mode 100644 internal/go/gcexportdata/example_test.go delete mode 100644 internal/go/gcexportdata/importer.go diff --git a/internal/go/gcexportdata/example_test.go b/internal/go/gcexportdata/example_test.go deleted file mode 100644 index 0b517b5ad..000000000 --- a/internal/go/gcexportdata/example_test.go +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build go1.7 -// +build gc - -package gcexportdata_test - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "go/types" - "log" - "os" - "path/filepath" - - "honnef.co/go/tools/internal/go/gcexportdata" -) - -// ExampleRead uses gcexportdata.Read to load type information for the -// "fmt" package from the fmt.a file produced by the gc compiler. -func ExampleRead() { - // Find the export data file. - filename, path := gcexportdata.Find("fmt", "") - if filename == "" { - log.Fatalf("can't find export data for fmt") - } - fmt.Printf("Package path: %s\n", path) - fmt.Printf("Export data: %s\n", filepath.Base(filename)) - - // Open and read the file. - f, err := os.Open(filename) - if err != nil { - log.Fatal(err) - } - defer f.Close() - r, err := gcexportdata.NewReader(f) - if err != nil { - log.Fatalf("reading export data %s: %v", filename, err) - } - - // Decode the export data. - fset := token.NewFileSet() - imports := make(map[string]*types.Package) - pkg, err := gcexportdata.Read(r, fset, imports, path) - if err != nil { - log.Fatal(err) - } - - // Print package information. - members := pkg.Scope().Names() - if members[0] == ".inittask" { - // An improvement to init handling in 1.13 added ".inittask". 
Remove so go >= 1.13 and go < 1.13 both pass. - members = members[1:] - } - fmt.Printf("Package members: %s...\n", members[:5]) - println := pkg.Scope().Lookup("Println") - posn := fset.Position(println.Pos()) - posn.Line = 123 // make example deterministic - fmt.Printf("Println type: %s\n", println.Type()) - fmt.Printf("Println location: %s\n", slashify(posn)) - - // Output: - // - // Package path: fmt - // Export data: fmt.a - // Package members: [Errorf Formatter Fprint Fprintf Fprintln]... - // Println type: func(a ...interface{}) (n int, err error) - // Println location: $GOROOT/src/fmt/print.go:123:1 -} - -// ExampleNewImporter demonstrates usage of NewImporter to provide type -// information for dependencies when type-checking Go source code. -func ExampleNewImporter() { - const src = `package myrpc - -// choosing a package that doesn't change across releases -import "net/rpc" - -const serverError rpc.ServerError = "" -` - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, "myrpc.go", src, 0) - if err != nil { - log.Fatal(err) - } - - packages := make(map[string]*types.Package) - imp := gcexportdata.NewImporter(fset, packages) - conf := types.Config{Importer: imp} - pkg, err := conf.Check("myrpc", fset, []*ast.File{f}, nil) - if err != nil { - log.Fatal(err) - } - - // object from imported package - pi := packages["net/rpc"].Scope().Lookup("ServerError") - fmt.Printf("type %s.%s %s // %s\n", - pi.Pkg().Path(), - pi.Name(), - pi.Type().Underlying(), - slashify(fset.Position(pi.Pos())), - ) - - // object in source package - twopi := pkg.Scope().Lookup("serverError") - fmt.Printf("const %s %s = %s // %s\n", - twopi.Name(), - twopi.Type(), - twopi.(*types.Const).Val(), - slashify(fset.Position(twopi.Pos())), - ) - - // Output: - // - // type net/rpc.ServerError string // $GOROOT/src/net/rpc/client.go:20:1 - // const serverError net/rpc.ServerError = "" // myrpc.go:6:7 -} - -func slashify(posn token.Position) token.Position { - posn.Filename = 
filepath.ToSlash(posn.Filename) // for MS Windows portability - return posn -} diff --git a/internal/go/gcexportdata/gcexportdata.go b/internal/go/gcexportdata/gcexportdata.go index 13e757f47..52f47c8b0 100644 --- a/internal/go/gcexportdata/gcexportdata.go +++ b/internal/go/gcexportdata/gcexportdata.go @@ -32,19 +32,6 @@ import ( "honnef.co/go/tools/internal/go/gcimporter" ) -// Find returns the name of an object (.o) or archive (.a) file -// containing type information for the specified import path, -// using the workspace layout conventions of go/build. -// If no file was found, an empty filename is returned. -// -// A relative srcDir is interpreted relative to the current working directory. -// -// Find also returns the package's resolved (canonical) import path, -// reflecting the effects of srcDir and vendoring on importPath. -func Find(importPath, srcDir string) (filename, path string) { - return gcimporter.FindPkg(importPath, srcDir) -} - // NewReader returns a reader for the export data section of an object // (.o) or archive (.a) file read from r. The new reader may provide // additional trailing data beyond the end of the export data. @@ -96,14 +83,3 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, _, pkg, err := gcimporter.BImportData(fset, imports, data, path) return pkg, err } - -// Write writes encoded type information for the specified package to out. -// The FileSet provides file position information for named objects. -func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { - b, err := gcimporter.IExportData(fset, pkg) - if err != nil { - return err - } - _, err = out.Write(b) - return err -} diff --git a/internal/go/gcexportdata/importer.go b/internal/go/gcexportdata/importer.go deleted file mode 100644 index efe221e7e..000000000 --- a/internal/go/gcexportdata/importer.go +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gcexportdata - -import ( - "fmt" - "go/token" - "go/types" - "os" -) - -// NewImporter returns a new instance of the types.Importer interface -// that reads type information from export data files written by gc. -// The Importer also satisfies types.ImporterFrom. -// -// Export data files are located using "go build" workspace conventions -// and the build.Default context. -// -// Use this importer instead of go/importer.For("gc", ...) to avoid the -// version-skew problems described in the documentation of this package, -// or to control the FileSet or access the imports map populated during -// package loading. -// -func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { - return importer{fset, imports} -} - -type importer struct { - fset *token.FileSet - imports map[string]*types.Package -} - -func (imp importer) Import(importPath string) (*types.Package, error) { - return imp.ImportFrom(importPath, "", 0) -} - -func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { - filename, path := Find(importPath, srcDir) - if filename == "" { - if importPath == "unsafe" { - // Even for unsafe, call Find first in case - // the package was vendored. 
- return types.Unsafe, nil - } - return nil, fmt.Errorf("can't find import: %s", importPath) - } - - if pkg, ok := imp.imports[path]; ok && pkg.Complete() { - return pkg, nil // cache hit - } - - // open file - f, err := os.Open(filename) - if err != nil { - return nil, err - } - defer func() { - f.Close() - if err != nil { - // add file name to error - err = fmt.Errorf("reading export data: %s: %v", filename, err) - } - }() - - r, err := NewReader(f) - if err != nil { - return nil, err - } - - return Read(r, imp.fset, imp.imports, path) -} From 0295cbe75995d895d290704619e0def477b16cf8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 16 Apr 2020 05:59:21 +0200 Subject: [PATCH 019/111] internal/go/gcimporter: remove code for writing export data --- internal/go/gcimporter/bexport.go | 852 ------------------------- internal/go/gcimporter/bexport_test.go | 420 ------------ internal/go/gcimporter/iexport.go | 739 --------------------- internal/go/gcimporter/iexport_test.go | 310 --------- internal/go/gcimporter/israce_test.go | 11 - 5 files changed, 2332 deletions(-) delete mode 100644 internal/go/gcimporter/bexport.go delete mode 100644 internal/go/gcimporter/bexport_test.go delete mode 100644 internal/go/gcimporter/iexport.go delete mode 100644 internal/go/gcimporter/iexport_test.go delete mode 100644 internal/go/gcimporter/israce_test.go diff --git a/internal/go/gcimporter/bexport.go b/internal/go/gcimporter/bexport.go deleted file mode 100644 index a807d0aaa..000000000 --- a/internal/go/gcimporter/bexport.go +++ /dev/null @@ -1,852 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Binary package export. -// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; -// see that file for specification of the format. 
- -package gcimporter - -import ( - "bytes" - "encoding/binary" - "fmt" - "go/ast" - "go/constant" - "go/token" - "go/types" - "math" - "math/big" - "sort" - "strings" -) - -// If debugFormat is set, each integer and string value is preceded by a marker -// and position information in the encoding. This mechanism permits an importer -// to recognize immediately when it is out of sync. The importer recognizes this -// mode automatically (i.e., it can import export data produced with debugging -// support even if debugFormat is not set at the time of import). This mode will -// lead to massively larger export data (by a factor of 2 to 3) and should only -// be enabled during development and debugging. -// -// NOTE: This flag is the first flag to enable if importing dies because of -// (suspected) format errors, and whenever a change is made to the format. -const debugFormat = false // default: false - -// If trace is set, debugging output is printed to std out. -const trace = false // default: false - -// Current export format version. Increase with each format change. -// Note: The latest binary (non-indexed) export format is at version 6. -// This exporter is still at level 4, but it doesn't matter since -// the binary importer can handle older versions just fine. -// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE -// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE -// 4: type name objects support type aliases, uses aliasTag -// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) -// 2: removed unused bool in ODCL export (compiler only) -// 1: header format change (more regular), export package for _ struct fields -// 0: Go1.7 encoding -const exportVersion = 4 - -// trackAllTypes enables cycle tracking for all types, not just named -// types. The existing compiler invariants assume that unnamed types -// that are not completely set up are not used, or else there are spurious -// errors. 
-// If disabled, only named types are tracked, possibly leading to slightly -// less efficient encoding in rare cases. It also prevents the export of -// some corner-case type declarations (but those are not handled correctly -// with with the textual export format either). -// TODO(gri) enable and remove once issues caused by it are fixed -const trackAllTypes = false - -type exporter struct { - fset *token.FileSet - out bytes.Buffer - - // object -> index maps, indexed in order of serialization - strIndex map[string]int - pkgIndex map[*types.Package]int - typIndex map[types.Type]int - - // position encoding - posInfoFormat bool - prevFile string - prevLine int - - // debugging support - written int // bytes written - indent int // for trace -} - -// internalError represents an error generated inside this package. -type internalError string - -func (e internalError) Error() string { return "gcimporter: " + string(e) } - -func internalErrorf(format string, args ...interface{}) error { - return internalError(fmt.Sprintf(format, args...)) -} - -// BExportData returns binary export data for pkg. -// If no file set is provided, position info will be missing. -func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { - defer func() { - if e := recover(); e != nil { - if ierr, ok := e.(internalError); ok { - err = ierr - return - } - // Not an internal error; panic again. - panic(e) - } - }() - - p := exporter{ - fset: fset, - strIndex: map[string]int{"": 0}, // empty string is mapped to 0 - pkgIndex: make(map[*types.Package]int), - typIndex: make(map[types.Type]int), - posInfoFormat: true, // TODO(gri) might become a flag, eventually - } - - // write version info - // The version string must start with "version %d" where %d is the version - // number. Additional debugging information may follow after a blank; that - // text is ignored by the importer. 
- p.rawStringln(fmt.Sprintf("version %d", exportVersion)) - var debug string - if debugFormat { - debug = "debug" - } - p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly - p.bool(trackAllTypes) - p.bool(p.posInfoFormat) - - // --- generic export data --- - - // populate type map with predeclared "known" types - for index, typ := range predeclared() { - p.typIndex[typ] = index - } - if len(p.typIndex) != len(predeclared()) { - return nil, internalError("duplicate entries in type map?") - } - - // write package data - p.pkg(pkg, true) - if trace { - p.tracef("\n") - } - - // write objects - objcount := 0 - scope := pkg.Scope() - for _, name := range scope.Names() { - if !ast.IsExported(name) { - continue - } - if trace { - p.tracef("\n") - } - p.obj(scope.Lookup(name)) - objcount++ - } - - // indicate end of list - if trace { - p.tracef("\n") - } - p.tag(endTag) - - // for self-verification only (redundant) - p.int(objcount) - - if trace { - p.tracef("\n") - } - - // --- end of export data --- - - return p.out.Bytes(), nil -} - -func (p *exporter) pkg(pkg *types.Package, emptypath bool) { - if pkg == nil { - panic(internalError("unexpected nil pkg")) - } - - // if we saw the package before, write its index (>= 0) - if i, ok := p.pkgIndex[pkg]; ok { - p.index('P', i) - return - } - - // otherwise, remember the package, write the package tag (< 0) and package data - if trace { - p.tracef("P%d = { ", len(p.pkgIndex)) - defer p.tracef("} ") - } - p.pkgIndex[pkg] = len(p.pkgIndex) - - p.tag(packageTag) - p.string(pkg.Name()) - if emptypath { - p.string("") - } else { - p.string(pkg.Path()) - } -} - -func (p *exporter) obj(obj types.Object) { - switch obj := obj.(type) { - case *types.Const: - p.tag(constTag) - p.pos(obj) - p.qualifiedName(obj) - p.typ(obj.Type()) - p.value(obj.Val()) - - case *types.TypeName: - if obj.IsAlias() { - p.tag(aliasTag) - p.pos(obj) - p.qualifiedName(obj) - } else { - p.tag(typeTag) - } 
- p.typ(obj.Type()) - - case *types.Var: - p.tag(varTag) - p.pos(obj) - p.qualifiedName(obj) - p.typ(obj.Type()) - - case *types.Func: - p.tag(funcTag) - p.pos(obj) - p.qualifiedName(obj) - sig := obj.Type().(*types.Signature) - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) - - default: - panic(internalErrorf("unexpected object %v (%T)", obj, obj)) - } -} - -func (p *exporter) pos(obj types.Object) { - if !p.posInfoFormat { - return - } - - file, line := p.fileLine(obj) - if file == p.prevFile { - // common case: write line delta - // delta == 0 means different file or no line change - delta := line - p.prevLine - p.int(delta) - if delta == 0 { - p.int(-1) // -1 means no file change - } - } else { - // different file - p.int(0) - // Encode filename as length of common prefix with previous - // filename, followed by (possibly empty) suffix. Filenames - // frequently share path prefixes, so this can save a lot - // of space and make export data size less dependent on file - // path length. The suffix is unlikely to be empty because - // file names tend to end in ".go". - n := commonPrefixLen(p.prevFile, file) - p.int(n) // n >= 0 - p.string(file[n:]) // write suffix only - p.prevFile = file - p.int(line) - } - p.prevLine = line -} - -func (p *exporter) fileLine(obj types.Object) (file string, line int) { - if p.fset != nil { - pos := p.fset.Position(obj.Pos()) - file = pos.Filename - line = pos.Line - } - return -} - -func commonPrefixLen(a, b string) int { - if len(a) > len(b) { - a, b = b, a - } - // len(a) <= len(b) - i := 0 - for i < len(a) && a[i] == b[i] { - i++ - } - return i -} - -func (p *exporter) qualifiedName(obj types.Object) { - p.string(obj.Name()) - p.pkg(obj.Pkg(), false) -} - -func (p *exporter) typ(t types.Type) { - if t == nil { - panic(internalError("nil type")) - } - - // Possible optimization: Anonymous pointer types *T where - // T is a named type are common. 
We could canonicalize all - // such types *T to a single type PT = *T. This would lead - // to at most one *T entry in typIndex, and all future *T's - // would be encoded as the respective index directly. Would - // save 1 byte (pointerTag) per *T and reduce the typIndex - // size (at the cost of a canonicalization map). We can do - // this later, without encoding format change. - - // if we saw the type before, write its index (>= 0) - if i, ok := p.typIndex[t]; ok { - p.index('T', i) - return - } - - // otherwise, remember the type, write the type tag (< 0) and type data - if trackAllTypes { - if trace { - p.tracef("T%d = {>\n", len(p.typIndex)) - defer p.tracef("<\n} ") - } - p.typIndex[t] = len(p.typIndex) - } - - switch t := t.(type) { - case *types.Named: - if !trackAllTypes { - // if we don't track all types, track named types now - p.typIndex[t] = len(p.typIndex) - } - - p.tag(namedTag) - p.pos(t.Obj()) - p.qualifiedName(t.Obj()) - p.typ(t.Underlying()) - if !types.IsInterface(t) { - p.assocMethods(t) - } - - case *types.Array: - p.tag(arrayTag) - p.int64(t.Len()) - p.typ(t.Elem()) - - case *types.Slice: - p.tag(sliceTag) - p.typ(t.Elem()) - - case *dddSlice: - p.tag(dddTag) - p.typ(t.elem) - - case *types.Struct: - p.tag(structTag) - p.fieldList(t) - - case *types.Pointer: - p.tag(pointerTag) - p.typ(t.Elem()) - - case *types.Signature: - p.tag(signatureTag) - p.paramList(t.Params(), t.Variadic()) - p.paramList(t.Results(), false) - - case *types.Interface: - p.tag(interfaceTag) - p.iface(t) - - case *types.Map: - p.tag(mapTag) - p.typ(t.Key()) - p.typ(t.Elem()) - - case *types.Chan: - p.tag(chanTag) - p.int(int(3 - t.Dir())) // hack - p.typ(t.Elem()) - - default: - panic(internalErrorf("unexpected type %T: %s", t, t)) - } -} - -func (p *exporter) assocMethods(named *types.Named) { - // Sort methods (for determinism). 
- var methods []*types.Func - for i := 0; i < named.NumMethods(); i++ { - methods = append(methods, named.Method(i)) - } - sort.Sort(methodsByName(methods)) - - p.int(len(methods)) - - if trace && methods != nil { - p.tracef("associated methods {>\n") - } - - for i, m := range methods { - if trace && i > 0 { - p.tracef("\n") - } - - p.pos(m) - name := m.Name() - p.string(name) - if !exported(name) { - p.pkg(m.Pkg(), false) - } - - sig := m.Type().(*types.Signature) - p.paramList(types.NewTuple(sig.Recv()), false) - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) - p.int(0) // dummy value for go:nointerface pragma - ignored by importer - } - - if trace && methods != nil { - p.tracef("<\n} ") - } -} - -type methodsByName []*types.Func - -func (x methodsByName) Len() int { return len(x) } -func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } -func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } - -func (p *exporter) fieldList(t *types.Struct) { - if trace && t.NumFields() > 0 { - p.tracef("fields {>\n") - defer p.tracef("<\n} ") - } - - p.int(t.NumFields()) - for i := 0; i < t.NumFields(); i++ { - if trace && i > 0 { - p.tracef("\n") - } - p.field(t.Field(i)) - p.string(t.Tag(i)) - } -} - -func (p *exporter) field(f *types.Var) { - if !f.IsField() { - panic(internalError("field expected")) - } - - p.pos(f) - p.fieldName(f) - p.typ(f.Type()) -} - -func (p *exporter) iface(t *types.Interface) { - // TODO(gri): enable importer to load embedded interfaces, - // then emit Embeddeds and ExplicitMethods separately here. 
- p.int(0) - - n := t.NumMethods() - if trace && n > 0 { - p.tracef("methods {>\n") - defer p.tracef("<\n} ") - } - p.int(n) - for i := 0; i < n; i++ { - if trace && i > 0 { - p.tracef("\n") - } - p.method(t.Method(i)) - } -} - -func (p *exporter) method(m *types.Func) { - sig := m.Type().(*types.Signature) - if sig.Recv() == nil { - panic(internalError("method expected")) - } - - p.pos(m) - p.string(m.Name()) - if m.Name() != "_" && !ast.IsExported(m.Name()) { - p.pkg(m.Pkg(), false) - } - - // interface method; no need to encode receiver. - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) -} - -func (p *exporter) fieldName(f *types.Var) { - name := f.Name() - - if f.Anonymous() { - // anonymous field - we distinguish between 3 cases: - // 1) field name matches base type name and is exported - // 2) field name matches base type name and is not exported - // 3) field name doesn't match base type name (alias name) - bname := basetypeName(f.Type()) - if name == bname { - if ast.IsExported(name) { - name = "" // 1) we don't need to know the field name or package - } else { - name = "?" // 2) use unexported name "?" 
to force package export - } - } else { - // 3) indicate alias and export name as is - // (this requires an extra "@" but this is a rare case) - p.string("@") - } - } - - p.string(name) - if name != "" && !ast.IsExported(name) { - p.pkg(f.Pkg(), false) - } -} - -func basetypeName(typ types.Type) string { - switch typ := deref(typ).(type) { - case *types.Basic: - return typ.Name() - case *types.Named: - return typ.Obj().Name() - default: - return "" // unnamed type - } -} - -func (p *exporter) paramList(params *types.Tuple, variadic bool) { - // use negative length to indicate unnamed parameters - // (look at the first parameter only since either all - // names are present or all are absent) - n := params.Len() - if n > 0 && params.At(0).Name() == "" { - n = -n - } - p.int(n) - for i := 0; i < params.Len(); i++ { - q := params.At(i) - t := q.Type() - if variadic && i == params.Len()-1 { - t = &dddSlice{t.(*types.Slice).Elem()} - } - p.typ(t) - if n > 0 { - name := q.Name() - p.string(name) - if name != "_" { - p.pkg(q.Pkg(), false) - } - } - p.string("") // no compiler-specific info - } -} - -func (p *exporter) value(x constant.Value) { - if trace { - p.tracef("= ") - } - - switch x.Kind() { - case constant.Bool: - tag := falseTag - if constant.BoolVal(x) { - tag = trueTag - } - p.tag(tag) - - case constant.Int: - if v, exact := constant.Int64Val(x); exact { - // common case: x fits into an int64 - use compact encoding - p.tag(int64Tag) - p.int64(v) - return - } - // uncommon case: large x - use float encoding - // (powers of 2 will be encoded efficiently with exponent) - p.tag(floatTag) - p.float(constant.ToFloat(x)) - - case constant.Float: - p.tag(floatTag) - p.float(x) - - case constant.Complex: - p.tag(complexTag) - p.float(constant.Real(x)) - p.float(constant.Imag(x)) - - case constant.String: - p.tag(stringTag) - p.string(constant.StringVal(x)) - - case constant.Unknown: - // package contains type errors - p.tag(unknownTag) - - default: - 
panic(internalErrorf("unexpected value %v (%T)", x, x)) - } -} - -func (p *exporter) float(x constant.Value) { - if x.Kind() != constant.Float { - panic(internalErrorf("unexpected constant %v, want float", x)) - } - // extract sign (there is no -0) - sign := constant.Sign(x) - if sign == 0 { - // x == 0 - p.int(0) - return - } - // x != 0 - - var f big.Float - if v, exact := constant.Float64Val(x); exact { - // float64 - f.SetFloat64(v) - } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { - // TODO(gri): add big.Rat accessor to constant.Value. - r := valueToRat(num) - f.SetRat(r.Quo(r, valueToRat(denom))) - } else { - // Value too large to represent as a fraction => inaccessible. - // TODO(gri): add big.Float accessor to constant.Value. - f.SetFloat64(math.MaxFloat64) // FIXME - } - - // extract exponent such that 0.5 <= m < 1.0 - var m big.Float - exp := f.MantExp(&m) - - // extract mantissa as *big.Int - // - set exponent large enough so mant satisfies mant.IsInt() - // - get *big.Int from mant - m.SetMantExp(&m, int(m.MinPrec())) - mant, acc := m.Int(nil) - if acc != big.Exact { - panic(internalError("internal error")) - } - - p.int(sign) - p.int(exp) - p.string(string(mant.Bytes())) -} - -func valueToRat(x constant.Value) *big.Rat { - // Convert little-endian to big-endian. - // I can't believe this is necessary. 
- bytes := constant.Bytes(x) - for i := 0; i < len(bytes)/2; i++ { - bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] - } - return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) -} - -func (p *exporter) bool(b bool) bool { - if trace { - p.tracef("[") - defer p.tracef("= %v] ", b) - } - - x := 0 - if b { - x = 1 - } - p.int(x) - return b -} - -// ---------------------------------------------------------------------------- -// Low-level encoders - -func (p *exporter) index(marker byte, index int) { - if index < 0 { - panic(internalError("invalid index < 0")) - } - if debugFormat { - p.marker('t') - } - if trace { - p.tracef("%c%d ", marker, index) - } - p.rawInt64(int64(index)) -} - -func (p *exporter) tag(tag int) { - if tag >= 0 { - panic(internalError("invalid tag >= 0")) - } - if debugFormat { - p.marker('t') - } - if trace { - p.tracef("%s ", tagString[-tag]) - } - p.rawInt64(int64(tag)) -} - -func (p *exporter) int(x int) { - p.int64(int64(x)) -} - -func (p *exporter) int64(x int64) { - if debugFormat { - p.marker('i') - } - if trace { - p.tracef("%d ", x) - } - p.rawInt64(x) -} - -func (p *exporter) string(s string) { - if debugFormat { - p.marker('s') - } - if trace { - p.tracef("%q ", s) - } - // if we saw the string before, write its index (>= 0) - // (the empty string is mapped to 0) - if i, ok := p.strIndex[s]; ok { - p.rawInt64(int64(i)) - return - } - // otherwise, remember string and write its negative length and bytes - p.strIndex[s] = len(p.strIndex) - p.rawInt64(-int64(len(s))) - for i := 0; i < len(s); i++ { - p.rawByte(s[i]) - } -} - -// marker emits a marker byte and position information which makes -// it easy for a reader to detect if it is "out of sync". Used for -// debugFormat format only. -func (p *exporter) marker(m byte) { - p.rawByte(m) - // Enable this for help tracking down the location - // of an incorrect marker when running in debugFormat. 
- if false && trace { - p.tracef("#%d ", p.written) - } - p.rawInt64(int64(p.written)) -} - -// rawInt64 should only be used by low-level encoders. -func (p *exporter) rawInt64(x int64) { - var tmp [binary.MaxVarintLen64]byte - n := binary.PutVarint(tmp[:], x) - for i := 0; i < n; i++ { - p.rawByte(tmp[i]) - } -} - -// rawStringln should only be used to emit the initial version string. -func (p *exporter) rawStringln(s string) { - for i := 0; i < len(s); i++ { - p.rawByte(s[i]) - } - p.rawByte('\n') -} - -// rawByte is the bottleneck interface to write to p.out. -// rawByte escapes b as follows (any encoding does that -// hides '$'): -// -// '$' => '|' 'S' -// '|' => '|' '|' -// -// Necessary so other tools can find the end of the -// export data by searching for "$$". -// rawByte should only be used by low-level encoders. -func (p *exporter) rawByte(b byte) { - switch b { - case '$': - // write '$' as '|' 'S' - b = 'S' - fallthrough - case '|': - // write '|' as '|' '|' - p.out.WriteByte('|') - p.written++ - } - p.out.WriteByte(b) - p.written++ -} - -// tracef is like fmt.Printf but it rewrites the format string -// to take care of indentation. -func (p *exporter) tracef(format string, args ...interface{}) { - if strings.ContainsAny(format, "<>\n") { - var buf bytes.Buffer - for i := 0; i < len(format); i++ { - // no need to deal with runes - ch := format[i] - switch ch { - case '>': - p.indent++ - continue - case '<': - p.indent-- - continue - } - buf.WriteByte(ch) - if ch == '\n' { - for j := p.indent; j > 0; j-- { - buf.WriteString(". ") - } - } - } - format = buf.String() - } - fmt.Printf(format, args...) -} - -// Debugging support. 
-// (tagString is only used when tracing is enabled) -var tagString = [...]string{ - // Packages - -packageTag: "package", - - // Types - -namedTag: "named type", - -arrayTag: "array", - -sliceTag: "slice", - -dddTag: "ddd", - -structTag: "struct", - -pointerTag: "pointer", - -signatureTag: "signature", - -interfaceTag: "interface", - -mapTag: "map", - -chanTag: "chan", - - // Values - -falseTag: "false", - -trueTag: "true", - -int64Tag: "int64", - -floatTag: "float", - -fractionTag: "fraction", - -complexTag: "complex", - -stringTag: "string", - -unknownTag: "unknown", - - // Type aliases - -aliasTag: "alias", -} diff --git a/internal/go/gcimporter/bexport_test.go b/internal/go/gcimporter/bexport_test.go deleted file mode 100644 index 9e1d1c5d4..000000000 --- a/internal/go/gcimporter/bexport_test.go +++ /dev/null @@ -1,420 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gcimporter_test - -import ( - "fmt" - "go/ast" - "go/build" - "go/constant" - "go/parser" - "go/token" - "go/types" - "reflect" - "runtime" - "strings" - "testing" - - "honnef.co/go/tools/internal/go/gcimporter" - - "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/loader" -) - -var isRace = false - -func TestBExportData_stdlib(t *testing.T) { - if runtime.Compiler == "gccgo" { - t.Skip("gccgo standard library is inaccessible") - } - if runtime.GOOS == "android" { - t.Skipf("incomplete std lib on %s", runtime.GOOS) - } - if isRace { - t.Skipf("stdlib tests take too long in race mode and flake on builders") - } - - // Load, parse and type-check the program. 
- ctxt := build.Default // copy - ctxt.GOPATH = "" // disable GOPATH - conf := loader.Config{ - Build: &ctxt, - AllowErrors: true, - } - for _, path := range buildutil.AllPackages(conf.Build) { - conf.Import(path) - } - - // Create a package containing type and value errors to ensure - // they are properly encoded/decoded. - f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors -const UnknownValue = "" + 0 -type UnknownType undefined -`) - if err != nil { - t.Fatal(err) - } - conf.CreateFromFiles("haserrors", f) - - prog, err := conf.Load() - if err != nil { - t.Fatalf("Load failed: %v", err) - } - - numPkgs := len(prog.AllPackages) - if want := 248; numPkgs < want { - t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) - } - - for pkg, info := range prog.AllPackages { - if info.Files == nil { - continue // empty directory - } - exportdata, err := gcimporter.BExportData(conf.Fset, pkg) - if err != nil { - t.Fatal(err) - } - - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - n, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) - if err != nil { - t.Errorf("BImportData(%s): %v", pkg.Path(), err) - continue - } - if n != len(exportdata) { - t.Errorf("BImportData(%s) decoded %d bytes, want %d", - pkg.Path(), n, len(exportdata)) - } - - // Compare the packages' corresponding members. 
- for _, name := range pkg.Scope().Names() { - if !ast.IsExported(name) { - continue - } - obj1 := pkg.Scope().Lookup(name) - obj2 := pkg2.Scope().Lookup(name) - if obj2 == nil { - t.Errorf("%s.%s not found, want %s", pkg.Path(), name, obj1) - continue - } - - fl1 := fileLine(conf.Fset, obj1) - fl2 := fileLine(fset2, obj2) - if fl1 != fl2 { - t.Errorf("%s.%s: got posn %s, want %s", - pkg.Path(), name, fl2, fl1) - } - - if err := equalObj(obj1, obj2); err != nil { - t.Errorf("%s.%s: %s\ngot: %s\nwant: %s", - pkg.Path(), name, err, obj2, obj1) - } - } - } -} - -func fileLine(fset *token.FileSet, obj types.Object) string { - posn := fset.Position(obj.Pos()) - return fmt.Sprintf("%s:%d", posn.Filename, posn.Line) -} - -// equalObj reports how x and y differ. They are assumed to belong to -// different universes so cannot be compared directly. -func equalObj(x, y types.Object) error { - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return fmt.Errorf("%T vs %T", x, y) - } - xt := x.Type() - yt := y.Type() - switch x.(type) { - case *types.Var, *types.Func: - // ok - case *types.Const: - xval := x.(*types.Const).Val() - yval := y.(*types.Const).Val() - // Use string comparison for floating-point values since rounding is permitted. - if constant.Compare(xval, token.NEQ, yval) && - !(xval.Kind() == constant.Float && xval.String() == yval.String()) { - return fmt.Errorf("unequal constants %s vs %s", xval, yval) - } - case *types.TypeName: - xt = xt.Underlying() - yt = yt.Underlying() - default: - return fmt.Errorf("unexpected %T", x) - } - return equalType(xt, yt) -} - -func equalType(x, y types.Type) error { - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return fmt.Errorf("unequal kinds: %T vs %T", x, y) - } - switch x := x.(type) { - case *types.Interface: - y := y.(*types.Interface) - // TODO(gri): enable separate emission of Embedded interfaces - // and ExplicitMethods then use this logic. 
- // if x.NumEmbeddeds() != y.NumEmbeddeds() { - // return fmt.Errorf("unequal number of embedded interfaces: %d vs %d", - // x.NumEmbeddeds(), y.NumEmbeddeds()) - // } - // for i := 0; i < x.NumEmbeddeds(); i++ { - // xi := x.Embedded(i) - // yi := y.Embedded(i) - // if xi.String() != yi.String() { - // return fmt.Errorf("mismatched %th embedded interface: %s vs %s", - // i, xi, yi) - // } - // } - // if x.NumExplicitMethods() != y.NumExplicitMethods() { - // return fmt.Errorf("unequal methods: %d vs %d", - // x.NumExplicitMethods(), y.NumExplicitMethods()) - // } - // for i := 0; i < x.NumExplicitMethods(); i++ { - // xm := x.ExplicitMethod(i) - // ym := y.ExplicitMethod(i) - // if xm.Name() != ym.Name() { - // return fmt.Errorf("mismatched %th method: %s vs %s", i, xm, ym) - // } - // if err := equalType(xm.Type(), ym.Type()); err != nil { - // return fmt.Errorf("mismatched %s method: %s", xm.Name(), err) - // } - // } - if x.NumMethods() != y.NumMethods() { - return fmt.Errorf("unequal methods: %d vs %d", - x.NumMethods(), y.NumMethods()) - } - for i := 0; i < x.NumMethods(); i++ { - xm := x.Method(i) - ym := y.Method(i) - if xm.Name() != ym.Name() { - return fmt.Errorf("mismatched %dth method: %s vs %s", i, xm, ym) - } - if err := equalType(xm.Type(), ym.Type()); err != nil { - return fmt.Errorf("mismatched %s method: %s", xm.Name(), err) - } - } - case *types.Array: - y := y.(*types.Array) - if x.Len() != y.Len() { - return fmt.Errorf("unequal array lengths: %d vs %d", x.Len(), y.Len()) - } - if err := equalType(x.Elem(), y.Elem()); err != nil { - return fmt.Errorf("array elements: %s", err) - } - case *types.Basic: - y := y.(*types.Basic) - if x.Kind() != y.Kind() { - return fmt.Errorf("unequal basic types: %s vs %s", x, y) - } - case *types.Chan: - y := y.(*types.Chan) - if x.Dir() != y.Dir() { - return fmt.Errorf("unequal channel directions: %d vs %d", x.Dir(), y.Dir()) - } - if err := equalType(x.Elem(), y.Elem()); err != nil { - return 
fmt.Errorf("channel elements: %s", err) - } - case *types.Map: - y := y.(*types.Map) - if err := equalType(x.Key(), y.Key()); err != nil { - return fmt.Errorf("map keys: %s", err) - } - if err := equalType(x.Elem(), y.Elem()); err != nil { - return fmt.Errorf("map values: %s", err) - } - case *types.Named: - y := y.(*types.Named) - if x.String() != y.String() { - return fmt.Errorf("unequal named types: %s vs %s", x, y) - } - case *types.Pointer: - y := y.(*types.Pointer) - if err := equalType(x.Elem(), y.Elem()); err != nil { - return fmt.Errorf("pointer elements: %s", err) - } - case *types.Signature: - y := y.(*types.Signature) - if err := equalType(x.Params(), y.Params()); err != nil { - return fmt.Errorf("parameters: %s", err) - } - if err := equalType(x.Results(), y.Results()); err != nil { - return fmt.Errorf("results: %s", err) - } - if x.Variadic() != y.Variadic() { - return fmt.Errorf("unequal variadicity: %t vs %t", - x.Variadic(), y.Variadic()) - } - if (x.Recv() != nil) != (y.Recv() != nil) { - return fmt.Errorf("unequal receivers: %s vs %s", x.Recv(), y.Recv()) - } - if x.Recv() != nil { - // TODO(adonovan): fix: this assertion fires for interface methods. - // The type of the receiver of an interface method is a named type - // if the Package was loaded from export data, or an unnamed (interface) - // type if the Package was produced by type-checking ASTs. 
- // if err := equalType(x.Recv().Type(), y.Recv().Type()); err != nil { - // return fmt.Errorf("receiver: %s", err) - // } - } - case *types.Slice: - y := y.(*types.Slice) - if err := equalType(x.Elem(), y.Elem()); err != nil { - return fmt.Errorf("slice elements: %s", err) - } - case *types.Struct: - y := y.(*types.Struct) - if x.NumFields() != y.NumFields() { - return fmt.Errorf("unequal struct fields: %d vs %d", - x.NumFields(), y.NumFields()) - } - for i := 0; i < x.NumFields(); i++ { - xf := x.Field(i) - yf := y.Field(i) - if xf.Name() != yf.Name() { - return fmt.Errorf("mismatched fields: %s vs %s", xf, yf) - } - if err := equalType(xf.Type(), yf.Type()); err != nil { - return fmt.Errorf("struct field %s: %s", xf.Name(), err) - } - if x.Tag(i) != y.Tag(i) { - return fmt.Errorf("struct field %s has unequal tags: %q vs %q", - xf.Name(), x.Tag(i), y.Tag(i)) - } - } - case *types.Tuple: - y := y.(*types.Tuple) - if x.Len() != y.Len() { - return fmt.Errorf("unequal tuple lengths: %d vs %d", x.Len(), y.Len()) - } - for i := 0; i < x.Len(); i++ { - if err := equalType(x.At(i).Type(), y.At(i).Type()); err != nil { - return fmt.Errorf("tuple element %d: %s", i, err) - } - } - } - return nil -} - -// TestVeryLongFile tests the position of an import object declared in -// a very long input file. Line numbers greater than maxlines are -// reported as line 1, not garbage or token.NoPos. 
-func TestVeryLongFile(t *testing.T) { - // parse and typecheck - longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" - fset1 := token.NewFileSet() - f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) - if err != nil { - t.Fatal(err) - } - var conf types.Config - pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) - if err != nil { - t.Fatal(err) - } - - // export - exportdata, err := gcimporter.BExportData(fset1, pkg) - if err != nil { - t.Fatal(err) - } - - // import - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg.Path()) - if err != nil { - t.Fatalf("BImportData(%s): %v", pkg.Path(), err) - } - - // compare - posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) - posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) - if want := "foo.go:1:1"; posn2.String() != want { - t.Errorf("X position = %s, want %s (orig was %s)", - posn2, want, posn1) - } -} - -const src = ` -package p - -type ( - T0 = int32 - T1 = struct{} - T2 = struct{ T1 } - Invalid = foo // foo is undeclared -) -` - -func checkPkg(t *testing.T, pkg *types.Package, label string) { - T1 := types.NewStruct(nil, nil) - T2 := types.NewStruct([]*types.Var{types.NewField(0, pkg, "T1", T1, true)}, nil) - - for _, test := range []struct { - name string - typ types.Type - }{ - {"T0", types.Typ[types.Int32]}, - {"T1", T1}, - {"T2", T2}, - {"Invalid", types.Typ[types.Invalid]}, - } { - obj := pkg.Scope().Lookup(test.name) - if obj == nil { - t.Errorf("%s: %s not found", label, test.name) - continue - } - tname, _ := obj.(*types.TypeName) - if tname == nil { - t.Errorf("%s: %v not a type name", label, obj) - continue - } - if !tname.IsAlias() { - t.Errorf("%s: %v: not marked as alias", label, tname) - continue - } - if got := tname.Type(); !types.Identical(got, test.typ) { - t.Errorf("%s: %v: got %v; want %v", label, tname, got, test.typ) - } - } -} - -func TestTypeAliases(t 
*testing.T) { - // parse and typecheck - fset1 := token.NewFileSet() - f, err := parser.ParseFile(fset1, "p.go", src, 0) - if err != nil { - t.Fatal(err) - } - var conf types.Config - pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil) - if err == nil { - // foo in undeclared in src; we should see an error - t.Fatal("invalid source type-checked without error") - } - if pkg1 == nil { - // despite incorrect src we should see a (partially) type-checked package - t.Fatal("nil package returned") - } - checkPkg(t, pkg1, "export") - - // export - exportdata, err := gcimporter.BExportData(fset1, pkg1) - if err != nil { - t.Fatal(err) - } - - // import - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - _, pkg2, err := gcimporter.BImportData(fset2, imports, exportdata, pkg1.Path()) - if err != nil { - t.Fatalf("BImportData(%s): %v", pkg1.Path(), err) - } - checkPkg(t, pkg2, "import") -} diff --git a/internal/go/gcimporter/iexport.go b/internal/go/gcimporter/iexport.go deleted file mode 100644 index 4be32a2e5..000000000 --- a/internal/go/gcimporter/iexport.go +++ /dev/null @@ -1,739 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Indexed binary package export. -// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; -// see that file for specification of the format. - -package gcimporter - -import ( - "bytes" - "encoding/binary" - "go/ast" - "go/constant" - "go/token" - "go/types" - "io" - "math/big" - "reflect" - "sort" -) - -// Current indexed export format version. Increase with each format change. -// 0: Go1.11 encoding -const iexportVersion = 0 - -// IExportData returns the binary export data for pkg. -// -// If no file set is provided, position info will be missing. 
-// The package path of the top-level package will not be recorded, -// so that calls to IImportData can override with a provided package path. -func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { - defer func() { - if e := recover(); e != nil { - if ierr, ok := e.(internalError); ok { - err = ierr - return - } - // Not an internal error; panic again. - panic(e) - } - }() - - p := iexporter{ - out: bytes.NewBuffer(nil), - fset: fset, - allPkgs: map[*types.Package]bool{}, - stringIndex: map[string]uint64{}, - declIndex: map[types.Object]uint64{}, - typIndex: map[types.Type]uint64{}, - localpkg: pkg, - } - - for i, pt := range predeclared() { - p.typIndex[pt] = uint64(i) - } - if len(p.typIndex) > predeclReserved { - panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)) - } - - // Initialize work queue with exported declarations. - scope := pkg.Scope() - for _, name := range scope.Names() { - if ast.IsExported(name) { - p.pushDecl(scope.Lookup(name)) - } - } - - // Loop until no more work. - for !p.declTodo.empty() { - p.doDecl(p.declTodo.popHead()) - } - - // Append indices to data0 section. - dataLen := uint64(p.data0.Len()) - w := p.newWriter() - w.writeIndex(p.declIndex) - w.flush() - - // Assemble header. - var hdr intWriter - hdr.WriteByte('i') - hdr.uint64(iexportVersion) - hdr.uint64(uint64(p.strings.Len())) - hdr.uint64(dataLen) - - // Flush output. - io.Copy(p.out, &hdr) - io.Copy(p.out, &p.strings) - io.Copy(p.out, &p.data0) - - return p.out.Bytes(), nil -} - -// writeIndex writes out an object index. mainIndex indicates whether -// we're writing out the main index, which is also read by -// non-compiler tools and includes a complete package description -// (i.e., name and height). -func (w *exportWriter) writeIndex(index map[types.Object]uint64) { - // Build a map from packages to objects from that package. 
- pkgObjs := map[*types.Package][]types.Object{} - - // For the main index, make sure to include every package that - // we reference, even if we're not exporting (or reexporting) - // any symbols from it. - pkgObjs[w.p.localpkg] = nil - for pkg := range w.p.allPkgs { - pkgObjs[pkg] = nil - } - - for obj := range index { - pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj) - } - - var pkgs []*types.Package - for pkg, objs := range pkgObjs { - pkgs = append(pkgs, pkg) - - sort.Slice(objs, func(i, j int) bool { - return objs[i].Name() < objs[j].Name() - }) - } - - sort.Slice(pkgs, func(i, j int) bool { - return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j]) - }) - - w.uint64(uint64(len(pkgs))) - for _, pkg := range pkgs { - w.string(w.exportPath(pkg)) - w.string(pkg.Name()) - w.uint64(uint64(0)) // package height is not needed for go/types - - objs := pkgObjs[pkg] - w.uint64(uint64(len(objs))) - for _, obj := range objs { - w.string(obj.Name()) - w.uint64(index[obj]) - } - } -} - -type iexporter struct { - fset *token.FileSet - out *bytes.Buffer - - localpkg *types.Package - - // allPkgs tracks all packages that have been referenced by - // the export data, so we can ensure to include them in the - // main index. - allPkgs map[*types.Package]bool - - declTodo objQueue - - strings intWriter - stringIndex map[string]uint64 - - data0 intWriter - declIndex map[types.Object]uint64 - typIndex map[types.Type]uint64 -} - -// stringOff returns the offset of s within the string section. -// If not already present, it's added to the end. -func (p *iexporter) stringOff(s string) uint64 { - off, ok := p.stringIndex[s] - if !ok { - off = uint64(p.strings.Len()) - p.stringIndex[s] = off - - p.strings.uint64(uint64(len(s))) - p.strings.WriteString(s) - } - return off -} - -// pushDecl adds n to the declaration work queue, if not already present. -func (p *iexporter) pushDecl(obj types.Object) { - // Package unsafe is known to the compiler and predeclared. 
- assert(obj.Pkg() != types.Unsafe) - - if _, ok := p.declIndex[obj]; ok { - return - } - - p.declIndex[obj] = ^uint64(0) // mark n present in work queue - p.declTodo.pushTail(obj) -} - -// exportWriter handles writing out individual data section chunks. -type exportWriter struct { - p *iexporter - - data intWriter - currPkg *types.Package - prevFile string - prevLine int64 -} - -func (w *exportWriter) exportPath(pkg *types.Package) string { - if pkg == w.p.localpkg { - return "" - } - return pkg.Path() -} - -func (p *iexporter) doDecl(obj types.Object) { - w := p.newWriter() - w.setPkg(obj.Pkg(), false) - - switch obj := obj.(type) { - case *types.Var: - w.tag('V') - w.pos(obj.Pos()) - w.typ(obj.Type(), obj.Pkg()) - - case *types.Func: - sig, _ := obj.Type().(*types.Signature) - if sig.Recv() != nil { - panic(internalErrorf("unexpected method: %v", sig)) - } - w.tag('F') - w.pos(obj.Pos()) - w.signature(sig) - - case *types.Const: - w.tag('C') - w.pos(obj.Pos()) - w.value(obj.Type(), obj.Val()) - - case *types.TypeName: - if obj.IsAlias() { - w.tag('A') - w.pos(obj.Pos()) - w.typ(obj.Type(), obj.Pkg()) - break - } - - // Defined type. 
- w.tag('T') - w.pos(obj.Pos()) - - underlying := obj.Type().Underlying() - w.typ(underlying, obj.Pkg()) - - t := obj.Type() - if types.IsInterface(t) { - break - } - - named, ok := t.(*types.Named) - if !ok { - panic(internalErrorf("%s is not a defined type", t)) - } - - n := named.NumMethods() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - m := named.Method(i) - w.pos(m.Pos()) - w.string(m.Name()) - sig, _ := m.Type().(*types.Signature) - w.param(sig.Recv()) - w.signature(sig) - } - - default: - panic(internalErrorf("unexpected object: %v", obj)) - } - - p.declIndex[obj] = w.flush() -} - -func (w *exportWriter) tag(tag byte) { - w.data.WriteByte(tag) -} - -func (w *exportWriter) pos(pos token.Pos) { - if w.p.fset == nil { - w.int64(0) - return - } - - p := w.p.fset.Position(pos) - file := p.Filename - line := int64(p.Line) - - // When file is the same as the last position (common case), - // we can save a few bytes by delta encoding just the line - // number. - // - // Note: Because data objects may be read out of order (or not - // at all), we can only apply delta encoding within a single - // object. This is handled implicitly by tracking prevFile and - // prevLine as fields of exportWriter. - - if file == w.prevFile { - delta := line - w.prevLine - w.int64(delta) - if delta == deltaNewFile { - w.int64(-1) - } - } else { - w.int64(deltaNewFile) - w.int64(line) // line >= 0 - w.string(file) - w.prevFile = file - } - w.prevLine = line -} - -func (w *exportWriter) pkg(pkg *types.Package) { - // Ensure any referenced packages are declared in the main index. - w.p.allPkgs[pkg] = true - - w.string(w.exportPath(pkg)) -} - -func (w *exportWriter) qualifiedIdent(obj types.Object) { - // Ensure any referenced declarations are written out too. 
- w.p.pushDecl(obj) - - w.string(obj.Name()) - w.pkg(obj.Pkg()) -} - -func (w *exportWriter) typ(t types.Type, pkg *types.Package) { - w.data.uint64(w.p.typOff(t, pkg)) -} - -func (p *iexporter) newWriter() *exportWriter { - return &exportWriter{p: p} -} - -func (w *exportWriter) flush() uint64 { - off := uint64(w.p.data0.Len()) - io.Copy(&w.p.data0, &w.data) - return off -} - -func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 { - off, ok := p.typIndex[t] - if !ok { - w := p.newWriter() - w.doTyp(t, pkg) - off = predeclReserved + w.flush() - p.typIndex[t] = off - } - return off -} - -func (w *exportWriter) startType(k itag) { - w.data.uint64(uint64(k)) -} - -func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { - switch t := t.(type) { - case *types.Named: - w.startType(definedType) - w.qualifiedIdent(t.Obj()) - - case *types.Pointer: - w.startType(pointerType) - w.typ(t.Elem(), pkg) - - case *types.Slice: - w.startType(sliceType) - w.typ(t.Elem(), pkg) - - case *types.Array: - w.startType(arrayType) - w.uint64(uint64(t.Len())) - w.typ(t.Elem(), pkg) - - case *types.Chan: - w.startType(chanType) - // 1 RecvOnly; 2 SendOnly; 3 SendRecv - var dir uint64 - switch t.Dir() { - case types.RecvOnly: - dir = 1 - case types.SendOnly: - dir = 2 - case types.SendRecv: - dir = 3 - } - w.uint64(dir) - w.typ(t.Elem(), pkg) - - case *types.Map: - w.startType(mapType) - w.typ(t.Key(), pkg) - w.typ(t.Elem(), pkg) - - case *types.Signature: - w.startType(signatureType) - w.setPkg(pkg, true) - w.signature(t) - - case *types.Struct: - w.startType(structType) - w.setPkg(pkg, true) - - n := t.NumFields() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - f := t.Field(i) - w.pos(f.Pos()) - w.string(f.Name()) - w.typ(f.Type(), pkg) - w.bool(f.Anonymous()) - w.string(t.Tag(i)) // note (or tag) - } - - case *types.Interface: - w.startType(interfaceType) - w.setPkg(pkg, true) - - n := t.NumEmbeddeds() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - f := 
t.Embedded(i) - w.pos(f.Obj().Pos()) - w.typ(f.Obj().Type(), f.Obj().Pkg()) - } - - n = t.NumExplicitMethods() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - m := t.ExplicitMethod(i) - w.pos(m.Pos()) - w.string(m.Name()) - sig, _ := m.Type().(*types.Signature) - w.signature(sig) - } - - default: - panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t))) - } -} - -func (w *exportWriter) setPkg(pkg *types.Package, write bool) { - if write { - w.pkg(pkg) - } - - w.currPkg = pkg -} - -func (w *exportWriter) signature(sig *types.Signature) { - w.paramList(sig.Params()) - w.paramList(sig.Results()) - if sig.Params().Len() > 0 { - w.bool(sig.Variadic()) - } -} - -func (w *exportWriter) paramList(tup *types.Tuple) { - n := tup.Len() - w.uint64(uint64(n)) - for i := 0; i < n; i++ { - w.param(tup.At(i)) - } -} - -func (w *exportWriter) param(obj types.Object) { - w.pos(obj.Pos()) - w.localIdent(obj) - w.typ(obj.Type(), obj.Pkg()) -} - -func (w *exportWriter) value(typ types.Type, v constant.Value) { - w.typ(typ, nil) - - switch v.Kind() { - case constant.Bool: - w.bool(constant.BoolVal(v)) - case constant.Int: - var i big.Int - if i64, exact := constant.Int64Val(v); exact { - i.SetInt64(i64) - } else if ui64, exact := constant.Uint64Val(v); exact { - i.SetUint64(ui64) - } else { - i.SetString(v.ExactString(), 10) - } - w.mpint(&i, typ) - case constant.Float: - f := constantToFloat(v) - w.mpfloat(f, typ) - case constant.Complex: - w.mpfloat(constantToFloat(constant.Real(v)), typ) - w.mpfloat(constantToFloat(constant.Imag(v)), typ) - case constant.String: - w.string(constant.StringVal(v)) - case constant.Unknown: - // package contains type errors - default: - panic(internalErrorf("unexpected value %v (%T)", v, v)) - } -} - -// constantToFloat converts a constant.Value with kind constant.Float to a -// big.Float. 
-func constantToFloat(x constant.Value) *big.Float { - assert(x.Kind() == constant.Float) - // Use the same floating-point precision (512) as cmd/compile - // (see Mpprec in cmd/compile/internal/gc/mpfloat.go). - const mpprec = 512 - var f big.Float - f.SetPrec(mpprec) - if v, exact := constant.Float64Val(x); exact { - // float64 - f.SetFloat64(v) - } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { - // TODO(gri): add big.Rat accessor to constant.Value. - n := valueToRat(num) - d := valueToRat(denom) - f.SetRat(n.Quo(n, d)) - } else { - // Value too large to represent as a fraction => inaccessible. - // TODO(gri): add big.Float accessor to constant.Value. - _, ok := f.SetString(x.ExactString()) - assert(ok) - } - return &f -} - -// mpint exports a multi-precision integer. -// -// For unsigned types, small values are written out as a single -// byte. Larger values are written out as a length-prefixed big-endian -// byte string, where the length prefix is encoded as its complement. -// For example, bytes 0, 1, and 2 directly represent the integer -// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-, -// 2-, and 3-byte big-endian string follow. -// -// Encoding for signed types use the same general approach as for -// unsigned types, except small values use zig-zag encoding and the -// bottom bit of length prefix byte for large values is reserved as a -// sign bit. -// -// The exact boundary between small and large encodings varies -// according to the maximum number of bytes needed to encode a value -// of type typ. As a special case, 8-bit types are always encoded as a -// single byte. -// -// TODO(mdempsky): Is this level of complexity really worthwhile? 
-func (w *exportWriter) mpint(x *big.Int, typ types.Type) { - basic, ok := typ.Underlying().(*types.Basic) - if !ok { - panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying())) - } - - signed, maxBytes := intSize(basic) - - negative := x.Sign() < 0 - if !signed && negative { - panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x)) - } - - b := x.Bytes() - if len(b) > 0 && b[0] == 0 { - panic(internalErrorf("leading zeros")) - } - if uint(len(b)) > maxBytes { - panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)) - } - - maxSmall := 256 - maxBytes - if signed { - maxSmall = 256 - 2*maxBytes - } - if maxBytes == 1 { - maxSmall = 256 - } - - // Check if x can use small value encoding. - if len(b) <= 1 { - var ux uint - if len(b) == 1 { - ux = uint(b[0]) - } - if signed { - ux <<= 1 - if negative { - ux-- - } - } - if ux < maxSmall { - w.data.WriteByte(byte(ux)) - return - } - } - - n := 256 - uint(len(b)) - if signed { - n = 256 - 2*uint(len(b)) - if negative { - n |= 1 - } - } - if n < maxSmall || n >= 256 { - panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)) - } - - w.data.WriteByte(byte(n)) - w.data.Write(b) -} - -// mpfloat exports a multi-precision floating point number. -// -// The number's value is decomposed into mantissa × 2**exponent, where -// mantissa is an integer. The value is written out as mantissa (as a -// multi-precision integer) and then the exponent, except exponent is -// omitted if mantissa is zero. -func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) { - if f.IsInf() { - panic("infinite constant") - } - - // Break into f = mant × 2**exp, with 0.5 <= mant < 1. - var mant big.Float - exp := int64(f.MantExp(&mant)) - - // Scale so that mant is an integer. 
- prec := mant.MinPrec() - mant.SetMantExp(&mant, int(prec)) - exp -= int64(prec) - - manti, acc := mant.Int(nil) - if acc != big.Exact { - panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc)) - } - w.mpint(manti, typ) - if manti.Sign() != 0 { - w.int64(exp) - } -} - -func (w *exportWriter) bool(b bool) bool { - var x uint64 - if b { - x = 1 - } - w.uint64(x) - return b -} - -func (w *exportWriter) int64(x int64) { w.data.int64(x) } -func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) } -func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) } - -func (w *exportWriter) localIdent(obj types.Object) { - // Anonymous parameters. - if obj == nil { - w.string("") - return - } - - name := obj.Name() - if name == "_" { - w.string("_") - return - } - - w.string(name) -} - -type intWriter struct { - bytes.Buffer -} - -func (w *intWriter) int64(x int64) { - var buf [binary.MaxVarintLen64]byte - n := binary.PutVarint(buf[:], x) - w.Write(buf[:n]) -} - -func (w *intWriter) uint64(x uint64) { - var buf [binary.MaxVarintLen64]byte - n := binary.PutUvarint(buf[:], x) - w.Write(buf[:n]) -} - -func assert(cond bool) { - if !cond { - panic("internal error: assertion failed") - } -} - -// The below is copied from go/src/cmd/compile/internal/gc/syntax.go. - -// objQueue is a FIFO queue of types.Object. The zero value of objQueue is -// a ready-to-use empty queue. -type objQueue struct { - ring []types.Object - head, tail int -} - -// empty returns true if q contains no Nodes. -func (q *objQueue) empty() bool { - return q.head == q.tail -} - -// pushTail appends n to the tail of the queue. -func (q *objQueue) pushTail(obj types.Object) { - if len(q.ring) == 0 { - q.ring = make([]types.Object, 16) - } else if q.head+len(q.ring) == q.tail { - // Grow the ring. - nring := make([]types.Object, len(q.ring)*2) - // Copy the old elements. 
- part := q.ring[q.head%len(q.ring):] - if q.tail-q.head <= len(part) { - part = part[:q.tail-q.head] - copy(nring, part) - } else { - pos := copy(nring, part) - copy(nring[pos:], q.ring[:q.tail%len(q.ring)]) - } - q.ring, q.head, q.tail = nring, 0, q.tail-q.head - } - - q.ring[q.tail%len(q.ring)] = obj - q.tail++ -} - -// popHead pops a node from the head of the queue. It panics if q is empty. -func (q *objQueue) popHead() types.Object { - if q.empty() { - panic("dequeue empty") - } - obj := q.ring[q.head%len(q.ring)] - q.head++ - return obj -} diff --git a/internal/go/gcimporter/iexport_test.go b/internal/go/gcimporter/iexport_test.go deleted file mode 100644 index 2145f994c..000000000 --- a/internal/go/gcimporter/iexport_test.go +++ /dev/null @@ -1,310 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This is a copy of bexport_test.go for iexport.go. - -// +build go1.11 - -package gcimporter_test - -import ( - "fmt" - "go/ast" - "go/build" - "go/constant" - "go/parser" - "go/token" - "go/types" - "math/big" - "reflect" - "runtime" - "sort" - "strings" - "testing" - - "honnef.co/go/tools/internal/go/gcimporter" - - "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/loader" -) - -func TestIExportData_stdlib(t *testing.T) { - if runtime.Compiler == "gccgo" { - t.Skip("gccgo standard library is inaccessible") - } - if runtime.GOOS == "android" { - t.Skipf("incomplete std lib on %s", runtime.GOOS) - } - if isRace { - t.Skipf("stdlib tests take too long in race mode and flake on builders") - } - - // Load, parse and type-check the program. 
- ctxt := build.Default // copy - ctxt.GOPATH = "" // disable GOPATH - conf := loader.Config{ - Build: &ctxt, - AllowErrors: true, - } - for _, path := range buildutil.AllPackages(conf.Build) { - conf.Import(path) - } - - // Create a package containing type and value errors to ensure - // they are properly encoded/decoded. - f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors -const UnknownValue = "" + 0 -type UnknownType undefined -`) - if err != nil { - t.Fatal(err) - } - conf.CreateFromFiles("haserrors", f) - - prog, err := conf.Load() - if err != nil { - t.Fatalf("Load failed: %v", err) - } - - numPkgs := len(prog.AllPackages) - if want := 248; numPkgs < want { - t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want) - } - - var sorted []*types.Package - for pkg := range prog.AllPackages { - sorted = append(sorted, pkg) - } - sort.Slice(sorted, func(i, j int) bool { - return sorted[i].Path() < sorted[j].Path() - }) - - for _, pkg := range sorted { - info := prog.AllPackages[pkg] - if info.Files == nil { - continue // empty directory - } - exportdata, err := gcimporter.IExportData(conf.Fset, pkg) - if err != nil { - t.Fatal(err) - } - if exportdata[0] == 'i' { - exportdata = exportdata[1:] // trim the 'i' in the header - } else { - t.Fatalf("unexpected first character of export data: %v", exportdata[0]) - } - - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - n, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path()) - if err != nil { - t.Errorf("IImportData(%s): %v", pkg.Path(), err) - continue - } - if n != len(exportdata) { - t.Errorf("IImportData(%s) decoded %d bytes, want %d", - pkg.Path(), n, len(exportdata)) - } - - // Compare the packages' corresponding members. 
- for _, name := range pkg.Scope().Names() { - if !ast.IsExported(name) { - continue - } - obj1 := pkg.Scope().Lookup(name) - obj2 := pkg2.Scope().Lookup(name) - if obj2 == nil { - t.Fatalf("%s.%s not found, want %s", pkg.Path(), name, obj1) - continue - } - - fl1 := fileLine(conf.Fset, obj1) - fl2 := fileLine(fset2, obj2) - if fl1 != fl2 { - t.Errorf("%s.%s: got posn %s, want %s", - pkg.Path(), name, fl2, fl1) - } - - if err := cmpObj(obj1, obj2); err != nil { - t.Errorf("%s.%s: %s\ngot: %s\nwant: %s", - pkg.Path(), name, err, obj2, obj1) - } - } - } -} - -// TestVeryLongFile tests the position of an import object declared in -// a very long input file. Line numbers greater than maxlines are -// reported as line 1, not garbage or token.NoPos. -func TestIExportData_long(t *testing.T) { - // parse and typecheck - longFile := "package foo" + strings.Repeat("\n", 123456) + "var X int" - fset1 := token.NewFileSet() - f, err := parser.ParseFile(fset1, "foo.go", longFile, 0) - if err != nil { - t.Fatal(err) - } - var conf types.Config - pkg, err := conf.Check("foo", fset1, []*ast.File{f}, nil) - if err != nil { - t.Fatal(err) - } - - // export - exportdata, err := gcimporter.IExportData(fset1, pkg) - if err != nil { - t.Fatal(err) - } - if exportdata[0] == 'i' { - exportdata = exportdata[1:] // trim the 'i' in the header - } else { - t.Fatalf("unexpected first character of export data: %v", exportdata[0]) - } - - // import - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path()) - if err != nil { - t.Fatalf("IImportData(%s): %v", pkg.Path(), err) - } - - // compare - posn1 := fset1.Position(pkg.Scope().Lookup("X").Pos()) - posn2 := fset2.Position(pkg2.Scope().Lookup("X").Pos()) - if want := "foo.go:1:1"; posn2.String() != want { - t.Errorf("X position = %s, want %s (orig was %s)", - posn2, want, posn1) - } -} - -func TestIExportData_typealiases(t *testing.T) { - // parse 
and typecheck - fset1 := token.NewFileSet() - f, err := parser.ParseFile(fset1, "p.go", src, 0) - if err != nil { - t.Fatal(err) - } - var conf types.Config - pkg1, err := conf.Check("p", fset1, []*ast.File{f}, nil) - if err == nil { - // foo in undeclared in src; we should see an error - t.Fatal("invalid source type-checked without error") - } - if pkg1 == nil { - // despite incorrect src we should see a (partially) type-checked package - t.Fatal("nil package returned") - } - checkPkg(t, pkg1, "export") - - // export - // use a nil fileset here to confirm that it doesn't panic - exportdata, err := gcimporter.IExportData(nil, pkg1) - if err != nil { - t.Fatal(err) - } - if exportdata[0] == 'i' { - exportdata = exportdata[1:] // trim the 'i' in the header - } else { - t.Fatalf("unexpected first character of export data: %v", exportdata[0]) - } - - // import - imports := make(map[string]*types.Package) - fset2 := token.NewFileSet() - _, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg1.Path()) - if err != nil { - t.Fatalf("IImportData(%s): %v", pkg1.Path(), err) - } - checkPkg(t, pkg2, "import") -} - -// cmpObj reports how x and y differ. They are assumed to belong to different -// universes so cannot be compared directly. It is an adapted version of -// equalObj in bexport_test.go. -func cmpObj(x, y types.Object) error { - if reflect.TypeOf(x) != reflect.TypeOf(y) { - return fmt.Errorf("%T vs %T", x, y) - } - xt := x.Type() - yt := y.Type() - switch x.(type) { - case *types.Var, *types.Func: - // ok - case *types.Const: - xval := x.(*types.Const).Val() - yval := y.(*types.Const).Val() - equal := constant.Compare(xval, token.EQL, yval) - if !equal { - // try approx. 
comparison - xkind := xval.Kind() - ykind := yval.Kind() - if xkind == constant.Complex || ykind == constant.Complex { - equal = same(constant.Real(xval), constant.Real(yval)) && - same(constant.Imag(xval), constant.Imag(yval)) - } else if xkind == constant.Float || ykind == constant.Float { - equal = same(xval, yval) - } else if xkind == constant.Unknown && ykind == constant.Unknown { - equal = true - } - } - if !equal { - return fmt.Errorf("unequal constants %s vs %s", xval, yval) - } - case *types.TypeName: - xt = xt.Underlying() - yt = yt.Underlying() - default: - return fmt.Errorf("unexpected %T", x) - } - return equalType(xt, yt) -} - -// Use the same floating-point precision (512) as cmd/compile -// (see Mpprec in cmd/compile/internal/gc/mpfloat.go). -const mpprec = 512 - -// same compares non-complex numeric values and reports if they are approximately equal. -func same(x, y constant.Value) bool { - xf := constantToFloat(x) - yf := constantToFloat(y) - d := new(big.Float).Sub(xf, yf) - d.Abs(d) - eps := big.NewFloat(1.0 / (1 << (mpprec - 1))) // allow for 1 bit of error - return d.Cmp(eps) < 0 -} - -// copy of the function with the same name in iexport.go. -func constantToFloat(x constant.Value) *big.Float { - var f big.Float - f.SetPrec(mpprec) - if v, exact := constant.Float64Val(x); exact { - // float64 - f.SetFloat64(v) - } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { - // TODO(gri): add big.Rat accessor to constant.Value. - n := valueToRat(num) - d := valueToRat(denom) - f.SetRat(n.Quo(n, d)) - } else { - // Value too large to represent as a fraction => inaccessible. - // TODO(gri): add big.Float accessor to constant.Value. - _, ok := f.SetString(x.ExactString()) - if !ok { - panic("should not reach here") - } - } - return &f -} - -// copy of the function with the same name in iexport.go. -func valueToRat(x constant.Value) *big.Rat { - // Convert little-endian to big-endian. 
- // I can't believe this is necessary. - bytes := constant.Bytes(x) - for i := 0; i < len(bytes)/2; i++ { - bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] - } - return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) -} diff --git a/internal/go/gcimporter/israce_test.go b/internal/go/gcimporter/israce_test.go deleted file mode 100644 index af8e52b2e..000000000 --- a/internal/go/gcimporter/israce_test.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build race - -package gcimporter_test - -func init() { - isRace = true -} From 83cac16039a2b6aec2c5f19affe5685d4953aeaa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 16 Apr 2020 06:06:34 +0200 Subject: [PATCH 020/111] internal/go/gcexportdata: read file directly when possible --- internal/go/gcexportdata/gcexportdata.go | 45 +++++++++++++++++++++--- internal/go/gcimporter/exportdata.go | 10 ++++-- 2 files changed, 48 insertions(+), 7 deletions(-) diff --git a/internal/go/gcexportdata/gcexportdata.go b/internal/go/gcexportdata/gcexportdata.go index 52f47c8b0..03540ebd3 100644 --- a/internal/go/gcexportdata/gcexportdata.go +++ b/internal/go/gcexportdata/gcexportdata.go @@ -28,15 +28,32 @@ import ( "go/types" "io" "io/ioutil" + "os" "honnef.co/go/tools/internal/go/gcimporter" ) +type bufferedReader struct { + r io.Reader + buf *bufio.Reader +} + +func (r *bufferedReader) Read(b []byte) (int, error) { + return r.buf.Read(b) +} + +func (r *bufferedReader) ReadSlice(delim byte) (line []byte, err error) { + return r.buf.ReadSlice(delim) +} + // NewReader returns a reader for the export data section of an object // (.o) or archive (.a) file read from r. The new reader may provide // additional trailing data beyond the end of the export data. 
func NewReader(r io.Reader) (io.Reader, error) { - buf := bufio.NewReader(r) + buf := &bufferedReader{ + r: r, + buf: bufio.NewReader(r), + } _, err := gcimporter.FindExportData(buf) // If we ever switch to a zip-like archive format with the ToC // at the end, we can return the correct portion of export data, @@ -57,9 +74,29 @@ func NewReader(r io.Reader) (io.Reader, error) { // // On return, the state of the reader is undefined. func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { - data, err := ioutil.ReadAll(in) - if err != nil { - return nil, fmt.Errorf("reading export data for %q: %v", path, err) + var data []byte + if br, ok := in.(*bufferedReader); ok { + if f, ok := br.r.(*os.File); ok { + fi, err := f.Stat() + if err == nil { + // we expect to be close to the start of the file, + // which is why we don't bother checking with + // SEEK_CUR. + data = make([]byte, fi.Size()) + n, err := io.ReadFull(in, data) + data = data[:n] + if err != nil && err != io.ErrUnexpectedEOF { + data = nil + } + } + } + } + if data == nil { + var err error + data, err = ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export data for %q: %v", path, err) + } } if bytes.HasPrefix(data, []byte("!")) { diff --git a/internal/go/gcimporter/exportdata.go b/internal/go/gcimporter/exportdata.go index f33dc5613..b0a2e353f 100644 --- a/internal/go/gcimporter/exportdata.go +++ b/internal/go/gcimporter/exportdata.go @@ -9,14 +9,13 @@ package gcimporter import ( - "bufio" "fmt" "io" "strconv" "strings" ) -func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { +func readGopackHeader(r io.Reader) (name string, size int, err error) { // See $GOROOT/include/ar.h. 
hdr := make([]byte, 16+12+6+6+8+10+2) _, err = io.ReadFull(r, hdr) @@ -37,13 +36,18 @@ func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { return } +type BufferedReader interface { + Read(b []byte) (int, error) + ReadSlice(delim byte) (line []byte, err error) +} + // FindExportData positions the reader r at the beginning of the // export data section of an underlying GC-created object/archive // file by reading from it. The reader must be positioned at the // start of the file before calling this function. The hdr result // is the string before the export data, either "$$" or "$$B". // -func FindExportData(r *bufio.Reader) (hdr string, err error) { +func FindExportData(r BufferedReader) (hdr string, err error) { // Read first line to make sure this is an object file. line, err := r.ReadSlice('\n') if err != nil { From 5cfc85b70e7b778eb76fd7338e538d7c9af21e4e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 16 Apr 2020 06:10:44 +0200 Subject: [PATCH 021/111] Implement a new analysis runner and improve U1000 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This commit completely replaces the analysis runner of Staticcheck. It fixes several performance shortcomings, as well as subtle bugs in U1000. To explain the behaviors of the old and new runners, assume that we're processing a package graph that looks like this: A ↙ ↘ B C ↓ ⋮ ↓ X Package A is the package we wish to check. Packages B and C are direct dependencies of A, and X is an indirect dependency of B, with potentially many packages between B and X In the old runner, we would process the graph in a single DFS pass. We would start processing A, see that it needed B and C, start loading B and C, and so forth. This approach would unnecessarily increase memory usage. Package C would be held in memory, ready to be used by A, while the long chain from X to B was being processed. 
Furthermore, A may not need most of C's data in the first place, if A was already fully cached. Furthermore, processing the graph top to bottom is harder to parallelize efficiently. The new runner, in contrast, first materializes the graph (the planning phase) and then executes it from the bottom up (the execution phase). Whenever a leaf node finishes execution, its data would be cached on disk, then unloaded from memory. The only data that will be kept in memory is the package's hash, so that its dependents can compute their own hashes. Next, all dependents that are ready to run (i.e. that have no more unprocessed leaf nodes) will be executed. If the dependent decides that it needs information of its dependencies, it loads them from disk again. This approach drastically reduces peak memory usage, at a slight increase in CPU usage because of repeated loading of data. However, knowing the full graph allows for more efficient parallelization, offsetting the increased CPU cost. It also favours the common case, where most packages will have up to date cached data. Changes to unused The 'unused' check (U1000 and U1001) has always been the odd one out. It is the only check that propagates information backwards in the import graph – that is, the sum of dependents determines which objects in a package are considered used. Due to tests and test variants, this applies even when not operating in whole-program mode. The way we implemented this was not only expensive – whole-program mode in particular needed to retain type information for all packages – it was also subtly wrong. Because we cached all diagnostics of a package, we cached stale 'unused' diagnostics when a dependent changed. As part of writing the new analysis runner, we make several changes to 'unused' that make sure it behaves well and doesn't negate the performance improvements of the new runner. The most obvious change is the removal of whole-program mode. 
The combination of correct caching and efficient cache usage means that we no longer have access to the information required to compute a whole-program solution. It never worked quite right, anyway, being unaware of reflection, and having to grossly over-estimate the set of used methods due to interfaces. The normal mode of 'unused' now considers all exported package-level identifiers as used, even if they are declared within tests or package main. Treating exported functions in package main unused has been wrong ever since the addition of the 'plugin' build mode. Doing so in tests may have been mostly correct (ignoring reflection), but continuing to do so would complicate the implementation for little gain. In the new implementation, the per-package information that is cached for U1000 consists of two lists: the list of used objects and the list of unused objects. At the end of analysis, the lists of all packages get merged: if any package uses an object, it is considered used. Otherwise, if any package didn't use an object, it is considered unused. This list-based approach is only correct if the usedness of an exported object in one package doesn't depend on another package. Consider the following package layout: foo.go: package pkg func unexported() {} export_test.go package pkg func Exported() { unexported() } external_test.go package pkg_test import "pkg" var _ = pkg.Exported This layout has three packages: pkg, pkg [test] and pkg_test. Under unused's old logic, pkg_test would be responsible for marking pkg [test]'s Exported as used. This would transitively mark 'unexported' as used, too. However, with our list-based approach, we would get the following lists: pkg: used: unused: unexported pkg [test]: used: unused: unexported, Exported pkg_test: used: Exported unused: Merging these lists, we would never know that 'unexported' was used. Instead of using these lists, we would need to cache and resolve full graphs. 
This problem does not exist for unexported objects. If a package is able to use an unexported object, it must exist within the same package, which means it can internally resolve the package's graph before generating the lists. For completeness, these are the correct lists: pkg: used: unused: unexported pkg [test]: used: Exported, unexported unused: pkg_test: used: Exported unused: (The inclusion of Exported in pkg_test is superfluous and may be optimized away at some point.) As part of porting unused's tests, we discovered a flaky false negative, caused by an incorrect implementation of our version of types.Identical. We were still using types.Identical under the hood, which wouldn't correctly account for nested types. This has been fixed. Finally, two improvements to U1000 have been made. 1. We no longer hide unused methods of unused types. This would sometimes confuse users who would see an unused type, remove just the type, then be confronted with compile time errors because of lingering methods. 2. //lint:ignore is no longer purely a post-processing step. U1000 is aware of ignore directives and uses them to actively mark objects as used. This means that if an unused function uses an object, and is //lint:ignore'd, the object it uses will transitively be marked used. 
Closes gh-233 Closes gh-284 Closes gh-476 Closes gh-538 Closes gh-576 Closes gh-671 Closes gh-675 Closes gh-690 Closes gh-691 --- .github/workflows/ci.yml | 2 +- cmd/staticcheck/staticcheck.go | 10 +- code/code.go | 62 +- facts/directives.go | 107 ++ go/types/typeutil/callee_test.go | 2 +- go/types/typeutil/identical.go | 138 +- internal/cache/cache.go | 10 +- internal/passes/buildir/buildir.go | 3 + internal/sync/sync.go | 36 + lint/directives.go | 56 + lint/lint.go | 571 ++++---- lint/lint_test.go | 49 +- lint/lintutil/format/format.go | 24 +- lint/lintutil/util.go | 87 +- lint/runner.go | 1114 --------------- lint/stats.go | 38 - loader/buildid.go | 238 ++++ loader/hash.go | 83 ++ loader/loader.go | 258 ++-- loader/note.go | 207 +++ pattern/match.go | 6 +- report/report.go | 22 +- runner/runner.go | 1196 +++++++++++++++++ runner/stats.go | 48 + staticcheck/lint.go | 5 +- stylecheck/analysis.go | 10 +- unused/edge.go | 1 + unused/implements.go | 2 +- unused/testdata/src/alias/alias.go | 16 +- unused/testdata/src/anonymous/anonymous.go | 12 +- unused/testdata/src/blank/blank.go | 18 +- unused/testdata/src/cgo/cgo.go | 4 +- unused/testdata/src/consts/consts.go | 32 +- unused/testdata/src/conversion/conversion.go | 64 +- unused/testdata/src/cyclic/cyclic.go | 4 +- unused/testdata/src/defer/defer.go | 12 +- unused/testdata/src/elem/elem.go | 14 +- .../src/embedded_call/embedded_call.go | 26 +- unused/testdata/src/embedding/embedding.go | 80 +- unused/testdata/src/embedding2/embedding2.go | 22 +- .../src/exported_fields/exported_fields.go | 28 +- .../exported_fields_main.go | 10 +- .../exported_method_test.go | 16 +- unused/testdata/src/fields/fields.go | 126 +- unused/testdata/src/functions/functions.go | 30 +- unused/testdata/src/ignored/ignored.go | 56 + unused/testdata/src/interfaces/interfaces.go | 47 +- unused/testdata/src/interfaces2/interfaces.go | 14 +- unused/testdata/src/linkname/linkname.go | 12 +- unused/testdata/src/main/main.go | 13 +- 
unused/testdata/src/mapslice/mapslice.go | 4 +- unused/testdata/src/methods/methods.go | 16 +- unused/testdata/src/named/named.go | 4 +- unused/testdata/src/nested/nested.go | 23 +- .../testdata/src/nocopy-main/nocopy-main.go | 34 +- unused/testdata/src/nocopy/nocopy.go | 32 +- .../pointer-type-embedding.go | 14 +- unused/testdata/src/quiet/quiet.go | 24 +- unused/testdata/src/selectors/selectors.go | 8 +- .../src/switch_interface/switch_interface.go | 10 +- unused/testdata/src/tests-main/main_test.go | 4 +- unused/testdata/src/tests/tests.go | 2 +- unused/testdata/src/tests/tests_test.go | 2 +- unused/testdata/src/type-dedup/dedup.go | 14 +- unused/testdata/src/type-dedup2/dedup.go | 18 +- unused/testdata/src/type-dedup3/dedup.go | 18 +- unused/testdata/src/types/types.go | 10 +- .../src/unused-argument/unused-argument.go | 8 +- .../testdata/src/unused_type/unused_type.go | 14 +- unused/testdata/src/variables/variables.go | 27 +- unused/testdata/src/variables/vartype.go | 6 +- unused/unused.go | 1110 ++++++--------- unused/unused_test.go | 279 ++-- 73 files changed, 3795 insertions(+), 2927 deletions(-) create mode 100644 facts/directives.go create mode 100644 internal/sync/sync.go create mode 100644 lint/directives.go delete mode 100644 lint/runner.go delete mode 100644 lint/stats.go create mode 100644 loader/buildid.go create mode 100644 loader/hash.go create mode 100644 loader/note.go create mode 100644 runner/runner.go create mode 100644 runner/stats.go create mode 100644 unused/testdata/src/ignored/ignored.go diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1018652d0..fe36b76d1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,7 +7,7 @@ jobs: strategy: matrix: os: ["windows-latest", "ubuntu-latest", "macOS-latest"] - go: ["1.12.x", "1.13.x"] + go: ["1.13.x", "1.14.x"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v1 diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 
4f504dc39..87bed55c0 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -6,7 +6,6 @@ import ( "os" "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" "honnef.co/go/tools/simple" "honnef.co/go/tools/staticcheck" @@ -16,7 +15,6 @@ import ( func main() { fs := lintutil.FlagSet("staticcheck") - wholeProgram := fs.Bool("unused.whole-program", false, "Run unused in whole program mode") debug := fs.String("debug.unused-graph", "", "Write unused's object graph to `file`") fs.Parse(os.Args[1:]) @@ -31,14 +29,14 @@ func main() { cs = append(cs, v) } - u := unused.NewChecker(*wholeProgram) + cs = append(cs, unused.Analyzer) if *debug != "" { f, err := os.OpenFile(*debug, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666) if err != nil { log.Fatal(err) } - u.Debug = f + unused.Debug = f } - cums := []lint.CumulativeChecker{u} - lintutil.ProcessFlagSet(cs, cums, fs) + + lintutil.ProcessFlagSet(cs, fs) } diff --git a/code/code.go b/code/code.go index 6f4df8b9a..5ef7aef4d 100644 --- a/code/code.go +++ b/code/code.go @@ -2,6 +2,7 @@ package code import ( + "bytes" "flag" "fmt" "go/ast" @@ -9,6 +10,7 @@ import ( "go/token" "go/types" "strings" + "sync" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -17,9 +19,55 @@ import ( "honnef.co/go/tools/facts" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/ir" - "honnef.co/go/tools/lint" ) +var bufferPool = &sync.Pool{ + New: func() interface{} { + buf := bytes.NewBuffer(nil) + buf.Grow(64) + return buf + }, +} + +func FuncName(f *types.Func) string { + buf := bufferPool.Get().(*bytes.Buffer) + buf.Reset() + if f.Type() != nil { + sig := f.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + buf.WriteByte('(') + if _, ok := recv.Type().(*types.Interface); ok { + // gcimporter creates abstract methods of + // named interfaces using the interface type + // (not the named type) as the receiver. 
+ // Don't print it in full. + buf.WriteString("interface") + } else { + types.WriteType(buf, recv.Type(), nil) + } + buf.WriteByte(')') + buf.WriteByte('.') + } else if f.Pkg() != nil { + writePackage(buf, f.Pkg()) + } + } + buf.WriteString(f.Name()) + s := buf.String() + bufferPool.Put(buf) + return s +} + +func writePackage(buf *bytes.Buffer, pkg *types.Package) { + if pkg == nil { + return + } + s := pkg.Path() + if s != "" { + buf.WriteString(s) + buf.WriteByte('.') + } +} + type Positioner interface { Pos() token.Pos } @@ -34,7 +82,7 @@ func CallName(call *ir.CallCommon) string { if !ok { return "" } - return lint.FuncName(fn) + return FuncName(fn) case *ir.Builtin: return v.Name() } @@ -244,12 +292,12 @@ func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { if !ok { return "" } - return lint.FuncName(fn) + return FuncName(fn) case *ast.Ident: obj := pass.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: - return lint.FuncName(obj) + return FuncName(obj) case *types.Builtin: return obj.Name() default: @@ -472,7 +520,11 @@ func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityR } func IsGoVersion(pass *analysis.Pass, minor int) bool { - version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int) + f, ok := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter) + if !ok { + panic("requested Go version, but analyzer has no version flag") + } + version := f.Get().(int) return version >= minor } diff --git a/facts/directives.go b/facts/directives.go new file mode 100644 index 000000000..04cee52aa --- /dev/null +++ b/facts/directives.go @@ -0,0 +1,107 @@ +package facts + +import ( + "go/ast" + "go/token" + "path/filepath" + "reflect" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// A directive is a comment of the form '//lint: +// [arguments...]'. It represents instructions to the static analysis +// tool. 
+type Directive struct { + Command string + Arguments []string + Directive *ast.Comment + Node ast.Node +} + +type SerializedDirective struct { + Command string + Arguments []string + // The position of the comment + DirectivePosition token.Position + // The position of the node that the comment is attached to + NodePosition token.Position +} + +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +func directives(pass *analysis.Pass) (interface{}, error) { + return ParseDirectives(pass.Files, pass.Fset), nil +} + +func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive { + var dirs []Directive + for _, f := range files { + // OPT(dh): in our old code, we skip all the commentmap work if we + // couldn't find any directives, benchmark if that's actually + // worth doing + cm := ast.NewCommentMap(fset, f, f.Comments) + for node, cgs := range cm { + for _, cg := range cgs { + for _, c := range cg.List { + if !strings.HasPrefix(c.Text, "//lint:") { + continue + } + cmd, args := parseDirective(c.Text) + d := Directive{ + Command: cmd, + Arguments: args, + Directive: c, + Node: node, + } + dirs = append(dirs, d) + } + } + } + } + return dirs +} + +// duplicated from report.DisplayPosition to break import cycle +func displayPosition(fset *token.FileSet, p token.Pos) token.Position { + if p == token.NoPos { + return token.Position{} + } + + // Only use the adjusted position if it points to another Go file. + // This means we'll point to the original file for cgo files, but + // we won't point to a YACC grammar file. 
+ pos := fset.PositionFor(p, false) + adjPos := fset.PositionFor(p, true) + + if filepath.Ext(adjPos.Filename) == ".go" { + return adjPos + } + + return pos +} + +var Directives = &analysis.Analyzer{ + Name: "directives", + Doc: "extracts linter directives", + Run: directives, + RunDespiteErrors: true, + ResultType: reflect.TypeOf([]Directive{}), +} + +func SerializeDirective(dir Directive, fset *token.FileSet) SerializedDirective { + return SerializedDirective{ + Command: dir.Command, + Arguments: dir.Arguments, + DirectivePosition: displayPosition(fset, dir.Directive.Pos()), + NodePosition: displayPosition(fset, dir.Node.Pos()), + } +} diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index 6875d699f..2201eee71 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -63,7 +63,7 @@ func noncalls() { Uses: make(map[*ast.Ident]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - cfg := &types.Config{Importer: importer.For("source", nil)} + cfg := &types.Config{Importer: importer.ForCompiler(fset, "source", nil)} if _, err := cfg.Check("p", fset, []*ast.File{f}, info); err != nil { t.Fatal(err) } diff --git a/go/types/typeutil/identical.go b/go/types/typeutil/identical.go index c0ca441c3..0cd82e8c0 100644 --- a/go/types/typeutil/identical.go +++ b/go/types/typeutil/identical.go @@ -4,23 +4,80 @@ import ( "go/types" ) -// Identical reports whether x and y are identical types. // Unlike types.Identical, receivers of Signature types are not ignored. // Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). // Unlike types.Identical, structs are compared via pointer equality. 
-func Identical(x, y types.Type) (ret bool) { - if !types.Identical(x, y) { - return false +func identical0(x, y types.Type) bool { + if x == y { + return true } switch x := x.(type) { + case *types.Basic: + // Basic types are singletons except for the rune and byte + // aliases, thus we cannot solely rely on the x == y check + // above. See also comment in TypeName.IsAlias. + if y, ok := y.(*types.Basic); ok { + return x.Kind() == y.Kind() + } + + case *types.Array: + // Two array types are identical if they have identical element types + // and the same array length. + if y, ok := y.(*types.Array); ok { + // If one or both array lengths are unknown (< 0) due to some error, + // assume they are the same to avoid spurious follow-on errors. + return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && identical0(x.Elem(), y.Elem()) + } + + case *types.Slice: + // Two slice types are identical if they have identical element types. + if y, ok := y.(*types.Slice); ok { + return identical0(x.Elem(), y.Elem()) + } + case *types.Struct: - y, ok := y.(*types.Struct) - if !ok { - // should be impossible - return true + if y, ok := y.(*types.Struct); ok { + return x == y + } + + case *types.Pointer: + // Two pointer types are identical if they have identical base types. + if y, ok := y.(*types.Pointer); ok { + return identical0(x.Elem(), y.Elem()) + } + + case *types.Tuple: + // Two tuples types are identical if they have the same number of elements + // and corresponding elements have identical types. + if y, ok := y.(*types.Tuple); ok { + if x.Len() == y.Len() { + if x != nil { + for i := 0; i < x.Len(); i++ { + v := x.At(i) + w := y.At(i) + if !identical0(v.Type(), w.Type()) { + return false + } + } + } + return true + } + } + + case *types.Signature: + // Two function types are identical if they have the same number of parameters + // and result values, corresponding parameter and result types are identical, + // and either both functions are variadic or neither is. 
Parameter and result + // names are not required to match. + if y, ok := y.(*types.Signature); ok { + + return x.Variadic() == y.Variadic() && + identical0(x.Params(), y.Params()) && + identical0(x.Results(), y.Results()) && + (x.Recv() != nil && y.Recv() != nil && identical0(x.Recv().Type(), y.Recv().Type()) || x.Recv() == nil && y.Recv() == nil) } - return x == y + case *types.Interface: // The issue with interfaces, typeutil.Map and types.Identical // @@ -43,33 +100,50 @@ func Identical(x, y types.Type) (ret bool) { // pointers. This will obviously miss identical interfaces, // but this only has a runtime cost, it doesn't affect // correctness. - y, ok := y.(*types.Interface) - if !ok { - // should be impossible - return true - } - if x.NumEmbeddeds() == 0 && - y.NumEmbeddeds() == 0 && - x.NumMethods() == 0 && - y.NumMethods() == 0 { - // all truly empty interfaces are the same - return true + if y, ok := y.(*types.Interface); ok { + if x.NumEmbeddeds() == 0 && + y.NumEmbeddeds() == 0 && + x.NumMethods() == 0 && + y.NumMethods() == 0 { + // all truly empty interfaces are the same + return true + } + return x == y } - return x == y - case *types.Signature: - y, ok := y.(*types.Signature) - if !ok { - // should be impossible - return true + + case *types.Map: + // Two map types are identical if they have identical key and value types. + if y, ok := y.(*types.Map); ok { + return identical0(x.Key(), y.Key()) && identical0(x.Elem(), y.Elem()) } - if x.Recv() == y.Recv() { - return true + + case *types.Chan: + // Two channel types are identical if they have identical value types + // and the same direction. + if y, ok := y.(*types.Chan); ok { + return x.Dir() == y.Dir() && identical0(x.Elem(), y.Elem()) } - if x.Recv() == nil || y.Recv() == nil { - return false + + case *types.Named: + // Two named types are identical if their type names originate + // in the same type declaration. 
+ if y, ok := y.(*types.Named); ok { + return x.Obj() == y.Obj() } - return Identical(x.Recv().Type(), y.Recv().Type()) + + case nil: + default: - return true + panic("unreachable") } + + return false +} + +// Identical reports whether x and y are identical types. +// Unlike types.Identical, receivers of Signature types are not ignored. +// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). +// Unlike types.Identical, structs are compared via pointer equality. +func Identical(x, y types.Type) (ret bool) { + return identical0(x, y) } diff --git a/internal/cache/cache.go b/internal/cache/cache.go index 6b41811cf..cfd4241f9 100644 --- a/internal/cache/cache.go +++ b/internal/cache/cache.go @@ -77,7 +77,7 @@ func (c *Cache) fileName(id [HashSize]byte, key string) string { return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) } -var errMissing = errors.New("cache entry not found") +var ErrMissing = errors.New("cache entry not found") const ( // action entry file is "v1 \n" @@ -124,7 +124,7 @@ func initEnv() { // saved file for that output ID is still available. func (c *Cache) Get(id ActionID) (Entry, error) { if verify { - return Entry{}, errMissing + return Entry{}, ErrMissing } return c.get(id) } @@ -138,7 +138,7 @@ type Entry struct { // get is Get but does not respect verify mode, so that Put can use it. 
func (c *Cache) get(id ActionID) (Entry, error) { missing := func() (Entry, error) { - return Entry{}, errMissing + return Entry{}, ErrMissing } f, err := os.Open(c.fileName(id, "a")) if err != nil { @@ -196,7 +196,7 @@ func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) { file = c.OutputFile(entry.OutputID) info, err := os.Stat(file) if err != nil || info.Size() != entry.Size { - return "", Entry{}, errMissing + return "", Entry{}, ErrMissing } return file, entry, nil } @@ -211,7 +211,7 @@ func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { } data, _ := ioutil.ReadFile(c.OutputFile(entry.OutputID)) if sha256.Sum256(data) != entry.OutputID { - return nil, entry, errMissing + return nil, entry, ErrMissing } return data, entry, nil } diff --git a/internal/passes/buildir/buildir.go b/internal/passes/buildir/buildir.go index 394697702..884884f55 100644 --- a/internal/passes/buildir/buildir.go +++ b/internal/passes/buildir/buildir.go @@ -25,6 +25,9 @@ type willUnwind struct{} func (*willExit) AFact() {} func (*willUnwind) AFact() {} +func (*willExit) String() string { return "will exit" } +func (*willUnwind) String() string { return "will unwind" } + var Analyzer = &analysis.Analyzer{ Name: "buildir", Doc: "build IR for later passes", diff --git a/internal/sync/sync.go b/internal/sync/sync.go new file mode 100644 index 000000000..e78ad5072 --- /dev/null +++ b/internal/sync/sync.go @@ -0,0 +1,36 @@ +package sync + +type Semaphore struct { + ch chan struct{} +} + +func NewSemaphore(size int) Semaphore { + return Semaphore{ + ch: make(chan struct{}, size), + } +} + +func (sem Semaphore) Acquire() { + sem.ch <- struct{}{} +} + +func (sem Semaphore) AcquireMaybe() bool { + select { + case sem.ch <- struct{}{}: + return true + default: + return false + } +} + +func (sem Semaphore) Release() { + <-sem.ch +} + +func (sem Semaphore) Len() int { + return len(sem.ch) +} + +func (sem Semaphore) Cap() int { + return cap(sem.ch) +} diff --git 
a/lint/directives.go b/lint/directives.go new file mode 100644 index 000000000..1ca8d5acf --- /dev/null +++ b/lint/directives.go @@ -0,0 +1,56 @@ +package lint + +import ( + "strings" + + "honnef.co/go/tools/facts" + "honnef.co/go/tools/runner" +) + +func parseDirectives(dirs []facts.SerializedDirective) ([]ignore, []Problem) { + var ignores []ignore + var problems []Problem + + for _, dir := range dirs { + cmd := dir.Command + args := dir.Arguments + switch cmd { + case "ignore", "file-ignore": + if len(args) < 2 { + p := Problem{ + Diagnostic: runner.Diagnostic{ + Position: dir.NodePosition, + Message: "malformed linter directive; missing the required reason field?", + Category: "compile", + }, + Severity: Error, + } + problems = append(problems, p) + continue + } + default: + // unknown directive, ignore + continue + } + checks := strings.Split(args[0], ",") + pos := dir.NodePosition + var ig ignore + switch cmd { + case "ignore": + ig = &lineIgnore{ + File: pos.Filename, + Line: pos.Line, + Checks: checks, + Pos: dir.DirectivePosition, + } + case "file-ignore": + ig = &fileIgnore{ + File: pos.Filename, + Checks: checks, + } + } + ignores = append(ignores, ig) + } + + return ignores, problems +} diff --git a/lint/lint.go b/lint/lint.go index 1a70e0c29..62533cffc 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -2,23 +2,21 @@ package lint // import "honnef.co/go/tools/lint" import ( - "bytes" - "encoding/gob" "fmt" - "go/scanner" "go/token" - "go/types" "path/filepath" + "regexp" "sort" + "strconv" "strings" - "sync" - "sync/atomic" "unicode" + "honnef.co/go/tools/config" + "honnef.co/go/tools/runner" + "honnef.co/go/tools/unused" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" - "honnef.co/go/tools/config" - "honnef.co/go/tools/internal/cache" ) type Documentation struct { @@ -55,11 +53,11 @@ func (doc *Documentation) String() string { return b.String() } -type Ignore interface { +type ignore interface { Match(p Problem) bool } -type 
LineIgnore struct { +type lineIgnore struct { File string Line int Checks []string @@ -67,13 +65,13 @@ type LineIgnore struct { Pos token.Position } -func (li *LineIgnore) Match(p Problem) bool { - pos := p.Pos +func (li *lineIgnore) Match(p Problem) bool { + pos := p.Position if pos.Filename != li.File || pos.Line != li.Line { return false } for _, c := range li.Checks { - if m, _ := filepath.Match(c, p.Check); m { + if m, _ := filepath.Match(c, p.Category); m { li.Matched = true return true } @@ -81,7 +79,7 @@ func (li *LineIgnore) Match(p Problem) bool { return false } -func (li *LineIgnore) String() string { +func (li *lineIgnore) String() string { matched := "not matched" if li.Matched { matched = "matched" @@ -89,17 +87,17 @@ func (li *LineIgnore) String() string { return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) } -type FileIgnore struct { +type fileIgnore struct { File string Checks []string } -func (fi *FileIgnore) Match(p Problem) bool { - if p.Pos.Filename != fi.File { +func (fi *fileIgnore) Match(p Problem) bool { + if p.Position.Filename != fi.File { return false } for _, c := range fi.Checks { - if m, _ := filepath.Match(c, p.Check); m { + if m, _ := filepath.Match(c, p.Category); m { return true } } @@ -116,286 +114,266 @@ const ( // Problem represents a problem in some source code. 
type Problem struct { - Pos token.Position - End token.Position - Message string - Check string + runner.Diagnostic Severity Severity - Related []Related -} - -type Related struct { - Pos token.Position - End token.Position - Message string } -func (p Problem) Equal(o Problem) bool { - return p.Pos == o.Pos && +func (p Problem) equal(o Problem) bool { + return p.Position == o.Position && p.End == o.End && p.Message == o.Message && - p.Check == o.Check && + p.Category == o.Category && p.Severity == o.Severity } func (p *Problem) String() string { - return fmt.Sprintf("%s (%s)", p.Message, p.Check) + return fmt.Sprintf("%s (%s)", p.Message, p.Category) } // A Linter lints Go source code. type Linter struct { - Checkers []*analysis.Analyzer - CumulativeCheckers []CumulativeChecker - GoVersion int - Config config.Config - Stats Stats - RepeatAnalyzers uint + Checkers []*analysis.Analyzer + Config config.Config + Runner *runner.Runner } -type CumulativeChecker interface { - Analyzer() *analysis.Analyzer - Result() []types.Object - ProblemObject(*token.FileSet, types.Object) Problem -} +func failed(res runner.Result) []Problem { + var problems []Problem -func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) { - var allAnalyzers []*analysis.Analyzer - allAnalyzers = append(allAnalyzers, l.Checkers...) - for _, cum := range l.CumulativeCheckers { - allAnalyzers = append(allAnalyzers, cum.Analyzer()) - } + for _, e := range res.Errors { + switch e := e.(type) { + case packages.Error: + msg := e.Msg + if len(msg) != 0 && msg[0] == '\n' { + // TODO(dh): See https://github.com/golang/go/issues/32363 + msg = msg[1:] + } - // The -checks command line flag overrules all configuration - // files, which means that for `-checks="foo"`, no check other - // than foo can ever be reported to the user. Make use of this - // fact to cull the list of analyses we need to run. 
- - // replace "inherit" with "all", as we don't want to base the - // list of all checks on the default configuration, which - // disables certain checks. - checks := make([]string, len(l.Config.Checks)) - copy(checks, l.Config.Checks) - for i, c := range checks { - if c == "inherit" { - checks[i] = "all" + var posn token.Position + if e.Pos == "" { + // Under certain conditions (malformed package + // declarations, multiple packages in the same + // directory), go list emits an error on stderr + // instead of JSON. Those errors do not have + // associated position information in + // go/packages.Error, even though the output on + // stderr may contain it. + if p, n, err := parsePos(msg); err == nil { + if abs, err := filepath.Abs(p.Filename); err == nil { + p.Filename = abs + } + posn = p + msg = msg[n+2:] + } + } else { + var err error + posn, _, err = parsePos(e.Pos) + if err != nil { + panic(fmt.Sprintf("internal error: %s", e)) + } + } + p := Problem{ + Diagnostic: runner.Diagnostic{ + Position: posn, + Message: msg, + Category: "compile", + }, + Severity: Error, + } + problems = append(problems, p) + case error: + p := Problem{ + Diagnostic: runner.Diagnostic{ + Position: token.Position{}, + Message: e.Error(), + Category: "compile", + }, + Severity: Error, + } + problems = append(problems, p) } } - allowed := FilterChecks(allAnalyzers, checks) - var allowedAnalyzers []*analysis.Analyzer - for _, c := range l.Checkers { - if allowed[c.Name] { - allowedAnalyzers = append(allowedAnalyzers, c) - } + return problems +} + +type unusedKey struct { + pkgPath string + base string + line int + name string +} + +type unusedPair struct { + key unusedKey + obj unused.SerializedObject +} + +func success(allowedChecks map[string]bool, res runner.Result) ([]Problem, unused.SerializedResult, error) { + diags, err := res.Diagnostics() + if err != nil { + return nil, unused.SerializedResult{}, err } - hasCumulative := false - for _, cum := range l.CumulativeCheckers { - a := 
cum.Analyzer() - if allowed[a.Name] { - hasCumulative = true - allowedAnalyzers = append(allowedAnalyzers, a) + + var problems []Problem + + for _, diag := range diags { + if !allowedChecks[diag.Category] { + continue } + problems = append(problems, Problem{Diagnostic: diag}) } - r, err := NewRunner(&l.Stats) - if err != nil { - return nil, err + u, err := res.Unused() + return problems, u, err +} + +func filterIgnored(problems []Problem, res runner.Result, allowedAnalyzers map[string]bool) ([]Problem, error) { + couldveMatched := func(ig *lineIgnore) bool { + for _, c := range ig.Checks { + if c == "U1000" { + // We never want to flag ignores for U1000, + // because U1000 isn't local to a single + // package. For example, an identifier may + // only be used by tests, in which case an + // ignore would only fire when not analyzing + // tests. To avoid spurious "useless ignore" + // warnings, just never flag U1000. + return false + } + + // Even though the runner always runs all analyzers, we + // still only flag unmatched ignores for the set of + // analyzers the user has expressed interest in. That way, + // `staticcheck -checks=SA1000` won't complain about an + // unmatched ignore for an unrelated check. 
+ if allowedAnalyzers[c] { + return true + } + } + + return false } - r.goVersion = l.GoVersion - r.repeatAnalyzers = l.RepeatAnalyzers - pkgs, err := r.Run(cfg, patterns, allowedAnalyzers, hasCumulative) + dirs, err := res.Directives() if err != nil { return nil, err } - tpkgToPkg := map[*types.Package]*Package{} - for _, pkg := range pkgs { - tpkgToPkg[pkg.Types] = pkg - - for _, e := range pkg.errs { - switch e := e.(type) { - case types.Error: - p := Problem{ - Pos: e.Fset.PositionFor(e.Pos, false), - Message: e.Msg, - Severity: Error, - Check: "compile", - } - pkg.problems = append(pkg.problems, p) - case packages.Error: - msg := e.Msg - if len(msg) != 0 && msg[0] == '\n' { - // TODO(dh): See https://github.com/golang/go/issues/32363 - msg = msg[1:] - } + ignores, moreProblems := parseDirectives(dirs) - var pos token.Position - if e.Pos == "" { - // Under certain conditions (malformed package - // declarations, multiple packages in the same - // directory), go list emits an error on stderr - // instead of JSON. Those errors do not have - // associated position information in - // go/packages.Error, even though the output on - // stderr may contain it. 
- if p, n, err := parsePos(msg); err == nil { - if abs, err := filepath.Abs(p.Filename); err == nil { - p.Filename = abs - } - pos = p - msg = msg[n+2:] - } - } else { - var err error - pos, _, err = parsePos(e.Pos) - if err != nil { - panic(fmt.Sprintf("internal error: %s", e)) - } - } - p := Problem{ - Pos: pos, - Message: msg, - Severity: Error, - Check: "compile", - } - pkg.problems = append(pkg.problems, p) - case scanner.ErrorList: - for _, e := range e { - p := Problem{ - Pos: e.Pos, - Message: e.Msg, - Severity: Error, - Check: "compile", - } - pkg.problems = append(pkg.problems, p) - } - case error: - p := Problem{ - Pos: token.Position{}, - Message: e.Error(), - Severity: Error, - Check: "compile", - } - pkg.problems = append(pkg.problems, p) + for _, ig := range ignores { + for i := range problems { + p := &problems[i] + if ig.Match(*p) { + p.Severity = Ignored } } - } - atomic.StoreUint32(&r.stats.State, StateCumulative) - for _, cum := range l.CumulativeCheckers { - for _, res := range cum.Result() { - pkg := tpkgToPkg[res.Pkg()] - if pkg == nil { - panic(fmt.Sprintf("analyzer %s flagged object %s in package %s, a package that we aren't tracking", cum.Analyzer(), res, res.Pkg())) - } - allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks) - if allowedChecks[cum.Analyzer().Name] { - pos := DisplayPosition(pkg.Fset, res.Pos()) - // FIXME(dh): why are we ignoring generated files - // here? 
Surely this is specific to 'unused', not all - // cumulative checkers - if _, ok := pkg.gen[pos.Filename]; ok { - continue - } - p := cum.ProblemObject(pkg.Fset, res) - pkg.problems = append(pkg.problems, p) + if ig, ok := ig.(*lineIgnore); ok && !ig.Matched && couldveMatched(ig) { + p := Problem{ + Diagnostic: runner.Diagnostic{ + Position: ig.Pos, + Message: "this linter directive didn't match anything; should it be removed?", + Category: "", + }, } + moreProblems = append(moreProblems, p) } } - for _, pkg := range pkgs { - if !pkg.fromSource { - // Don't cache packages that we loaded from the cache - continue - } - cpkg := cachedPackage{ - Problems: pkg.problems, - Ignores: pkg.ignores, - Config: pkg.cfg, - } - buf := &bytes.Buffer{} - if err := gob.NewEncoder(buf).Encode(cpkg); err != nil { - return nil, err - } - id := cache.Subkey(pkg.actionID, "data "+r.problemsCacheKey) - if err := r.cache.PutBytes(id, buf.Bytes()); err != nil { - return nil, err - } + return append(problems, moreProblems...), nil +} + +func NewLinter(cfg config.Config) (*Linter, error) { + r, err := runner.New(cfg) + if err != nil { + return nil, err + } + return &Linter{ + Config: cfg, + Runner: r, + }, nil +} + +func (l *Linter) SetGoVersion(n int) { + l.Runner.GoVersion = n +} + +func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) { + results, err := l.Runner.Run(cfg, l.Checkers, patterns) + if err != nil { + return nil, err + } + + analyzerNames := make([]string, len(l.Checkers)) + for i, a := range l.Checkers { + analyzerNames[i] = a.Name } var problems []Problem - // Deduplicate line ignores. When U1000 processes a package and - // its test variant, it will only emit a single problem for an - // unused object, not two problems. We will, however, have two - // line ignores, one per package. 
Without deduplication, one line - // ignore will be marked as matched, while the other one won't, - // subsequently reporting a "this linter directive didn't match - // anything" error. - ignores := map[token.Position]Ignore{} - for _, pkg := range pkgs { - for _, ig := range pkg.ignores { - if lig, ok := ig.(*LineIgnore); ok { - ig = ignores[lig.Pos] - if ig == nil { - ignores[lig.Pos] = lig - ig = lig - } - } - for i := range pkg.problems { - p := &pkg.problems[i] - if ig.Match(*p) { - p.Severity = Ignored - } - } + used := map[unusedKey]bool{} + var unuseds []unusedPair + for _, res := range results { + if len(res.Errors) > 0 && !res.Failed { + panic("package has errors but isn't marked as failed") } - - if pkg.cfg == nil { - // The package failed to load, otherwise we would have a - // valid config. Pass through all errors. - problems = append(problems, pkg.problems...) + if res.Failed { + problems = append(problems, failed(res)...) } else { - for _, p := range pkg.problems { - allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks) - allowedChecks["compile"] = true - if allowedChecks[p.Check] { - problems = append(problems, p) - } + allowedAnalyzers := FilterAnalyzerNames(analyzerNames, res.Config.Checks) + ps, u, err := success(allowedAnalyzers, res) + if err != nil { + return nil, err } - } - - for _, ig := range pkg.ignores { - ig, ok := ig.(*LineIgnore) - if !ok { - continue + filtered, err := filterIgnored(ps, res, allowedAnalyzers) + if err != nil { + return nil, err } - ig = ignores[ig.Pos].(*LineIgnore) - if ig.Matched { - continue + problems = append(problems, filtered...) 
+ + for _, obj := range u.Used { + // FIXME(dh): pick the object whose filename does not include $GOROOT + key := unusedKey{ + pkgPath: res.Package.PkgPath, + base: filepath.Base(obj.Position.Filename), + line: obj.Position.Line, + name: obj.Name, + } + used[key] = true } - couldveMatched := false - allowedChecks := FilterChecks(allowedAnalyzers, pkg.cfg.Merge(l.Config).Checks) - for _, c := range ig.Checks { - if !allowedChecks[c] { - continue + if allowedAnalyzers["U1000"] { + for _, obj := range u.Unused { + key := unusedKey{ + pkgPath: res.Package.PkgPath, + base: filepath.Base(obj.Position.Filename), + line: obj.Position.Line, + name: obj.Name, + } + unuseds = append(unuseds, unusedPair{key, obj}) + if _, ok := used[key]; !ok { + used[key] = false + } } - couldveMatched = true - break } + } + } - if !couldveMatched { - // The ignored checks were disabled for the containing package. - // Don't flag the ignore for not having matched. - continue - } - p := Problem{ - Pos: ig.Pos, - Message: "this linter directive didn't match anything; should it be removed?", - Check: "", - } - problems = append(problems, p) + for _, uo := range unuseds { + if used[uo.key] { + continue + } + if uo.obj.InGenerated { + continue } + problems = append(problems, Problem{ + Diagnostic: runner.Diagnostic{ + Position: uo.obj.DisplayPosition, + Message: fmt.Sprintf("%s %s is unused", uo.obj.Kind, uo.obj.Name), + Category: "U1000", + }, + }) } if len(problems) == 0 { @@ -403,8 +381,8 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error } sort.Slice(problems, func(i, j int) bool { - pi := problems[i].Pos - pj := problems[j].Pos + pi := problems[i].Position + pj := problems[j].Position if pi.Filename != pj.Filename { return pi.Filename < pj.Filename @@ -424,15 +402,14 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error for i, p := range problems[1:] { // We may encounter duplicate problems because one file // can be part of many 
packages. - if !problems[i].Equal(p) { + if !problems[i].equal(p) { out = append(out, p) } } return out, nil } -func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bool { - // OPT(dh): this entire computation could be cached per package +func FilterAnalyzerNames(analyzers []string, checks []string) map[string]bool { allowedChecks := map[string]bool{} for _, check := range checks { @@ -443,26 +420,26 @@ func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bo } if check == "*" || check == "all" { // Match all - for _, c := range allChecks { - allowedChecks[c.Name] = b + for _, c := range analyzers { + allowedChecks[c] = b } } else if strings.HasSuffix(check, "*") { // Glob prefix := check[:len(check)-1] isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 - for _, c := range allChecks { - idx := strings.IndexFunc(c.Name, func(r rune) bool { return unicode.IsNumber(r) }) + for _, a := range analyzers { + idx := strings.IndexFunc(a, func(r rune) bool { return unicode.IsNumber(r) }) if isCat { // Glob is S*, which should match S1000 but not SA1000 - cat := c.Name[:idx] + cat := a[:idx] if prefix == cat { - allowedChecks[c.Name] = b + allowedChecks[a] = b } } else { // Glob is S1* - if strings.HasPrefix(c.Name, prefix) { - allowedChecks[c.Name] = b + if strings.HasPrefix(a, prefix) { + allowedChecks[a] = b } } } @@ -474,66 +451,22 @@ func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bo return allowedChecks } -func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { - if p == token.NoPos { - return token.Position{} - } - - // Only use the adjusted position if it points to another Go file. - // This means we'll point to the original file for cgo files, but - // we won't point to a YACC grammar file. 
- pos := fset.PositionFor(p, false) - adjPos := fset.PositionFor(p, true) - - if filepath.Ext(adjPos.Filename) == ".go" { - return adjPos - } - return pos -} - -var bufferPool = &sync.Pool{ - New: func() interface{} { - buf := bytes.NewBuffer(nil) - buf.Grow(64) - return buf - }, -} - -func FuncName(f *types.Func) string { - buf := bufferPool.Get().(*bytes.Buffer) - buf.Reset() - if f.Type() != nil { - sig := f.Type().(*types.Signature) - if recv := sig.Recv(); recv != nil { - buf.WriteByte('(') - if _, ok := recv.Type().(*types.Interface); ok { - // gcimporter creates abstract methods of - // named interfaces using the interface type - // (not the named type) as the receiver. - // Don't print it in full. - buf.WriteString("interface") - } else { - types.WriteType(buf, recv.Type(), nil) - } - buf.WriteByte(')') - buf.WriteByte('.') - } else if f.Pkg() != nil { - writePackage(buf, f.Pkg()) - } - } - buf.WriteString(f.Name()) - s := buf.String() - bufferPool.Put(buf) - return s -} +var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?`) -func writePackage(buf *bytes.Buffer, pkg *types.Package) { - if pkg == nil { - return +func parsePos(pos string) (token.Position, int, error) { + if pos == "-" || pos == "" { + return token.Position{}, 0, nil } - s := pkg.Path() - if s != "" { - buf.WriteString(s) - buf.WriteByte('.') + parts := posRe.FindStringSubmatch(pos) + if parts == nil { + return token.Position{}, 0, fmt.Errorf("malformed position %q", pos) } + file := parts[1] + line, _ := strconv.Atoi(parts[2]) + col, _ := strconv.Atoi(parts[3]) + return token.Position{ + Filename: file, + Line: line, + Column: col, + }, len(parts[0]), nil } diff --git a/lint/lint_test.go b/lint/lint_test.go index 07e7623cd..e9bf7ad6d 100644 --- a/lint/lint_test.go +++ b/lint/lint_test.go @@ -10,6 +10,8 @@ import ( "testing" "golang.org/x/tools/go/packages" + "honnef.co/go/tools/config" + "honnef.co/go/tools/runner" ) func testdata() string { @@ -21,7 +23,10 @@ func testdata() string { } 
func lintPackage(t *testing.T, name string) []Problem { - l := Linter{} + l, err := NewLinter(config.Config{}) + if err != nil { + t.Fatal(err) + } cfg := &packages.Config{ Env: append(os.Environ(), "GOPATH="+testdata(), "GO111MODULE=off"), } @@ -48,7 +53,7 @@ func TestErrors(t *testing.T) { if want := "expected 'package', found pckage"; ps[0].Message != want { t.Errorf("got message %q, want %q", ps[0].Message, want) } - if ps[0].Pos.Filename == "" { + if ps[0].Position.Filename == "" { t.Errorf("didn't get useful position") } }) @@ -61,19 +66,21 @@ func TestErrors(t *testing.T) { if len(ps) != 1 { t.Fatalf("got %d problems, want 1", len(ps)) } - trimPosition(&ps[0].Pos) + trimPosition(&ps[0].Position) want := Problem{ - Pos: token.Position{ - Filename: "broken_typeerror/pkg.go", - Offset: 42, - Line: 5, - Column: 10, + Diagnostic: runner.Diagnostic{ + Position: token.Position{ + Filename: "broken_typeerror/pkg.go", + Offset: 0, + Line: 5, + Column: 10, + }, + Message: "cannot convert \"\" (untyped string constant) to int", + Category: "compile", }, - Message: "cannot convert \"\" (untyped string constant) to int", - Check: "compile", Severity: 0, } - if !ps[0].Equal(want) { + if !ps[0].equal(want) { t.Errorf("got %#v, want %#v", ps[0], want) } }) @@ -91,19 +98,21 @@ func TestErrors(t *testing.T) { t.Fatalf("got %d problems, want 1", len(ps)) } - trimPosition(&ps[0].Pos) + trimPosition(&ps[0].Position) want := Problem{ - Pos: token.Position{ - Filename: "broken_parse/pkg.go", - Offset: 13, - Line: 3, - Column: 1, + Diagnostic: runner.Diagnostic{ + Position: token.Position{ + Filename: "broken_parse/pkg.go", + Offset: 0, + Line: 3, + Column: 1, + }, + Message: "expected declaration, found asd", + Category: "compile", }, - Message: "expected declaration, found asd", - Check: "compile", Severity: 0, } - if !ps[0].Equal(want) { + if !ps[0].equal(want) { t.Errorf("got %#v, want %#v", ps[0], want) } }) diff --git a/lint/lintutil/format/format.go 
b/lint/lintutil/format/format.go index b28f8885b..ef75a75d4 100644 --- a/lint/lintutil/format/format.go +++ b/lint/lintutil/format/format.go @@ -51,9 +51,9 @@ type Text struct { } func (o Text) Format(p lint.Problem) { - fmt.Fprintf(o.W, "%s: %s\n", relativePositionString(p.Pos), p.String()) + fmt.Fprintf(o.W, "%s: %s\n", relativePositionString(p.Position), p.String()) for _, r := range p.Related { - fmt.Fprintf(o.W, "\t%s: %s\n", relativePositionString(r.Pos), r.Message) + fmt.Fprintf(o.W, "\t%s: %s\n", relativePositionString(r.Position), r.Message) } } @@ -92,12 +92,12 @@ func (o JSON) Format(p lint.Problem) { Message string `json:"message"` Related []related `json:"related,omitempty"` }{ - Code: p.Check, + Code: p.Category, Severity: severity(p.Severity), Location: location{ - File: p.Pos.Filename, - Line: p.Pos.Line, - Column: p.Pos.Column, + File: p.Position.Filename, + Line: p.Position.Line, + Column: p.Position.Column, }, End: location{ File: p.End.Filename, @@ -109,9 +109,9 @@ func (o JSON) Format(p lint.Problem) { for _, r := range p.Related { jp.Related = append(jp.Related, related{ Location: location{ - File: r.Pos.Filename, - Line: r.Pos.Line, - Column: r.Pos.Column, + File: r.Position.Filename, + Line: r.Position.Line, + Column: r.Position.Column, }, End: location{ File: r.End.Filename, @@ -132,7 +132,7 @@ type Stylish struct { } func (o *Stylish) Format(p lint.Problem) { - pos := p.Pos + pos := p.Position if pos.Filename == "" { pos.Filename = "-" } @@ -146,9 +146,9 @@ func (o *Stylish) Format(p lint.Problem) { o.prevFile = pos.Filename o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0) } - fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Check, p.Message) + fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Category, p.Message) for _, r := range p.Related { - fmt.Fprintf(o.tw, " (%d, %d)\t\t %s\n", r.Pos.Line, r.Pos.Column, r.Message) + fmt.Fprintf(o.tw, " (%d, %d)\t\t %s\n", r.Position.Line, r.Position.Column, r.Message) 
} } diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 278cd267b..bcf513b7a 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -24,13 +24,14 @@ import ( "strconv" "strings" "sync" - "sync/atomic" "time" "honnef.co/go/tools/config" "honnef.co/go/tools/internal/cache" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil/format" + "honnef.co/go/tools/loader" + "honnef.co/go/tools/runner" "honnef.co/go/tools/version" "golang.org/x/tools/go/analysis" @@ -38,7 +39,7 @@ import ( "golang.org/x/tools/go/packages" ) -func NewVersionFlag() flag.Getter { +func newVersionFlag() flag.Getter { tags := build.Default.ReleaseTags v := tags[len(tags)-1][2:] version := new(VersionFlag) @@ -52,7 +53,6 @@ type VersionFlag int func (v *VersionFlag) String() string { return fmt.Sprintf("1.%d", *v) - } func (v *VersionFlag) Set(s string) error { @@ -117,7 +117,6 @@ func FlagSet(name string) *flag.FlagSet { flags.Bool("debug.version", false, "Print detailed version information about this program") flags.Bool("debug.no-compile-errors", false, "Don't print compile errors") flags.String("debug.measure-analyzers", "", "Write analysis measurements to `file`. 
`file` will be opened for appending if it already exists.") - flags.Uint("debug.repeat-analyzers", 0, "Run analyzers `num` times") checks := list{"inherit"} fail := list{"all"} @@ -144,7 +143,7 @@ func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) return nil, false } -func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *flag.FlagSet) { +func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) @@ -157,9 +156,8 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool) debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool) - debugRepeat := fs.Lookup("debug.repeat-analyzers").Value.(flag.Getter).Get().(uint) - var measureAnalyzers func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration) + var measureAnalyzers func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) if path := fs.Lookup("debug.measure-analyzers").Value.(flag.Getter).Get().(string); path != "" { f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600) if err != nil { @@ -167,10 +165,11 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * } mu := &sync.Mutex{} - measureAnalyzers = func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration) { + measureAnalyzers = func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) { mu.Lock() defer mu.Unlock() - if _, err := fmt.Fprintf(f, "%s\t%s\t%d\n", analysis.Name, pkg.ID, d.Nanoseconds()); err != nil { + // FIXME(dh): print pkg.ID + if _, err := fmt.Fprintf(f, "%s\t%s\t%d\n", analysis.Name, pkg, 
d.Nanoseconds()); err != nil { log.Println("error writing analysis measurements:", err) } } @@ -223,9 +222,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * if explain != "" { var haystack []*analysis.Analyzer haystack = append(haystack, cs...) - for _, cum := range cums { - haystack = append(haystack, cum.Analyzer()) - } check, ok := findCheck(haystack, explain) if !ok { fmt.Fprintln(os.Stderr, "Couldn't find check", explain) @@ -252,13 +248,12 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * exit(2) } - ps, err := Lint(cs, cums, fs.Args(), &Options{ + ps, err := doLint(cs, fs.Args(), &Options{ Tags: tags, LintTests: tests, GoVersion: goVersion, Config: cfg, PrintAnalyzerMeasurement: measureAnalyzers, - RepeatAnalyzers: debugRepeat, }) if err != nil { fmt.Fprintln(os.Stderr, err) @@ -273,24 +268,23 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * ) fail := *fs.Lookup("fail").Value.(*list) - analyzers := make([]*analysis.Analyzer, len(cs), len(cs)+len(cums)) - copy(analyzers, cs) - for _, cum := range cums { - analyzers = append(analyzers, cum.Analyzer()) + analyzerNames := make([]string, len(cs)) + for i, a := range cs { + analyzerNames[i] = a.Name } - shouldExit := lint.FilterChecks(analyzers, fail) + shouldExit := lint.FilterAnalyzerNames(analyzerNames, fail) shouldExit["compile"] = true total = len(ps) for _, p := range ps { - if p.Check == "compile" && debugNoCompile { + if p.Category == "compile" && debugNoCompile { continue } if p.Severity == lint.Ignored && !showIgnored { ignored++ continue } - if shouldExit[p.Check] { + if shouldExit[p.Category] { errors++ } else { p.Severity = lint.Warning @@ -313,8 +307,7 @@ type Options struct { Tags string LintTests bool GoVersion int - PrintAnalyzerMeasurement func(analysis *analysis.Analyzer, pkg *lint.Package, d time.Duration) - RepeatAnalyzers uint + PrintAnalyzerMeasurement func(analysis *analysis.Analyzer, 
pkg *loader.PackageSpec, d time.Duration) } func computeSalt() ([]byte, error) { @@ -337,7 +330,7 @@ func computeSalt() ([]byte, error) { return h.Sum(nil), nil } -func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string, opt *Options) ([]lint.Problem, error) { +func doLint(cs []*analysis.Analyzer, paths []string, opt *Options) ([]lint.Problem, error) { salt, err := computeSalt() if err != nil { return nil, fmt.Errorf("could not compute salt for cache: %s", err) @@ -348,14 +341,14 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string opt = &Options{} } - l := &lint.Linter{ - Checkers: cs, - CumulativeCheckers: cums, - GoVersion: opt.GoVersion, - Config: opt.Config, - RepeatAnalyzers: opt.RepeatAnalyzers, + l, err := lint.NewLinter(opt.Config) + if err != nil { + return nil, err } - l.Stats.PrintAnalyzerMeasurement = opt.PrintAnalyzerMeasurement + l.Checkers = cs + l.SetGoVersion(opt.GoVersion) + l.Runner.Stats.PrintAnalyzerMeasurement = opt.PrintAnalyzerMeasurement + cfg := &packages.Config{} if opt.LintTests { cfg.Tests = true @@ -368,23 +361,24 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string // Individual stats are read atomically, but overall there // is no synchronisation. For printing rough progress // information, this doesn't matter. 
- switch atomic.LoadUint32(&l.Stats.State) { - case lint.StateInitializing: + switch l.Runner.Stats.State() { + case runner.StateInitializing: fmt.Fprintln(os.Stderr, "Status: initializing") - case lint.StateGraph: + case runner.StateLoadPackageGraph: fmt.Fprintln(os.Stderr, "Status: loading package graph") - case lint.StateProcessing: - fmt.Fprintf(os.Stderr, "Packages: %d/%d initial, %d/%d total; Workers: %d/%d; Problems: %d\n", - atomic.LoadUint32(&l.Stats.ProcessedInitialPackages), - atomic.LoadUint32(&l.Stats.InitialPackages), - atomic.LoadUint32(&l.Stats.ProcessedPackages), - atomic.LoadUint32(&l.Stats.TotalPackages), - atomic.LoadUint32(&l.Stats.ActiveWorkers), - atomic.LoadUint32(&l.Stats.TotalWorkers), - atomic.LoadUint32(&l.Stats.Problems), + case runner.StateBuildActionGraph: + fmt.Fprintln(os.Stderr, "Status: building action graph") + case runner.StateProcessing: + fmt.Fprintf(os.Stderr, "Packages: %d/%d initial, %d/%d total; Workers: %d/%d\n", + l.Runner.Stats.ProcessedInitialPackages(), + l.Runner.Stats.InitialPackages(), + l.Runner.Stats.ProcessedPackages(), + l.Runner.Stats.TotalPackages(), + l.Runner.ActiveWorkers(), + l.Runner.TotalWorkers(), ) - case lint.StateCumulative: - fmt.Fprintln(os.Stderr, "Status: processing cumulative checkers") + case runner.StateFinalizing: + fmt.Fprintln(os.Stderr, "Status: finalizing") } } if len(infoSignals) > 0 { @@ -397,7 +391,6 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string } }() } - ps, err := l.Lint(cfg, paths) return ps, err } @@ -436,7 +429,7 @@ func InitializeAnalyzers(docs map[string]*lint.Documentation, analyzers map[stri vc.Doc = doc.String() if vc.Flags.Usage == nil { fs := flag.NewFlagSet("", flag.PanicOnError) - fs.Var(NewVersionFlag(), "go", "Target Go version") + fs.Var(newVersionFlag(), "go", "Target Go version") vc.Flags = *fs } } diff --git a/lint/runner.go b/lint/runner.go deleted file mode 100644 index 74106ced8..000000000 --- a/lint/runner.go +++ /dev/null 
@@ -1,1114 +0,0 @@ -package lint - -/* -Package loading - -Conceptually, package loading in the runner can be imagined as a -graph-shaped work list. We iteratively pop off leaf nodes (packages -that have no unloaded dependencies) and load data from export data, -our cache, or source. - -Specifically, non-initial packages are loaded from export data and the -fact cache if possible, otherwise from source. Initial packages are -loaded from export data, the fact cache and the (problems, ignores, -config) cache if possible, otherwise from source. - -The appeal of this approach is that it is both simple to implement and -easily parallelizable. Each leaf node can be processed independently, -and new leaf nodes appear as their dependencies are being processed. - -The downside of this approach, however, is that we're doing more work -than necessary. Imagine an initial package A, which has the following -dependency chain: A->B->C->D – in the current implementation, we will -load all 4 packages. However, if package A can be loaded fully from -cached information, then none of its dependencies are necessary, and -we could avoid loading them. - - -Parallelism - -Runner implements parallel processing of packages by spawning one -goroutine per package in the dependency graph, without any semaphores. -Each goroutine initially waits on the completion of all of its -dependencies, thus establishing correct order of processing. Once all -dependencies finish processing, the goroutine will load the package -from export data or source – this loading is guarded by a semaphore, -sized according to the number of CPU cores. This way, we only have as -many packages occupying memory and CPU resources as there are actual -cores to process them. 
- -This combination of unbounded goroutines but bounded package loading -means that if we have many parallel, independent subgraphs, they will -all execute in parallel, while not wasting resources for long linear -chains or trying to process more subgraphs in parallel than the system -can handle. - - -Caching - -We make use of several caches. These caches are Go's export data, our -facts cache, and our (problems, ignores, config) cache. - -Initial packages will either be loaded from a combination of all three -caches, or from source. Non-initial packages will either be loaded -from a combination of export data and facts cache, or from source. - -The facts cache is separate from the (problems, ignores, config) cache -because when we process non-initial packages, we generate facts, but -we discard problems and ignores. - -The facts cache is keyed by (package, analyzer), whereas the -(problems, ignores, config) cache is keyed by (package, list of -analyzes). The difference between the two exists because there are -only a handful of analyses that produce facts, but hundreds of -analyses that don't. Creating one cache entry per fact-generating -analysis is feasible, creating one cache entry per normal analysis has -significant performance and storage overheads. - -The downside of keying by the list of analyzes is, naturally, that a -change in list of analyzes changes the cache key. `staticcheck -checks -A` and `staticcheck -checks A,B` will therefore need their own cache -entries and not reuse each other's work. This problem does not affect -the facts cache. 
- -*/ - -import ( - "bytes" - "encoding/gob" - "encoding/hex" - "fmt" - "go/ast" - "go/token" - "go/types" - "reflect" - "regexp" - "runtime" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "time" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/objectpath" - "honnef.co/go/tools/config" - "honnef.co/go/tools/facts" - "honnef.co/go/tools/internal/cache" - "honnef.co/go/tools/loader" -) - -func init() { - gob.Register(&FileIgnore{}) - gob.Register(&LineIgnore{}) -} - -// If enabled, abuse of the go/analysis API will lead to panics -const sanityCheck = true - -// OPT(dh): for a dependency tree A->B->C->D, if we have cached data -// for B, there should be no need to load C and D individually. Go's -// export data for B contains all the data we need on types, and our -// fact cache could store the union of B, C and D in B. -// -// This may change unused's behavior, however, as it may observe fewer -// interfaces from transitive dependencies. - -// OPT(dh): every single package will have the same value for -// canClearTypes. We could move the Package.decUse method to runner to -// eliminate this field. This is probably not worth it, though. There -// are only thousands of packages, so the field only takes up -// kilobytes of memory. - -// OPT(dh): do we really need the Package.gen field? it's based -// trivially on pkg.results and merely caches the result of a type -// assertion. How often do we actually use the field? - -type Package struct { - // dependents is initially set to 1 plus the number of packages - // that directly import this package. It is atomically decreased - // by 1 every time a dependent has been processed or when the - // package itself has been processed. Once the value reaches zero, - // the package is no longer needed. - dependents uint64 - - *packages.Package - Imports []*Package - initial bool - // fromSource is set to true for packages that have been loaded - // from source. 
This is the case for initial packages, packages - // with missing export data, and packages with no cached facts. - fromSource bool - // hash stores the package hash, as computed by packageHash - hash string - actionID cache.ActionID - done chan struct{} - - resultsMu sync.Mutex - // results maps analyzer IDs to analyzer results. it is - // implemented as a deduplicating concurrent cache. - results []*result - - cfg *config.Config - // gen maps file names to the code generator that created them - gen map[string]facts.Generator - problems []Problem - ignores []Ignore - errs []error - - // these slices are indexed by analysis - facts []map[types.Object][]analysis.Fact - pkgFacts [][]analysis.Fact - - // canClearTypes is set to true if we can discard type - // information after the package and its dependents have been - // processed. This is the case when no cumulative checkers are - // being run. - canClearTypes bool -} - -type cachedPackage struct { - Problems []Problem - Ignores []Ignore - Config *config.Config -} - -func (pkg *Package) decUse() { - ret := atomic.AddUint64(&pkg.dependents, ^uint64(0)) - if ret == 0 { - // nobody depends on this package anymore - if pkg.canClearTypes { - pkg.Types = nil - } - pkg.facts = nil - pkg.pkgFacts = nil - - for _, imp := range pkg.Imports { - imp.decUse() - } - } -} - -type result struct { - v interface{} - err error - ready chan struct{} -} - -type Runner struct { - cache *cache.Cache - goVersion int - stats *Stats - repeatAnalyzers uint - - analyzerIDs analyzerIDs - problemsCacheKey string - - // limits parallelism of loading packages - loadSem chan struct{} -} - -type analyzerIDs struct { - m map[*analysis.Analyzer]int -} - -func (ids analyzerIDs) get(a *analysis.Analyzer) int { - id, ok := ids.m[a] - if !ok { - panic(fmt.Sprintf("no analyzer ID for %s", a.Name)) - } - return id -} - -type Fact struct { - Path string - Fact analysis.Fact -} - -type analysisAction struct { - analyzer *analysis.Analyzer - analyzerID int - 
pkg *Package - newPackageFacts []analysis.Fact - problems []Problem - - pkgFacts map[*types.Package][]analysis.Fact -} - -func (ac *analysisAction) String() string { - return fmt.Sprintf("%s @ %s", ac.analyzer, ac.pkg) -} - -func (ac *analysisAction) allObjectFacts() []analysis.ObjectFact { - out := make([]analysis.ObjectFact, 0, len(ac.pkg.facts[ac.analyzerID])) - for obj, facts := range ac.pkg.facts[ac.analyzerID] { - for _, fact := range facts { - out = append(out, analysis.ObjectFact{ - Object: obj, - Fact: fact, - }) - } - } - return out -} - -func (ac *analysisAction) allPackageFacts() []analysis.PackageFact { - out := make([]analysis.PackageFact, 0, len(ac.pkgFacts)) - for pkg, facts := range ac.pkgFacts { - for _, fact := range facts { - out = append(out, analysis.PackageFact{ - Package: pkg, - Fact: fact, - }) - } - } - return out -} - -func (ac *analysisAction) importObjectFact(obj types.Object, fact analysis.Fact) bool { - if sanityCheck && len(ac.analyzer.FactTypes) == 0 { - panic("analysis doesn't export any facts") - } - for _, f := range ac.pkg.facts[ac.analyzerID][obj] { - if reflect.TypeOf(f) == reflect.TypeOf(fact) { - reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) - return true - } - } - return false -} - -func (ac *analysisAction) importPackageFact(pkg *types.Package, fact analysis.Fact) bool { - if sanityCheck && len(ac.analyzer.FactTypes) == 0 { - panic("analysis doesn't export any facts") - } - for _, f := range ac.pkgFacts[pkg] { - if reflect.TypeOf(f) == reflect.TypeOf(fact) { - reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) - return true - } - } - return false -} - -func (ac *analysisAction) exportObjectFact(obj types.Object, fact analysis.Fact) { - if sanityCheck && len(ac.analyzer.FactTypes) == 0 { - panic("analysis doesn't export any facts") - } - ac.pkg.facts[ac.analyzerID][obj] = append(ac.pkg.facts[ac.analyzerID][obj], fact) -} - -func (ac *analysisAction) exportPackageFact(fact analysis.Fact) { - if 
sanityCheck && len(ac.analyzer.FactTypes) == 0 { - panic("analysis doesn't export any facts") - } - ac.pkgFacts[ac.pkg.Types] = append(ac.pkgFacts[ac.pkg.Types], fact) - ac.newPackageFacts = append(ac.newPackageFacts, fact) -} - -func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { - p := Problem{ - Pos: DisplayPosition(pass.Fset, d.Pos), - End: DisplayPosition(pass.Fset, d.End), - Message: d.Message, - Check: pass.Analyzer.Name, - } - for _, r := range d.Related { - p.Related = append(p.Related, Related{ - Pos: DisplayPosition(pass.Fset, r.Pos), - End: DisplayPosition(pass.Fset, r.End), - Message: r.Message, - }) - } - ac.problems = append(ac.problems, p) -} - -func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { - ac.pkg.resultsMu.Lock() - res := ac.pkg.results[r.analyzerIDs.get(ac.analyzer)] - if res != nil { - ac.pkg.resultsMu.Unlock() - <-res.ready - return res.v, res.err - } else { - res = &result{ - ready: make(chan struct{}), - } - ac.pkg.results[r.analyzerIDs.get(ac.analyzer)] = res - ac.pkg.resultsMu.Unlock() - - defer func() { - res.v = ret - res.err = err - close(res.ready) - }() - - pass := new(analysis.Pass) - *pass = analysis.Pass{ - Analyzer: ac.analyzer, - Fset: ac.pkg.Fset, - Files: ac.pkg.Syntax, - // type information may be nil or may be populated. if it is - // nil, it will get populated later. 
- Pkg: ac.pkg.Types, - TypesInfo: ac.pkg.TypesInfo, - TypesSizes: ac.pkg.TypesSizes, - ResultOf: map[*analysis.Analyzer]interface{}{}, - ImportObjectFact: ac.importObjectFact, - ImportPackageFact: ac.importPackageFact, - ExportObjectFact: ac.exportObjectFact, - ExportPackageFact: ac.exportPackageFact, - Report: func(d analysis.Diagnostic) { - ac.report(pass, d) - }, - AllObjectFacts: ac.allObjectFacts, - AllPackageFacts: ac.allPackageFacts, - } - - if !ac.pkg.initial { - // Don't report problems in dependencies - pass.Report = func(analysis.Diagnostic) {} - } - return r.runAnalysisUser(pass, ac) - } -} - -func (r *Runner) loadCachedPackage(pkg *Package, analyzers []*analysis.Analyzer) (cachedPackage, bool) { - // OPT(dh): we can cache this computation, it'll be the same for all packages - id := cache.Subkey(pkg.actionID, "data "+r.problemsCacheKey) - - b, _, err := r.cache.GetBytes(id) - if err != nil { - return cachedPackage{}, false - } - var cpkg cachedPackage - if err := gob.NewDecoder(bytes.NewReader(b)).Decode(&cpkg); err != nil { - return cachedPackage{}, false - } - return cpkg, true -} - -func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bool) { - if len(a.FactTypes) == 0 { - return nil, true - } - - var facts []Fact - // Look in the cache for facts - aID := passActionID(pkg, a) - aID = cache.Subkey(aID, "facts") - b, _, err := r.cache.GetBytes(aID) - if err != nil { - // No cached facts, analyse this package like a user-provided one, but ignore diagnostics - return nil, false - } - - if err := gob.NewDecoder(bytes.NewReader(b)).Decode(&facts); err != nil { - // Cached facts are broken, analyse this package like a user-provided one, but ignore diagnostics - return nil, false - } - return facts, true -} - -type dependencyError struct { - dep string - err error -} - -func (err dependencyError) nested() dependencyError { - if o, ok := err.err.(dependencyError); ok { - return o.nested() - } - return err -} - -func (err 
dependencyError) Error() string { - if o, ok := err.err.(dependencyError); ok { - return o.Error() - } - return fmt.Sprintf("error running dependency %s: %s", err.dep, err.err) -} - -func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysisAction { - aid := r.analyzerIDs.get(a) - ac := &analysisAction{ - analyzer: a, - analyzerID: aid, - pkg: pkg, - } - - if len(a.FactTypes) == 0 { - return ac - } - - // Merge all package facts of dependencies - ac.pkgFacts = map[*types.Package][]analysis.Fact{} - seen := map[*Package]struct{}{} - var dfs func(*Package) - dfs = func(pkg *Package) { - if _, ok := seen[pkg]; ok { - return - } - seen[pkg] = struct{}{} - s := pkg.pkgFacts[aid] - ac.pkgFacts[pkg.Types] = s[0:len(s):len(s)] - for _, imp := range pkg.Imports { - dfs(imp) - } - } - dfs(pkg) - - return ac -} - -// analyzes that we always want to run, even if they're not being run -// explicitly or as dependencies. these are necessary for the inner -// workings of the runner. 
-var injectedAnalyses = []*analysis.Analyzer{facts.Generated, config.Analyzer} - -func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { - if !ac.pkg.fromSource { - panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg)) - } - - // User-provided package, analyse it - // First analyze it with dependencies - for _, req := range ac.analyzer.Requires { - acReq := r.makeAnalysisAction(req, ac.pkg) - ret, err := r.runAnalysis(acReq) - if err != nil { - // We couldn't run a dependency, no point in going on - return nil, dependencyError{req.Name, err} - } - - pass.ResultOf[req] = ret - } - - // Then with this analyzer - var ret interface{} - for i := uint(0); i < r.repeatAnalyzers+1; i++ { - var err error - t := time.Now() - ret, err = ac.analyzer.Run(pass) - r.stats.MeasureAnalyzer(ac.analyzer, ac.pkg, time.Since(t)) - if err != nil { - return nil, err - } - } - - if len(ac.analyzer.FactTypes) > 0 { - // Merge new facts into the package and persist them. 
- var facts []Fact - for _, fact := range ac.newPackageFacts { - id := r.analyzerIDs.get(ac.analyzer) - ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact) - facts = append(facts, Fact{"", fact}) - } - for obj, afacts := range ac.pkg.facts[ac.analyzerID] { - if obj.Pkg() != ac.pkg.Package.Types { - continue - } - path, err := objectpath.For(obj) - if err != nil { - continue - } - for _, fact := range afacts { - facts = append(facts, Fact{string(path), fact}) - } - } - - if err := r.cacheData(facts, ac.pkg, ac.analyzer, "facts"); err != nil { - return nil, err - } - } - - return ret, nil -} - -func (r *Runner) cacheData(v interface{}, pkg *Package, a *analysis.Analyzer, subkey string) error { - buf := &bytes.Buffer{} - if err := gob.NewEncoder(buf).Encode(v); err != nil { - return err - } - aID := passActionID(pkg, a) - aID = cache.Subkey(aID, subkey) - if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil { - return err - } - return nil -} - -func NewRunner(stats *Stats) (*Runner, error) { - cache, err := cache.Default() - if err != nil { - return nil, err - } - - return &Runner{ - cache: cache, - stats: stats, - }, nil -} - -// Run loads packages corresponding to patterns and analyses them with -// analyzers. It returns the loaded packages, which contain reported -// diagnostics as well as extracted ignore directives. -// -// Note that diagnostics have not been filtered at this point yet, to -// accommodate cumulative analyzes that require additional steps to -// produce diagnostics. 
-func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer, hasCumulative bool) ([]*Package, error) { - checkerNames := make([]string, len(analyzers)) - for i, a := range analyzers { - checkerNames[i] = a.Name - } - sort.Strings(checkerNames) - r.problemsCacheKey = strings.Join(checkerNames, " ") - - var allAnalyzers []*analysis.Analyzer - r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}} - id := 0 - seen := map[*analysis.Analyzer]struct{}{} - var dfs func(a *analysis.Analyzer) - dfs = func(a *analysis.Analyzer) { - if _, ok := seen[a]; ok { - return - } - seen[a] = struct{}{} - allAnalyzers = append(allAnalyzers, a) - r.analyzerIDs.m[a] = id - id++ - for _, f := range a.FactTypes { - gob.Register(f) - } - for _, req := range a.Requires { - dfs(req) - } - } - for _, a := range analyzers { - if v := a.Flags.Lookup("go"); v != nil { - v.Value.Set(fmt.Sprintf("1.%d", r.goVersion)) - } - dfs(a) - } - for _, a := range injectedAnalyses { - dfs(a) - } - // Run all analyzers on all packages (subject to further - // restrictions enforced later). This guarantees that if analyzer - // A1 depends on A2, and A2 has facts, that A2 will run on the - // dependencies of user-provided packages, even though A1 won't. - analyzers = allAnalyzers - - var dcfg packages.Config - if cfg != nil { - dcfg = *cfg - } - - atomic.StoreUint32(&r.stats.State, StateGraph) - initialPkgs, err := loader.Graph(dcfg, patterns...) 
- if err != nil { - return nil, err - } - defer r.cache.Trim() - - var allPkgs []*Package - m := map[*packages.Package]*Package{} - packages.Visit(initialPkgs, nil, func(l *packages.Package) { - m[l] = &Package{ - Package: l, - results: make([]*result, len(r.analyzerIDs.m)), - facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)), - pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)), - done: make(chan struct{}), - // every package needs itself - dependents: 1, - canClearTypes: !hasCumulative, - } - allPkgs = append(allPkgs, m[l]) - for i := range m[l].facts { - m[l].facts[i] = map[types.Object][]analysis.Fact{} - } - for _, err := range l.Errors { - m[l].errs = append(m[l].errs, err) - } - for _, v := range l.Imports { - m[v].dependents++ - m[l].Imports = append(m[l].Imports, m[v]) - } - - m[l].hash, err = r.packageHash(m[l]) - m[l].actionID = packageActionID(m[l]) - if err != nil { - m[l].errs = append(m[l].errs, err) - } - }) - - pkgs := make([]*Package, len(initialPkgs)) - for i, l := range initialPkgs { - pkgs[i] = m[l] - pkgs[i].initial = true - } - - atomic.StoreUint32(&r.stats.InitialPackages, uint32(len(initialPkgs))) - atomic.StoreUint32(&r.stats.TotalPackages, uint32(len(allPkgs))) - atomic.StoreUint32(&r.stats.State, StateProcessing) - - var wg sync.WaitGroup - wg.Add(len(allPkgs)) - r.loadSem = make(chan struct{}, runtime.GOMAXPROCS(-1)) - atomic.StoreUint32(&r.stats.TotalWorkers, uint32(cap(r.loadSem))) - for _, pkg := range allPkgs { - pkg := pkg - go func() { - r.processPkg(pkg, analyzers) - - if pkg.initial { - atomic.AddUint32(&r.stats.ProcessedInitialPackages, 1) - } - atomic.AddUint32(&r.stats.Problems, uint32(len(pkg.problems))) - wg.Done() - }() - } - wg.Wait() - - return pkgs, nil -} - -var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?`) - -func parsePos(pos string) (token.Position, int, error) { - if pos == "-" || pos == "" { - return token.Position{}, 0, nil - } - parts := posRe.FindStringSubmatch(pos) - if parts 
== nil { - return token.Position{}, 0, fmt.Errorf("malformed position %q", pos) - } - file := parts[1] - line, _ := strconv.Atoi(parts[2]) - col, _ := strconv.Atoi(parts[3]) - return token.Position{ - Filename: file, - Line: line, - Column: col, - }, len(parts[0]), nil -} - -// loadPkg loads a Go package. It may be loaded from a combination of -// caches, or from source. -func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { - if pkg.Types != nil { - panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package)) - } - - if pkg.initial { - // Try to load cached package - cpkg, ok := r.loadCachedPackage(pkg, analyzers) - if ok { - pkg.problems = cpkg.Problems - pkg.ignores = cpkg.Ignores - pkg.cfg = cpkg.Config - } else { - pkg.fromSource = true - return loader.LoadFromSource(pkg.Package) - } - } - - // At this point we're either working with a non-initial package, - // or we managed to load cached problems for the package. We still - // need export data and facts. - - // OPT(dh): we don't need type information for this package if no - // other package depends on it. this may be the case for initial - // packages. - - // Load package from export data - if err := loader.LoadFromExport(pkg.Package); err != nil { - // We asked Go to give us up to date export data, yet - // we can't load it. There must be something wrong. - // - // Attempt loading from source. This should fail (because - // otherwise there would be export data); we just want to - // get the compile errors. If loading from source succeeds - // we discard the result, anyway. Otherwise we'll fail - // when trying to reload from export data later. 
- // - // FIXME(dh): we no longer reload from export data, so - // theoretically we should be able to continue - pkg.fromSource = true - if err := loader.LoadFromSource(pkg.Package); err != nil { - return err - } - // Make sure this package can't be imported successfully - pkg.Package.Errors = append(pkg.Package.Errors, packages.Error{ - Pos: "-", - Msg: fmt.Sprintf("could not load export data: %s", err), - Kind: packages.ParseError, - }) - return fmt.Errorf("could not load export data: %s", err) - } - - failed := false - seen := make([]bool, len(r.analyzerIDs.m)) - var dfs func(*analysis.Analyzer) - dfs = func(a *analysis.Analyzer) { - if seen[r.analyzerIDs.get(a)] { - return - } - seen[r.analyzerIDs.get(a)] = true - - if len(a.FactTypes) > 0 { - facts, ok := r.loadCachedFacts(a, pkg) - if !ok { - failed = true - return - } - - for _, f := range facts { - if f.Path == "" { - // This is a package fact - pkg.pkgFacts[r.analyzerIDs.get(a)] = append(pkg.pkgFacts[r.analyzerIDs.get(a)], f.Fact) - continue - } - obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) - if err != nil { - // Be lenient about these errors. For example, when - // analysing io/ioutil from source, we may get a fact - // for methods on the devNull type, and objectpath - // will happily create a path for them. However, when - // we later load io/ioutil from export data, the path - // no longer resolves. - // - // If an exported type embeds the unexported type, - // then (part of) the unexported type will become part - // of the type information and our path will resolve - // again. 
- continue - } - pkg.facts[r.analyzerIDs.get(a)][obj] = append(pkg.facts[r.analyzerIDs.get(a)][obj], f.Fact) - } - } - - for _, req := range a.Requires { - dfs(req) - } - } - for _, a := range analyzers { - dfs(a) - } - - if !failed { - return nil - } - - // We failed to load some cached facts - pkg.fromSource = true - // XXX we added facts to the maps, we need to get rid of those - return loader.LoadFromSource(pkg.Package) -} - -type analysisError struct { - analyzer *analysis.Analyzer - pkg *Package - err error -} - -func (err analysisError) Error() string { - return fmt.Sprintf("error running analyzer %s on %s: %s", err.analyzer, err.pkg, err.err) -} - -// processPkg processes a package. This involves loading the package, -// either from export data or from source. For packages loaded from -// source, the provides analyzers will be run on the package. -func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { - defer func() { - // Clear information we no longer need. Make sure to do this - // when returning from processPkg so that we clear - // dependencies, not just initial packages. - pkg.TypesInfo = nil - pkg.Syntax = nil - pkg.results = nil - - atomic.AddUint32(&r.stats.ProcessedPackages, 1) - pkg.decUse() - close(pkg.done) - }() - - // Ensure all packages have the generated map and config. This is - // required by internals of the runner. Analyses that themselves - // make use of either have an explicit dependency so that other - // runners work correctly, too. - analyzers = append(analyzers[0:len(analyzers):len(analyzers)], injectedAnalyses...) - - if len(pkg.errs) != 0 { - return - } - - for _, imp := range pkg.Imports { - <-imp.done - if len(imp.errs) > 0 { - if imp.initial { - // Don't print the error of the dependency since it's - // an initial package and we're already printing the - // error. 
- pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s", imp, pkg)) - } else { - var s string - for _, err := range imp.errs { - s += "\n\t" + err.Error() - } - pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s: %s", imp, pkg, s)) - } - return - } - } - if pkg.PkgPath == "unsafe" { - pkg.Types = types.Unsafe - return - } - - r.loadSem <- struct{}{} - atomic.AddUint32(&r.stats.ActiveWorkers, 1) - defer func() { - <-r.loadSem - atomic.AddUint32(&r.stats.ActiveWorkers, ^uint32(0)) - }() - if err := r.loadPkg(pkg, analyzers); err != nil { - pkg.errs = append(pkg.errs, err) - return - } - - // A package's object facts is the union of all of its dependencies. - for _, imp := range pkg.Imports { - for ai, m := range imp.facts { - for obj, facts := range m { - pkg.facts[ai][obj] = facts[0:len(facts):len(facts)] - } - } - } - - if !pkg.fromSource { - // Nothing left to do for the package. - return - } - - // Run analyses on initial packages and those missing facts - var wg sync.WaitGroup - wg.Add(len(analyzers)) - errs := make([]error, len(analyzers)) - var acs []*analysisAction - for i, a := range analyzers { - i := i - a := a - ac := r.makeAnalysisAction(a, pkg) - acs = append(acs, ac) - go func() { - defer wg.Done() - // Only initial packages and packages with missing - // facts will have been loaded from source. 
- if pkg.initial || len(a.FactTypes) > 0 { - if _, err := r.runAnalysis(ac); err != nil { - errs[i] = analysisError{a, pkg, err} - return - } - } - }() - } - wg.Wait() - - depErrors := map[dependencyError]int{} - for _, err := range errs { - if err == nil { - continue - } - switch err := err.(type) { - case analysisError: - switch err := err.err.(type) { - case dependencyError: - depErrors[err.nested()]++ - default: - pkg.errs = append(pkg.errs, err) - } - default: - pkg.errs = append(pkg.errs, err) - } - } - for err, count := range depErrors { - pkg.errs = append(pkg.errs, - fmt.Errorf("could not run %s@%s, preventing %d analyzers from running: %s", err.dep, pkg, count, err.err)) - } - - // We can't process ignores at this point because `unused` needs - // to see more than one package to make its decision. - // - // OPT(dh): can't we guard this block of code by pkg.initial? - ignores, problems := parseDirectives(pkg.Package) - pkg.ignores = append(pkg.ignores, ignores...) - pkg.problems = append(pkg.problems, problems...) - for _, ac := range acs { - pkg.problems = append(pkg.problems, ac.problems...) - } - - if pkg.initial { - // Only initial packages have these analyzers run, and only - // initial packages need these. - if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil { - pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config) - } - pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]facts.Generator) - } - - // In a previous version of the code, we would throw away all type - // information and reload it from export data. That was - // nonsensical. The *types.Package doesn't keep any information - // live that export data wouldn't also. We only need to discard - // the AST and the TypesInfo maps; that happens after we return - // from processPkg. 
-} - -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil - } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} - -// parseDirectives extracts all linter directives from the source -// files of the package. Malformed directives are returned as problems. -func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { - var ignores []Ignore - var problems []Problem - - for _, f := range pkg.Syntax { - found := false - commentLoop: - for _, cg := range f.Comments { - for _, c := range cg.List { - if strings.Contains(c.Text, "//lint:") { - found = true - break commentLoop - } - } - } - if !found { - continue - } - cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) - for node, cgs := range cm { - for _, cg := range cgs { - for _, c := range cg.List { - if !strings.HasPrefix(c.Text, "//lint:") { - continue - } - cmd, args := parseDirective(c.Text) - switch cmd { - case "ignore", "file-ignore": - if len(args) < 2 { - p := Problem{ - Pos: DisplayPosition(pkg.Fset, c.Pos()), - Message: "malformed linter directive; missing the required reason field?", - Severity: Error, - Check: "compile", - } - problems = append(problems, p) - continue - } - default: - // unknown directive, ignore - continue - } - checks := strings.Split(args[0], ",") - pos := DisplayPosition(pkg.Fset, node.Pos()) - var ig Ignore - switch cmd { - case "ignore": - ig = &LineIgnore{ - File: pos.Filename, - Line: pos.Line, - Checks: checks, - Pos: DisplayPosition(pkg.Fset, c.Pos()), - } - case "file-ignore": - ig = &FileIgnore{ - File: pos.Filename, - Checks: checks, - } - } - ignores = append(ignores, ig) - } - } - } - } - - return ignores, problems -} - -// packageHash computes a package's hash. 
The hash is based on all Go -// files that make up the package, as well as the hashes of imported -// packages. -func (r *Runner) packageHash(pkg *Package) (string, error) { - key := cache.NewHash("package hash") - fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - fmt.Fprintf(key, "go %d\n", r.goVersion) - for _, f := range pkg.CompiledGoFiles { - h, err := cache.FileHash(f) - if err != nil { - return "", err - } - fmt.Fprintf(key, "file %s %x\n", f, h) - } - - // Actually load the configuration to calculate its hash. This - // will take into consideration inheritance of configuration - // files, as well as the default configuration. - // - // OPT(dh): doing this means we'll load the config twice: once for - // computing the hash, and once when analyzing the package from - // source. - cdir := config.Dir(pkg.GoFiles) - if cdir == "" { - fmt.Fprintf(key, "file %s %x\n", config.ConfigName, [cache.HashSize]byte{}) - } else { - cfg, err := config.Load(cdir) - if err != nil { - return "", err - } - h := cache.NewHash(config.ConfigName) - if _, err := h.Write([]byte(cfg.String())); err != nil { - return "", err - } - fmt.Fprintf(key, "file %s %x\n", config.ConfigName, h.Sum()) - } - - imps := make([]*Package, len(pkg.Imports)) - copy(imps, pkg.Imports) - sort.Slice(imps, func(i, j int) bool { - return imps[i].PkgPath < imps[j].PkgPath - }) - for _, dep := range imps { - if dep.PkgPath == "unsafe" { - continue - } - - fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, dep.hash) - } - h := key.Sum() - return hex.EncodeToString(h[:]), nil -} - -func packageActionID(pkg *Package) cache.ActionID { - key := cache.NewHash("package ID") - fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - fmt.Fprintf(key, "pkghash %s\n", pkg.hash) - return key.Sum() -} - -// passActionID computes an ActionID for an analysis pass. 
-func passActionID(pkg *Package, analyzer *analysis.Analyzer) cache.ActionID { - return cache.Subkey(pkg.actionID, fmt.Sprintf("analyzer %s", analyzer.Name)) -} diff --git a/lint/stats.go b/lint/stats.go deleted file mode 100644 index 85eb97844..000000000 --- a/lint/stats.go +++ /dev/null @@ -1,38 +0,0 @@ -package lint - -import ( - "time" - - "golang.org/x/tools/go/analysis" -) - -const ( - StateInitializing = 0 - StateGraph = 1 - StateProcessing = 2 - StateCumulative = 3 -) - -type Stats struct { - State uint32 - - InitialPackages uint32 - TotalPackages uint32 - ProcessedPackages uint32 - ProcessedInitialPackages uint32 - Problems uint32 - ActiveWorkers uint32 - TotalWorkers uint32 - PrintAnalyzerMeasurement func(*analysis.Analyzer, *Package, time.Duration) -} - -type AnalysisMeasurementKey struct { - Analysis string - Pkg string -} - -func (s *Stats) MeasureAnalyzer(analysis *analysis.Analyzer, pkg *Package, d time.Duration) { - if s.PrintAnalyzerMeasurement != nil { - s.PrintAnalyzerMeasurement(analysis, pkg, d) - } -} diff --git a/loader/buildid.go b/loader/buildid.go new file mode 100644 index 000000000..32c026b7d --- /dev/null +++ b/loader/buildid.go @@ -0,0 +1,238 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +import ( + "bytes" + "debug/elf" + "errors" + "fmt" + "io" + "os" + "strconv" + "strings" +) + +var errBuildIDMalformed = fmt.Errorf("malformed object file") + +var ( + bangArch = []byte("!") + pkgdef = []byte("__.PKGDEF") + goobject = []byte("go object ") + buildid = []byte("build id ") +) + +// ReadFile reads the build ID from an archive or executable file. 
+func ReadFile(name string) (id string, err error) { + f, err := os.Open(name) + if err != nil { + return "", err + } + defer f.Close() + + buf := make([]byte, 8) + if _, err := f.ReadAt(buf, 0); err != nil { + return "", err + } + if string(buf) != "!\n" { + if string(buf) == "\n" { + return "", errors.New("unsupported") + } + return readBinary(name, f) + } + + // Read just enough of the target to fetch the build ID. + // The archive is expected to look like: + // + // ! + // __.PKGDEF 0 0 0 644 7955 ` + // go object darwin amd64 devel X:none + // build id "b41e5c45250e25c9fd5e9f9a1de7857ea0d41224" + // + // The variable-sized strings are GOOS, GOARCH, and the experiment list (X:none). + // Reading the first 1024 bytes should be plenty. + data := make([]byte, 1024) + n, err := io.ReadFull(f, data) + if err != nil && n == 0 { + return "", err + } + + tryGccgo := func() (string, error) { + return readGccgoArchive(name, f) + } + + // Archive header. + for i := 0; ; i++ { // returns during i==3 + j := bytes.IndexByte(data, '\n') + if j < 0 { + return tryGccgo() + } + line := data[:j] + data = data[j+1:] + switch i { + case 0: + if !bytes.Equal(line, bangArch) { + return tryGccgo() + } + case 1: + if !bytes.HasPrefix(line, pkgdef) { + return tryGccgo() + } + case 2: + if !bytes.HasPrefix(line, goobject) { + return tryGccgo() + } + case 3: + if !bytes.HasPrefix(line, buildid) { + // Found the object header, just doesn't have a build id line. + // Treat as successful, with empty build id. + return "", nil + } + id, err := strconv.Unquote(string(line[len(buildid):])) + if err != nil { + return tryGccgo() + } + return id, nil + } + } +} + +// readGccgoArchive tries to parse the archive as a standard Unix +// archive file, and fetch the build ID from the _buildid.o entry. +// The _buildid.o entry is written by (*Builder).gccgoBuildIDELFFile +// in cmd/go/internal/work/exec.go. 
+func readGccgoArchive(name string, f *os.File) (string, error) { + bad := func() (string, error) { + return "", &os.PathError{Op: "parse", Path: name, Err: errBuildIDMalformed} + } + + off := int64(8) + for { + if _, err := f.Seek(off, io.SeekStart); err != nil { + return "", err + } + + // TODO(iant): Make a debug/ar package, and use it + // here and in cmd/link. + var hdr [60]byte + if _, err := io.ReadFull(f, hdr[:]); err != nil { + if err == io.EOF { + // No more entries, no build ID. + return "", nil + } + return "", err + } + off += 60 + + sizeStr := strings.TrimSpace(string(hdr[48:58])) + size, err := strconv.ParseInt(sizeStr, 0, 64) + if err != nil { + return bad() + } + + name := strings.TrimSpace(string(hdr[:16])) + if name == "_buildid.o/" { + sr := io.NewSectionReader(f, off, size) + e, err := elf.NewFile(sr) + if err != nil { + return bad() + } + s := e.Section(".go.buildid") + if s == nil { + return bad() + } + data, err := s.Data() + if err != nil { + return bad() + } + return string(data), nil + } + + off += size + if off&1 != 0 { + off++ + } + } +} + +var ( + goBuildPrefix = []byte("\xff Go build ID: \"") + goBuildEnd = []byte("\"\n \xff") + + elfPrefix = []byte("\x7fELF") + + machoPrefixes = [][]byte{ + {0xfe, 0xed, 0xfa, 0xce}, + {0xfe, 0xed, 0xfa, 0xcf}, + {0xce, 0xfa, 0xed, 0xfe}, + {0xcf, 0xfa, 0xed, 0xfe}, + } +) + +var readSize = 32 * 1024 // changed for testing + +// readBinary reads the build ID from a binary. +// +// ELF binaries store the build ID in a proper PT_NOTE section. +// +// Other binary formats are not so flexible. For those, the linker +// stores the build ID as non-instruction bytes at the very beginning +// of the text segment, which should appear near the beginning +// of the file. This is clumsy but fairly portable. Custom locations +// can be added for other binary types as needed, like we did for ELF. +func readBinary(name string, f *os.File) (id string, err error) { + // Read the first 32 kB of the binary file. 
+ // That should be enough to find the build ID. + // In ELF files, the build ID is in the leading headers, + // which are typically less than 4 kB, not to mention 32 kB. + // In Mach-O files, there's no limit, so we have to parse the file. + // On other systems, we're trying to read enough that + // we get the beginning of the text segment in the read. + // The offset where the text segment begins in a hello + // world compiled for each different object format today: + // + // Plan 9: 0x20 + // Windows: 0x600 + // + data := make([]byte, readSize) + _, err = io.ReadFull(f, data) + if err == io.ErrUnexpectedEOF { + err = nil + } + if err != nil { + return "", err + } + + if bytes.HasPrefix(data, elfPrefix) { + return readELF(name, f, data) + } + for _, m := range machoPrefixes { + if bytes.HasPrefix(data, m) { + return readMacho(name, f, data) + } + } + return readRaw(name, data) +} + +// readRaw finds the raw build ID stored in text segment data. +func readRaw(name string, data []byte) (id string, err error) { + i := bytes.Index(data, goBuildPrefix) + if i < 0 { + // Missing. Treat as successful but build ID empty. + return "", nil + } + + j := bytes.Index(data[i+len(goBuildPrefix):], goBuildEnd) + if j < 0 { + return "", &os.PathError{Op: "parse", Path: name, Err: errBuildIDMalformed} + } + + quoted := data[i+len(goBuildPrefix)-1 : i+len(goBuildPrefix)+j+1] + id, err = strconv.Unquote(string(quoted)) + if err != nil { + return "", &os.PathError{Op: "parse", Path: name, Err: errBuildIDMalformed} + } + return id, nil +} diff --git a/loader/hash.go b/loader/hash.go new file mode 100644 index 000000000..68c067a8d --- /dev/null +++ b/loader/hash.go @@ -0,0 +1,83 @@ +package loader + +import ( + "fmt" + "runtime" + "sort" + "strings" + + "honnef.co/go/tools/internal/cache" +) + +// computeHash computes a package's hash. The hash is based on all Go +// files that make up the package, as well as the hashes of imported +// packages. 
+func computeHash(pkg *PackageSpec) (cache.ActionID, error) { + key := cache.NewHash("package " + pkg.PkgPath) + fmt.Fprintf(key, "goos %s goarch %s\n", runtime.GOOS, runtime.GOARCH) + fmt.Fprintf(key, "import %q\n", pkg.PkgPath) + + // Compute the hashes of all files making up the package. As an + // optimization, we use the build ID that Go already computed for + // us, because it is virtually identical to hashed all + // CompiledGoFiles. + success := false + if pkg.ExportFile != "" { + id, err := getBuildid(pkg.ExportFile) + if err == nil { + if idx := strings.IndexRune(id, '/'); idx > -1 { + fmt.Fprintf(key, "files %s\n", id[:idx]) + success = true + } + } + } + if !success { + for _, f := range pkg.CompiledGoFiles { + h, err := cache.FileHash(f) + if err != nil { + return cache.ActionID{}, err + } + fmt.Fprintf(key, "file %s %x\n", f, h) + } + } + + imps := make([]*PackageSpec, 0, len(pkg.Imports)) + for _, v := range pkg.Imports { + imps = append(imps, v) + } + sort.Slice(imps, func(i, j int) bool { + return imps[i].PkgPath < imps[j].PkgPath + }) + + for _, dep := range imps { + if dep.ExportFile == "" { + fmt.Fprintf(key, "import %s \n", dep.PkgPath) + } else { + id, err := getBuildid(dep.ExportFile) + if err == nil { + fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, id) + } else { + fh, err := cache.FileHash(dep.ExportFile) + if err != nil { + return cache.ActionID{}, err + } + fmt.Fprintf(key, "import %s %x\n", dep.PkgPath, fh) + } + } + } + return key.Sum(), nil +} + +var buildidCache = map[string]string{} + +func getBuildid(f string) (string, error) { + if h, ok := buildidCache[f]; ok { + return h, nil + } + h, err := ReadFile(f) + if err != nil { + return "", err + } + buildidCache[f] = h + return h, nil +} diff --git a/loader/loader.go b/loader/loader.go index a14f274d2..3ecd51b9a 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -8,30 +8,107 @@ import ( "go/scanner" "go/token" "go/types" - "log" "os" + "time" + + "honnef.co/go/tools/config" + 
"honnef.co/go/tools/internal/cache" + "honnef.co/go/tools/internal/go/gcexportdata" - "golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/go/packages" ) +type PackageSpec struct { + ID string + Name string + PkgPath string + // Errors that occured while building the import graph. These will + // primarily be parse errors or failure to resolve imports, but + // may also be other errors. + Errors []packages.Error + GoFiles []string + CompiledGoFiles []string + OtherFiles []string + ExportFile string + Imports map[string]*PackageSpec + TypesSizes types.Sizes + Hash cache.ActionID + + Config config.Config +} + +func (spec *PackageSpec) String() string { + return spec.ID +} + +type Package struct { + *PackageSpec + + // Errors that occured while loading the package. These will + // primarily be parse or type errors, but may also be lower-level + // failures such as file-system ones. + Errors []packages.Error + Types *types.Package + Fset *token.FileSet + Syntax []*ast.File + TypesInfo *types.Info +} + // Graph resolves patterns and returns packages with all the // information required to later load type information, and optionally // syntax trees. // // The provided config can set any setting with the exception of Mode. -func Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) { - cfg.Mode = packages.NeedName | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedTypesSizes - pkgs, err := packages.Load(&cfg, patterns...) +func Graph(cfg *packages.Config, patterns ...string) ([]*PackageSpec, error) { + var dcfg packages.Config + if cfg != nil { + dcfg = *cfg + } + dcfg.Mode = packages.NeedName | + packages.NeedImports | + packages.NeedDeps | + packages.NeedExportsFile | + packages.NeedFiles | + packages.NeedCompiledGoFiles | + packages.NeedTypesSizes + pkgs, err := packages.Load(&dcfg, patterns...) 
if err != nil { return nil, err } - fset := token.NewFileSet() + + m := map[*packages.Package]*PackageSpec{} packages.Visit(pkgs, nil, func(pkg *packages.Package) { - pkg.Fset = fset + spec := &PackageSpec{ + ID: pkg.ID, + Name: pkg.Name, + PkgPath: pkg.PkgPath, + Errors: pkg.Errors, + GoFiles: pkg.GoFiles, + CompiledGoFiles: pkg.CompiledGoFiles, + OtherFiles: pkg.OtherFiles, + ExportFile: pkg.ExportFile, + Imports: map[string]*PackageSpec{}, + TypesSizes: pkg.TypesSizes, + } + for path, imp := range pkg.Imports { + spec.Imports[path] = m[imp] + } + if cdir := config.Dir(pkg.GoFiles); cdir != "" { + cfg, err := config.Load(cdir) + if err != nil { + spec.Errors = append(spec.Errors, convertError(err)...) + } + spec.Config = cfg + } else { + spec.Config = config.DefaultConfig + } + spec.Hash, err = computeHash(spec) + if err != nil { + spec.Errors = append(spec.Errors, convertError(err)...) + } + m[pkg] = spec }) - - n := 0 + out := make([]*PackageSpec, 0, len(pkgs)) for _, pkg := range pkgs { if len(pkg.CompiledGoFiles) == 0 && len(pkg.Errors) == 0 && pkg.PkgPath != "unsafe" { // If a package consists only of test files, then @@ -44,86 +121,124 @@ func Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) // errors. continue } - pkgs[n] = pkg - n++ + out = append(out, m[pkg]) } - return pkgs[:n], nil + + return out, nil +} + +type program struct { + fset *token.FileSet + packages map[string]*types.Package } -// LoadFromExport loads a package from export data. All of its -// dependencies must have been loaded already. -func LoadFromExport(pkg *packages.Package) error { - pkg.IllTyped = true - for path, pkg := range pkg.Imports { - if pkg.Types == nil { - return fmt.Errorf("dependency %q hasn't been loaded yet", path) +type Stats struct { + Source time.Duration + Export map[*PackageSpec]time.Duration +} + +// Load loads the package described in spec. 
Imports will be loaded +// from export data, while the package itself will be loaded from +// source. +// +// An error will only be returned for system failures, such as failure +// to read export data from disk. Syntax and type errors, among +// others, will only populate the returned package's Errors field. +func Load(spec *PackageSpec) (*Package, Stats, error) { + prog := &program{ + fset: token.NewFileSet(), + packages: map[string]*types.Package{}, + } + + stats := Stats{ + Export: map[*PackageSpec]time.Duration{}, + } + for _, imp := range spec.Imports { + if imp.PkgPath == "unsafe" { + continue + } + t := time.Now() + _, err := prog.LoadFromExport(imp) + stats.Export[imp] = time.Since(t) + if err != nil { + return nil, stats, err } } - if pkg.ExportFile == "" { - return fmt.Errorf("no export data for %q", pkg.ID) + t := time.Now() + pkg := prog.LoadFromSource(spec) + stats.Source = time.Since(t) + return pkg, stats, nil +} + +// LoadFromExport loads a package from export data. +func (prog *program) LoadFromExport(spec *PackageSpec) (*Package, error) { + // log.Printf("Loading package %s from export", spec) + if spec.ExportFile == "" { + return nil, fmt.Errorf("no export data for %q", spec.ID) } - f, err := os.Open(pkg.ExportFile) + f, err := os.Open(spec.ExportFile) if err != nil { - return err + return nil, err } defer f.Close() r, err := gcexportdata.NewReader(f) if err != nil { - return err - } - - view := make(map[string]*types.Package) // view seen by gcexportdata - seen := make(map[*packages.Package]bool) // all visited packages - var visit func(pkgs map[string]*packages.Package) - visit = func(pkgs map[string]*packages.Package) { - for _, pkg := range pkgs { - if !seen[pkg] { - seen[pkg] = true - view[pkg.PkgPath] = pkg.Types - visit(pkg.Imports) - } - } + return nil, err } - visit(pkg.Imports) - tpkg, err := gcexportdata.Read(r, pkg.Fset, view, pkg.PkgPath) + tpkg, err := gcexportdata.Read(r, prog.fset, prog.packages, spec.PkgPath) if err != nil { - 
return err + return nil, err + } + pkg := &Package{ + PackageSpec: spec, + Types: tpkg, + Fset: prog.fset, } - pkg.Types = tpkg - pkg.IllTyped = false - return nil + // runtime.SetFinalizer(pkg, func(pkg *Package) { + // log.Println("Unloading package", pkg.PkgPath) + // }) + return pkg, nil } // LoadFromSource loads a package from source. All of its dependencies // must have been loaded already. -func LoadFromSource(pkg *packages.Package) error { - pkg.IllTyped = true - pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) +func (prog *program) LoadFromSource(spec *PackageSpec) *Package { + if len(spec.Errors) > 0 { + panic("LoadFromSource called on package with errors") + } + + pkg := &Package{ + PackageSpec: spec, + Types: types.NewPackage(spec.PkgPath, spec.Name), + Syntax: make([]*ast.File, len(spec.CompiledGoFiles)), + Fset: prog.fset, + TypesInfo: &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + }, + } + // runtime.SetFinalizer(pkg, func(pkg *Package) { + // log.Println("Unloading package", pkg.PkgPath) + // }) // OPT(dh): many packages have few files, much fewer than there // are CPU cores. Additionally, parsing each individual file is // very fast. A naive parallel implementation of this loop won't // be faster, and tends to be slower due to extra scheduling, // bookkeeping and potentially false sharing of cache lines. - pkg.Syntax = make([]*ast.File, len(pkg.CompiledGoFiles)) - for i, file := range pkg.CompiledGoFiles { - f, err := parser.ParseFile(pkg.Fset, file, nil, parser.ParseComments) + for i, file := range spec.CompiledGoFiles { + f, err := parser.ParseFile(prog.fset, file, nil, parser.ParseComments) if err != nil { pkg.Errors = append(pkg.Errors, convertError(err)...) 
- return err + return pkg } pkg.Syntax[i] = f } - pkg.TypesInfo = &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), - } - importer := func(path string) (*types.Package, error) { if path == "unsafe" { return types.Unsafe, nil @@ -134,14 +249,15 @@ func LoadFromSource(pkg *packages.Package) error { // we'll encounter the raw C import. return nil, errors.New("cgo preprocessing failed") } - imp := pkg.Imports[path] - if imp == nil { - return nil, nil + ispecpkg := spec.Imports[path] + if ispecpkg == nil { + return nil, fmt.Errorf("trying to import %q in the context of %q returned nil PackageSpec", path, spec) } - if len(imp.Errors) > 0 { - return nil, imp.Errors[0] + ipkg := prog.packages[ispecpkg.PkgPath] + if ipkg == nil { + return nil, fmt.Errorf("trying to import %q (%q) in the context of %q returned nil PackageSpec", ispecpkg.PkgPath, path, spec) } - return imp.Types, nil + return ipkg, nil } tc := &types.Config{ Importer: importerFunc(importer), @@ -149,12 +265,8 @@ func LoadFromSource(pkg *packages.Package) error { pkg.Errors = append(pkg.Errors, convertError(err)...) }, } - err := types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) - if err != nil { - return err - } - pkg.IllTyped = false - return nil + types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) + return pkg } func convertError(err error) []packages.Error { @@ -192,15 +304,11 @@ func convertError(err error) []packages.Error { }) default: - // unexpected impoverished error from parser? errs = append(errs, packages.Error{ Pos: "-", Msg: err.Error(), Kind: packages.UnknownError, }) - - // If you see this error message, please file a bug. 
- log.Printf("internal error: error %q (%T) without position", err, err) } return errs } diff --git a/loader/note.go b/loader/note.go new file mode 100644 index 000000000..a56ee7005 --- /dev/null +++ b/loader/note.go @@ -0,0 +1,207 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +import ( + "bytes" + "debug/elf" + "debug/macho" + "encoding/binary" + "fmt" + "io" + "os" +) + +func readAligned4(r io.Reader, sz int32) ([]byte, error) { + full := (sz + 3) &^ 3 + data := make([]byte, full) + _, err := io.ReadFull(r, data) + if err != nil { + return nil, err + } + data = data[:sz] + return data, nil +} + +func ReadELFNote(filename, name string, typ int32) ([]byte, error) { + f, err := elf.Open(filename) + if err != nil { + return nil, err + } + defer f.Close() + for _, sect := range f.Sections { + if sect.Type != elf.SHT_NOTE { + continue + } + r := sect.Open() + for { + var namesize, descsize, noteType int32 + err = binary.Read(r, f.ByteOrder, &namesize) + if err != nil { + if err == io.EOF { + break + } + return nil, fmt.Errorf("read namesize failed: %v", err) + } + err = binary.Read(r, f.ByteOrder, &descsize) + if err != nil { + return nil, fmt.Errorf("read descsize failed: %v", err) + } + err = binary.Read(r, f.ByteOrder, ¬eType) + if err != nil { + return nil, fmt.Errorf("read type failed: %v", err) + } + noteName, err := readAligned4(r, namesize) + if err != nil { + return nil, fmt.Errorf("read name failed: %v", err) + } + desc, err := readAligned4(r, descsize) + if err != nil { + return nil, fmt.Errorf("read desc failed: %v", err) + } + if name == string(noteName) && typ == noteType { + return desc, nil + } + } + } + return nil, nil +} + +var elfGoNote = []byte("Go\x00\x00") +var elfGNUNote = []byte("GNU\x00") + +// The Go build ID is stored in a note described by an ELF PT_NOTE prog +// header. 
The caller has already opened filename, to get f, and read +// at least 4 kB out, in data. +func readELF(name string, f *os.File, data []byte) (buildid string, err error) { + // Assume the note content is in the data, already read. + // Rewrite the ELF header to set shnum to 0, so that we can pass + // the data to elf.NewFile and it will decode the Prog list but not + // try to read the section headers and the string table from disk. + // That's a waste of I/O when all we care about is the Prog list + // and the one ELF note. + switch elf.Class(data[elf.EI_CLASS]) { + case elf.ELFCLASS32: + data[48] = 0 + data[49] = 0 + case elf.ELFCLASS64: + data[60] = 0 + data[61] = 0 + } + + const elfGoBuildIDTag = 4 + const gnuBuildIDTag = 3 + + ef, err := elf.NewFile(bytes.NewReader(data)) + if err != nil { + return "", &os.PathError{Path: name, Op: "parse", Err: err} + } + var gnu string + for _, p := range ef.Progs { + if p.Type != elf.PT_NOTE || p.Filesz < 16 { + continue + } + + var note []byte + if p.Off+p.Filesz < uint64(len(data)) { + note = data[p.Off : p.Off+p.Filesz] + } else { + // For some linkers, such as the Solaris linker, + // the buildid may not be found in data (which + // likely contains the first 16kB of the file) + // or even the first few megabytes of the file + // due to differences in note segment placement; + // in that case, extract the note data manually. 
+ _, err = f.Seek(int64(p.Off), io.SeekStart) + if err != nil { + return "", err + } + + note = make([]byte, p.Filesz) + _, err = io.ReadFull(f, note) + if err != nil { + return "", err + } + } + + filesz := p.Filesz + off := p.Off + for filesz >= 16 { + nameSize := ef.ByteOrder.Uint32(note) + valSize := ef.ByteOrder.Uint32(note[4:]) + tag := ef.ByteOrder.Uint32(note[8:]) + nname := note[12:16] + if nameSize == 4 && 16+valSize <= uint32(len(note)) && tag == elfGoBuildIDTag && bytes.Equal(nname, elfGoNote) { + return string(note[16 : 16+valSize]), nil + } + + if nameSize == 4 && 16+valSize <= uint32(len(note)) && tag == gnuBuildIDTag && bytes.Equal(nname, elfGNUNote) { + gnu = string(note[16 : 16+valSize]) + } + + nameSize = (nameSize + 3) &^ 3 + valSize = (valSize + 3) &^ 3 + notesz := uint64(12 + nameSize + valSize) + if filesz <= notesz { + break + } + off += notesz + align := p.Align + alignedOff := (off + align - 1) &^ (align - 1) + notesz += alignedOff - off + off = alignedOff + filesz -= notesz + note = note[notesz:] + } + } + + // If we didn't find a Go note, use a GNU note if available. + // This is what gccgo uses. + if gnu != "" { + return gnu, nil + } + + // No note. Treat as successful but build ID empty. + return "", nil +} + +// The Go build ID is stored at the beginning of the Mach-O __text segment. +// The caller has already opened filename, to get f, and read a few kB out, in data. +// Sadly, that's not guaranteed to hold the note, because there is an arbitrary amount +// of other junk placed in the file ahead of the main text. +func readMacho(name string, f *os.File, data []byte) (buildid string, err error) { + // If the data we want has already been read, don't worry about Mach-O parsing. + // This is both an optimization and a hedge against the Mach-O parsing failing + // in the future due to, for example, the name of the __text section changing. 
+ if b, err := readRaw(name, data); b != "" && err == nil { + return b, err + } + + mf, err := macho.NewFile(f) + if err != nil { + return "", &os.PathError{Path: name, Op: "parse", Err: err} + } + + sect := mf.Section("__text") + if sect == nil { + // Every binary has a __text section. Something is wrong. + return "", &os.PathError{Path: name, Op: "parse", Err: fmt.Errorf("cannot find __text section")} + } + + // It should be in the first few bytes, but read a lot just in case, + // especially given our past problems on OS X with the build ID moving. + // There shouldn't be much difference between reading 4kB and 32kB: + // the hard part is getting to the data, not transferring it. + n := sect.Size + if n > uint64(readSize) { + n = uint64(readSize) + } + buf := make([]byte, n) + if _, err := f.ReadAt(buf, int64(sect.Offset)); err != nil { + return "", err + } + + return readRaw(name, buf) +} diff --git a/pattern/match.go b/pattern/match.go index ff039baa7..0c42178f7 100644 --- a/pattern/match.go +++ b/pattern/match.go @@ -7,7 +7,7 @@ import ( "go/types" "reflect" - "honnef.co/go/tools/lint" + "honnef.co/go/tools/code" ) var tokensByString = map[string]Token{ @@ -452,7 +452,7 @@ func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) { obj = m.TypesInfo.ObjectOf(node) switch obj := obj.(type) { case *types.Func: - name = lint.FuncName(obj) + name = code.FuncName(obj) case *types.Builtin: name = obj.Name() default: @@ -464,7 +464,7 @@ func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) { if !ok { return nil, false } - name = lint.FuncName(obj.(*types.Func)) + name = code.FuncName(obj.(*types.Func)) default: return nil, false } diff --git a/report/report.go b/report/report.go index 9b8b6ee74..5b5343617 100644 --- a/report/report.go +++ b/report/report.go @@ -5,12 +5,12 @@ import ( "go/ast" "go/printer" "go/token" + "path/filepath" "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" 
"honnef.co/go/tools/facts" - "honnef.co/go/tools/lint" ) type Options struct { @@ -148,7 +148,7 @@ func Report(pass *analysis.Pass, node Positioner, message string, opts ...Option opt(cfg) } - file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename + file := DisplayPosition(pass.Fset, node.Pos()).Filename if cfg.FilterGenerated { m := pass.ResultOf[facts.Generated].(map[string]facts.Generator) if _, ok := m[file]; ok { @@ -182,3 +182,21 @@ func RenderArgs(pass *analysis.Pass, args []ast.Expr) string { } return strings.Join(ss, ", ") } + +func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { + if p == token.NoPos { + return token.Position{} + } + + // Only use the adjusted position if it points to another Go file. + // This means we'll point to the original file for cgo files, but + // we won't point to a YACC grammar file. + pos := fset.PositionFor(p, false) + adjPos := fset.PositionFor(p, true) + + if filepath.Ext(adjPos.Filename) == ".go" { + return adjPos + } + + return pos +} diff --git a/runner/runner.go b/runner/runner.go new file mode 100644 index 000000000..b088d3e50 --- /dev/null +++ b/runner/runner.go @@ -0,0 +1,1196 @@ +// Package runner implements a go/analysis runner. It makes heavy use +// of on-disk caching to reduce overall memory usage and to speed up +// repeat runs. +// +// Public API +// +// A Runner maps a list of analyzers and package patterns to a list of +// results. Results provide access to diagnostics, directives, errors +// encountered, and information about packages. Results explicitly do +// not contain ASTs or type information. All position information is +// returned in the form of token.Position, not token.Pos. All work +// that requires access to the loaded representation of a package has +// to occur inside analyzers. +// +// Planning and execution +// +// Analyzing packages is split into two phases: planning and +// execution. 
+// +// During planning, a directed acyclic graph of package dependencies +// is computed. We materialize the full graph so that we can execute +// the graph from the bottom up, without keeping unnecessary data in +// memory during a DFS and with simplified parallel execution. +// +// During execution, leaf nodes (nodes with no outstanding +// dependencies) get executed in parallel, bounded by a semaphore +// sized according to the number of CPUs. Conceptually, this happens +// in a loop, processing new leaf nodes as they appear, until no more +// nodes are left. In the actual implementation, nodes know their +// dependents, and the last dependency of a node to be processed is +// responsible for scheduling its dependent. +// +// The graph is rooted at a synthetic root node. Upon execution of the +// root node, the algorithm terminates. +// +// Analyzing a package repeats the same planning + execution steps, +// but this time on a graph of analyzers for the package. Parallel +// execution of individual analyzers is bounded by the same semaphore +// as executing packages. +// +// Parallelism +// +// Actions are executed in parallel where the dependency graph allows. +// Overall parallelism is bounded by a semaphore, sized according to +// runtime.NumCPU(). Each concurrently processed package takes up a +// token, as does each analyzer – but a package can always execute at +// least one analyzer, using the package's token. +// +// Depending on the overall shape of the graph, there may be NumCPU +// packages running a single analyzer each, a single package running +// NumCPU analyzers, or anything in between. +// +// Total memory consumption grows roughly linearly with the number of +// CPUs, while total execution time is inversely proportional to the +// number of CPUs. Overall, parallelism is affected by the shape of +// the dependency graph. A lot of inter-connected packages will see +// less parallelism than a lot of independent packages. 
+// +// Caching +// +// The runner caches facts, directives and diagnostics in a +// content-addressable cache that is designed after Go's own cache. +// Additionally, it makes use of Go's export data. +// +// This cache not only speeds up repeat runs, it also reduces peak +// memory usage. When we've analyzed a package, we cache the results +// and drop them from memory. When a dependent needs any of this +// information, or when analysis is complete and we wish to render the +// results, the data gets loaded from disk again. +// +// Data only exists in memory when it is immediately needed, not +// retained for possible future uses. This trades increased CPU usage +// for reduced memory usage. A single dependency may be loaded many +// times over, but it greatly reduces peak memory usage, as an +// arbitrary amount of time may pass between analyzing a dependency +// and its dependent, during which other packages will be processed. +package runner + +// OPT(dh): we could reduce disk storage usage of cached data by +// compressing it, either directly at the cache layer, or by feeding +// compressed data to the cache. Of course doing so may negatively +// affect CPU usage, and there are lower hanging fruit, such as +// needing to cache less data in the first place. + +// OPT(dh): right now, each package is analyzed completely +// independently. Each package loads all of its dependencies from +// export data and cached facts. If we have two packages A and B, +// which both depend on C, and which both get analyzed in parallel, +// then C will be loaded twice. This wastes CPU time and memory. It +// would be nice if we could reuse a single C for the analysis of both +// A and B. +// +// We can't reuse the actual types.Package or facts, because each +// package gets its own token.FileSet. Sharing a global FileSet has +// several drawbacks, including increased memory usage and running the +// risk of running out of FileSet address space. 
+// +// We could however avoid loading the same raw export data from disk +// twice, as well as deserializing gob data twice. One possible +// solution would be a duplicate-suppressing in-memory cache that +// caches data for a limited amount of time. When the same package +// needs to be loaded twice in close succession, we can reuse work, +// without holding unnecessary data in memory for an extended period +// of time. +// +// We would likely need to do extensive benchmarking to figure out how +// long to keep data around to find a sweetspot where we reduce CPU +// load without increasing memory usage. +// +// We can probably populate the cache after we've analyzed a package, +// on the assumption that it will have to be loaded again in the near +// future. + +import ( + "bytes" + "encoding/gob" + "fmt" + "go/token" + "go/types" + "io" + "os" + "reflect" + "runtime" + "sort" + "strings" + "sync/atomic" + "time" + + "honnef.co/go/tools/config" + "honnef.co/go/tools/facts" + "honnef.co/go/tools/internal/cache" + tsync "honnef.co/go/tools/internal/sync" + "honnef.co/go/tools/loader" + "honnef.co/go/tools/report" + "honnef.co/go/tools/unused" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/objectpath" +) + +type Diagnostic struct { + Position token.Position + End token.Position + Category string + Message string + + SuggestedFixed []SuggestedFix + Related []RelatedInformation +} + +// RelatedInformation provides additional context for a diagnostic. +type RelatedInformation struct { + Position token.Position + End token.Position + Message string +} + +type SuggestedFix struct { + Message string + TextEdits []TextEdit +} + +type TextEdit struct { + Position token.Position + End token.Position + NewText []byte +} + +// A Result describes the result of analyzing a single package. +// +// It holds references to cached diagnostics and directives. 
They can +// be loaded on demand with Diagnostics and Directives respectively. +type Result struct { + Package *loader.PackageSpec + Config config.Config + + Failed bool + Errors []error + // Action results, paths to files + diagnostics string + directives string + unused string +} + +// Diagnostics loads and returns the diagnostics found while analyzing +// the package. +func (r Result) Diagnostics() ([]Diagnostic, error) { + if r.Failed { + panic("Diagnostics called on failed Result") + } + if r.diagnostics == "" { + // this package was only a dependency + return nil, nil + } + var diags []Diagnostic + f, err := os.Open(r.diagnostics) + if err != nil { + return nil, fmt.Errorf("failed loading diagnostics: %w", err) + } + defer f.Close() + dec := gob.NewDecoder(f) + for { + var diag Diagnostic + err := dec.Decode(&diag) + if err != nil { + if err == io.EOF { + break + } + return nil, fmt.Errorf("failed loading diagnostics: %w", err) + } + diags = append(diags, diag) + } + return diags, nil +} + +// Directives loads and returns the directives found while analyzing +// the package. 
+func (r Result) Directives() ([]facts.SerializedDirective, error) { + if r.Failed { + panic("Directives called on failed Result") + } + if r.directives == "" { + // this package was only a dependency + return nil, nil + } + var dirs []facts.SerializedDirective + f, err := os.Open(r.directives) + if err != nil { + return nil, fmt.Errorf("failed loading directives: %w", err) + } + defer f.Close() + if err := gob.NewDecoder(f).Decode(&dirs); err != nil { + return nil, fmt.Errorf("failed loading directives: %w", err) + } + return dirs, nil +} + +func (r Result) Unused() (unused.SerializedResult, error) { + if r.Failed { + panic("Unused called on failed Result") + } + if r.unused == "" { + // this package was only a dependency + return unused.SerializedResult{}, nil + } + var res unused.SerializedResult + f, err := os.Open(r.unused) + if err != nil { + return unused.SerializedResult{}, fmt.Errorf("failed loading unused: %w", err) + } + defer f.Close() + if err := gob.NewDecoder(f).Decode(&res); err != nil { + return unused.SerializedResult{}, fmt.Errorf("failed loading unused: %w", err) + } + return res, nil +} + +type action interface { + Deps() []action + Triggers() []action + DecrementPending() bool + MarkFailed() + IsFailed() bool + AddError(error) +} + +type baseAction struct { + // Action description + + deps []action + triggers []action + pending uint32 + + // Action results + + // failed is set to true if the action couldn't be processed. This + // may either be due to an error specific to this action, in + // which case the errors field will be populated, or due to a + // dependency being marked as failed, in which case errors will be + // empty. 
+ failed bool + errors []error +} + +func (act *baseAction) Deps() []action { return act.deps } +func (act *baseAction) Triggers() []action { return act.triggers } +func (act *baseAction) DecrementPending() bool { + return atomic.AddUint32(&act.pending, ^uint32(0)) == 0 +} +func (act *baseAction) MarkFailed() { act.failed = true } +func (act *baseAction) IsFailed() bool { return act.failed } +func (act *baseAction) AddError(err error) { act.errors = append(act.errors, err) } + +// packageAction describes the act of loading a package, fully +// analyzing it, and storing the results. +type packageAction struct { + baseAction + + // Action description + + Package *loader.PackageSpec + factsOnly bool + hash cache.ActionID + + // Action results + + cfg config.Config + vetx string + directives string + diagnostics string + unused string +} + +func (act *packageAction) String() string { + return fmt.Sprintf("packageAction(%s)", act.Package) +} + +type objectFactKey struct { + Obj types.Object + Type reflect.Type +} + +type packageFactKey struct { + Pkg *types.Package + Type reflect.Type +} + +type gobFact struct { + PkgPath string + ObjPath string + Fact analysis.Fact +} + +// analyzerAction describes the act of analyzing a package with a +// single analyzer. +type analyzerAction struct { + baseAction + + // Action description + + Analyzer *analysis.Analyzer + + // Action results + + // We can store actual results here without worrying about memory + // consumption because analyzer actions get garbage collected once + // a package has been fully analyzed. + Result interface{} + Diagnostics []analysis.Diagnostic + ObjectFacts map[objectFactKey]analysis.Fact + PackageFacts map[packageFactKey]analysis.Fact + Pass *analysis.Pass +} + +func (act *analyzerAction) String() string { + return fmt.Sprintf("analyzerAction(%s)", act.Analyzer) +} + +// A Runner executes analyzers on packages. 
+type Runner struct { + Stats Stats + GoVersion int + + // Config that gets merged with per-package configs + cfg config.Config + cache *cache.Cache + semaphore tsync.Semaphore +} + +type subrunner struct { + *Runner + analyzers []*analysis.Analyzer + analyzerNames string +} + +// New returns a new Runner. +func New(cfg config.Config) (*Runner, error) { + cache, err := cache.Default() + if err != nil { + return nil, err + } + + return &Runner{ + cfg: cfg, + cache: cache, + semaphore: tsync.NewSemaphore(runtime.NumCPU()), + }, nil +} + +func newSubrunner(r *Runner, analyzers []*analysis.Analyzer) *subrunner { + analyzerNames := make([]string, len(analyzers)) + for i, a := range analyzers { + analyzerNames[i] = a.Name + } + sort.Strings(analyzerNames) + return &subrunner{ + Runner: r, + analyzers: analyzers, + analyzerNames: strings.Join(analyzerNames, ","), + } +} + +func newPackageActionRoot(pkg *loader.PackageSpec, cache map[*loader.PackageSpec]*packageAction) *packageAction { + a := newPackageAction(pkg, cache) + a.factsOnly = false + return a +} + +func newPackageAction(pkg *loader.PackageSpec, cache map[*loader.PackageSpec]*packageAction) *packageAction { + if a, ok := cache[pkg]; ok { + return a + } + + a := &packageAction{ + Package: pkg, + factsOnly: true, // will be overwritten by any call to Action + } + cache[pkg] = a + + // OPT(dh): pre-allocate a.errors + if len(pkg.Errors) > 0 { + for _, err := range pkg.Errors { + a.errors = append(a.errors, err) + } + a.failed = true + + // We don't need to process our imports if this package is + // already broken. 
+ return a + } + + // OPT(dh): pre-allocate a.deps + for _, dep := range pkg.Imports { + if dep.PkgPath == "unsafe" { + continue + } + depa := newPackageAction(dep, cache) + depa.triggers = append(depa.triggers, a) + a.deps = append(a.deps, depa) + + if depa.failed { + a.failed = true + } + } + // sort dependencies because the list of dependencies is part of + // the cache key + sort.Slice(a.deps, func(i, j int) bool { + return a.deps[i].(*packageAction).Package.ID < a.deps[j].(*packageAction).Package.ID + }) + + a.pending = uint32(len(a.deps)) + + return a +} + +func newAnalyzerAction(an *analysis.Analyzer, cache map[*analysis.Analyzer]*analyzerAction) *analyzerAction { + if a, ok := cache[an]; ok { + return a + } + + a := &analyzerAction{ + Analyzer: an, + ObjectFacts: map[objectFactKey]analysis.Fact{}, + PackageFacts: map[packageFactKey]analysis.Fact{}, + } + cache[an] = a + for _, dep := range an.Requires { + depa := newAnalyzerAction(dep, cache) + depa.triggers = append(depa.triggers, a) + a.deps = append(a.deps, depa) + } + a.pending = uint32(len(a.deps)) + return a +} + +func getCachedFiles(cache *cache.Cache, ids []cache.ActionID, out []*string) error { + for i, id := range ids { + var err error + *out[i], _, err = cache.GetFile(id) + if err != nil { + return err + } + } + return nil +} + +func (r *subrunner) do(act action) error { + a := act.(*packageAction) + defer func() { + r.Stats.finishPackage() + if !a.factsOnly { + r.Stats.finishInitialPackage() + } + }() + + // compute hash of action + a.cfg = a.Package.Config.Merge(r.cfg) + h := cache.NewHash("staticcheck " + a.Package.PkgPath) + + // Note that we do not filter the list of analyzers by the + // package's configuration. We don't allow configuration to + // accidentally break dependencies between analyzers, and it's + // easier to always run all checks and filter the output. This + // also makes cached data more reusable. + + // OPT(dh): not all changes in configuration invalidate cached + // data. 
specifically, when a.factsOnly == true, we only care + // about checks that produce facts, and settings that affect those + // checks. + + // Config used for constructing the hash; this config doesn't have + // Checks populated, because we always run all checks. + hashCfg := a.cfg + hashCfg.Checks = nil + // note that we don't hash staticcheck's version; it is set as the + // salt by a package main. + fmt.Fprintf(h, "cfg %#v\n", hashCfg) + fmt.Fprintf(h, "pkg %x\n", a.Package.Hash) + fmt.Fprintf(h, "analyzers %s\n", r.analyzerNames) + fmt.Fprintf(h, "go 1.%d\n", r.GoVersion) + + // OPT(dh): do we actually need to hash vetx? can we not assume + // that for identical inputs, staticcheck will produce identical + // vetx? + for _, dep := range a.deps { + dep := dep.(*packageAction) + vetxHash, err := cache.FileHash(dep.vetx) + if err != nil { + return fmt.Errorf("failed computing hash: %w", err) + } + fmt.Fprintf(h, "vetout %q %x\n", dep.Package.PkgPath, vetxHash) + } + a.hash = cache.ActionID(h.Sum()) + + // try to fetch hashed data + ids := make([]cache.ActionID, 0, 4) + ids = append(ids, cache.Subkey(a.hash, "vetx")) + if !a.factsOnly { + ids = append(ids, + cache.Subkey(a.hash, "directives"), + cache.Subkey(a.hash, "diagnostics"), + // OPT(dh): only load "unused" data if we're running the U1000 analyzer + cache.Subkey(a.hash, "unused"), + ) + } + if err := getCachedFiles(r.cache, ids, []*string{&a.vetx, &a.directives, &a.diagnostics, &a.unused}); err != nil { + result, err := r.doUncached(a) + if err != nil { + return err + } + if a.failed { + return nil + } + + // OPT(dh): doUncached returns facts in one format, only for + // us to immediately convert them to another format. + + // OPT(dh) instead of collecting all object facts and encoding + // them after analysis finishes, we could encode them as we + // go. however, that would require some locking. 
+ gobFacts := &bytes.Buffer{} + enc := gob.NewEncoder(gobFacts) + for _, f := range result.objFacts { + objPath, err := objectpath.For(f.Object) + if err != nil { + continue + } + gf := gobFact{ + PkgPath: f.Object.Pkg().Path(), + ObjPath: string(objPath), + Fact: f.Fact, + } + if err := enc.Encode(gf); err != nil { + return fmt.Errorf("failed gob encoding data: %w", err) + } + } + for _, f := range result.pkgFacts { + gf := gobFact{ + PkgPath: f.Package.Path(), + Fact: f.Fact, + } + if err := enc.Encode(gf); err != nil { + return fmt.Errorf("failed gob encoding data: %w", err) + } + } + + // OPT(dh): We could sort gobFacts for more consistent output, + // but it doesn't matter. The hash of a package includes all + // of its files, so whether the vetx hash changes or not, a + // change to a package requires re-analyzing all dependents, + // even if the vetx data stayed the same. See also the note at + // the top of loader/hash.go. + + a.vetx, err = r.writeCache(a, "vetx", gobFacts.Bytes()) + if err != nil { + return err + } + + dirs := make([]facts.SerializedDirective, len(result.dirs)) + for i, dir := range result.dirs { + dirs[i] = facts.SerializeDirective(dir, result.lpkg.Fset) + } + a.directives, err = r.writeCacheGob(a, "directives", dirs) + if err != nil { + return err + } + + gobDiags := &bytes.Buffer{} + enc = gob.NewEncoder(gobDiags) + for _, diag := range result.diags { + d := Diagnostic{ + Position: report.DisplayPosition(result.lpkg.Fset, diag.Pos), + End: report.DisplayPosition(result.lpkg.Fset, diag.End), + Category: diag.Category, + Message: diag.Message, + } + for _, sugg := range diag.SuggestedFixes { + s := SuggestedFix{ + Message: sugg.Message, + } + for _, edit := range sugg.TextEdits { + s.TextEdits = append(s.TextEdits, TextEdit{ + Position: report.DisplayPosition(result.lpkg.Fset, edit.Pos), + End: report.DisplayPosition(result.lpkg.Fset, edit.End), + NewText: edit.NewText, + }) + } + d.SuggestedFixed = append(d.SuggestedFixed, s) + } + for 
_, rel := range diag.Related { + d.Related = append(d.Related, RelatedInformation{ + Position: report.DisplayPosition(result.lpkg.Fset, rel.Pos), + End: report.DisplayPosition(result.lpkg.Fset, rel.End), + Message: rel.Message, + }) + } + if err := enc.Encode(d); err != nil { + return fmt.Errorf("failed gob encoding data: %w", err) + } + } + a.diagnostics, err = r.writeCache(a, "diagnostics", gobDiags.Bytes()) + if err != nil { + return err + } + + a.unused, err = r.writeCacheGob(a, "unused", result.unused) + if err != nil { + return err + } + } + return nil +} + +// ActiveWorkers returns the number of currently running workers. +func (r *Runner) ActiveWorkers() int { + return r.semaphore.Len() +} + +// TotalWorkers returns the maximum number of possible workers. +func (r *Runner) TotalWorkers() int { + return r.semaphore.Cap() +} + +func (r *Runner) writeCache(a *packageAction, kind string, data []byte) (string, error) { + h := cache.Subkey(a.hash, kind) + if err := r.cache.PutBytes(h, data); err != nil { + return "", fmt.Errorf("failed caching data: %w", err) + } + // OPT(dh): change PutBytes signature so we get the file name right away, not requiring a call to GetFile + f, _, err := r.cache.GetFile(h) + if err != nil { + return "", fmt.Errorf("failed finding cache entry: %w", err) + } + return f, nil +} + +func (r *Runner) writeCacheGob(a *packageAction, kind string, data interface{}) (string, error) { + buf := bytes.NewBuffer(nil) + if err := gob.NewEncoder(buf).Encode(data); err != nil { + return "", fmt.Errorf("failed gob encoding data: %w", err) + } + return r.writeCache(a, kind, buf.Bytes()) +} + +type packageActionResult struct { + objFacts []analysis.ObjectFact + pkgFacts []analysis.PackageFact + diags []analysis.Diagnostic + unused unused.SerializedResult + dirs []facts.Directive + lpkg *loader.Package +} + +func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { + // OPT(dh): for a -> b; c -> b; if both a and b are being + // 
processed concurrently, we shouldn't load b's export data + // twice. + + pkg, _, err := loader.Load(a.Package) + if err != nil { + return packageActionResult{}, err + } + + if len(pkg.Errors) > 0 { + // this handles errors that occured during type-checking the + // package in loader.Load + for _, err := range pkg.Errors { + a.errors = append(a.errors, err) + } + a.failed = true + return packageActionResult{}, nil + } + + // OPT(dh): instead of parsing directives twice (twice because + // U1000 depends on the facts.Directives analyzer), reuse the + // existing result + dirs := facts.ParseDirectives(pkg.Syntax, pkg.Fset) + res, err := r.runAnalyzers(a, pkg) + + return packageActionResult{ + objFacts: res.objFacts, + pkgFacts: res.pkgFacts, + diags: res.diagnostics, + unused: res.unused, + dirs: dirs, + lpkg: pkg, + }, err +} + +func pkgPaths(root *types.Package) map[string]*types.Package { + out := map[string]*types.Package{} + var dfs func(*types.Package) + dfs = func(pkg *types.Package) { + if _, ok := out[pkg.Path()]; ok { + return + } + out[pkg.Path()] = pkg + for _, imp := range pkg.Imports() { + dfs(imp) + } + } + dfs(root) + return out +} + +func (r *Runner) loadFacts(root *types.Package, dep *packageAction, objFacts map[objectFactKey]analysis.Fact, pkgFacts map[packageFactKey]analysis.Fact) error { + // Load facts of all imported packages + vetx, err := os.Open(dep.vetx) + if err != nil { + return fmt.Errorf("failed loading cached facts: %w", err) + } + defer vetx.Close() + + pathToPkg := pkgPaths(root) + dec := gob.NewDecoder(vetx) + for { + var gf gobFact + err := dec.Decode(&gf) + if err != nil { + if err == io.EOF { + break + } + return fmt.Errorf("failed loading cached facts: %w", err) + } + + pkg, ok := pathToPkg[gf.PkgPath] + if !ok { + continue + } + if gf.ObjPath == "" { + pkgFacts[packageFactKey{ + Pkg: pkg, + Type: reflect.TypeOf(gf.Fact), + }] = gf.Fact + } else { + obj, err := objectpath.Object(pkg, objectpath.Path(gf.ObjPath)) + if err != nil { 
+ continue + } + objFacts[objectFactKey{ + Obj: obj, + Type: reflect.TypeOf(gf.Fact), + }] = gf.Fact + } + } + return nil +} + +func genericHandle(a action, root action, queue chan action, sem *tsync.Semaphore, exec func(a action) error) { + if a == root { + close(queue) + if sem != nil { + sem.Release() + } + return + } + if !a.IsFailed() { + // the action may have already been marked as failed during + // construction of the action graph, for example because of + // unresolved imports. + + for _, dep := range a.Deps() { + if dep.IsFailed() { + // One of our dependencies failed, so mark this package as + // failed and bail. We don't need to record an error for + // this package, the relevant error will have been + // reported by the first package in the chain that failed. + a.MarkFailed() + break + } + } + } + + if !a.IsFailed() { + if err := exec(a); err != nil { + a.MarkFailed() + a.AddError(err) + } + } + if sem != nil { + sem.Release() + } + + for _, t := range a.Triggers() { + if t.DecrementPending() { + queue <- t + } + } +} + +type analyzerRunner struct { + pkg *loader.Package + // object facts of our dependencies; may contain facts of + // analyzers other than the current one + depObjFacts map[objectFactKey]analysis.Fact + // package facts of our dependencies; may contain facts of + // analyzers other than the current one + depPkgFacts map[packageFactKey]analysis.Fact + factsOnly bool + + stats *Stats +} + +func (ar *analyzerRunner) do(act action) error { + a := act.(*analyzerAction) + results := map[*analysis.Analyzer]interface{}{} + // TODO(dh): does this have to be recursive? 
+ for _, dep := range a.deps { + dep := dep.(*analyzerAction) + results[dep.Analyzer] = dep.Result + } + factTypes := map[reflect.Type]struct{}{} + for _, typ := range a.Analyzer.FactTypes { + factTypes[reflect.TypeOf(typ)] = struct{}{} + } + filterFactType := func(typ reflect.Type) bool { + _, ok := factTypes[typ] + return ok + } + a.Pass = &analysis.Pass{ + Analyzer: a.Analyzer, + Fset: ar.pkg.Fset, + Files: ar.pkg.Syntax, + OtherFiles: ar.pkg.OtherFiles, + Pkg: ar.pkg.Types, + TypesInfo: ar.pkg.TypesInfo, + TypesSizes: ar.pkg.TypesSizes, + Report: func(d analysis.Diagnostic) { + if !ar.factsOnly { + if d.Category == "" { + d.Category = a.Analyzer.Name + } + a.Diagnostics = append(a.Diagnostics, d) + } + }, + ResultOf: results, + ImportObjectFact: func(obj types.Object, fact analysis.Fact) bool { + key := objectFactKey{ + Obj: obj, + Type: reflect.TypeOf(fact), + } + if f, ok := ar.depObjFacts[key]; ok { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } else if f, ok := a.ObjectFacts[key]; ok { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } + return false + }, + ImportPackageFact: func(pkg *types.Package, fact analysis.Fact) bool { + key := packageFactKey{ + Pkg: pkg, + Type: reflect.TypeOf(fact), + } + if f, ok := ar.depPkgFacts[key]; ok { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } else if f, ok := a.PackageFacts[key]; ok { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } + return false + }, + ExportObjectFact: func(obj types.Object, fact analysis.Fact) { + key := objectFactKey{ + Obj: obj, + Type: reflect.TypeOf(fact), + } + a.ObjectFacts[key] = fact + }, + ExportPackageFact: func(fact analysis.Fact) { + key := packageFactKey{ + Pkg: ar.pkg.Types, + Type: reflect.TypeOf(fact), + } + a.PackageFacts[key] = fact + }, + AllPackageFacts: func() []analysis.PackageFact { + out := make([]analysis.PackageFact, 0, 
len(ar.depPkgFacts)+len(a.PackageFacts)) + for key, fact := range ar.depPkgFacts { + out = append(out, analysis.PackageFact{ + Package: key.Pkg, + Fact: fact, + }) + } + for key, fact := range a.PackageFacts { + out = append(out, analysis.PackageFact{ + Package: key.Pkg, + Fact: fact, + }) + } + return out + }, + AllObjectFacts: func() []analysis.ObjectFact { + out := make([]analysis.ObjectFact, 0, len(ar.depObjFacts)+len(a.ObjectFacts)) + for key, fact := range ar.depObjFacts { + if filterFactType(key.Type) { + out = append(out, analysis.ObjectFact{ + Object: key.Obj, + Fact: fact, + }) + } + } + for key, fact := range a.ObjectFacts { + if filterFactType(key.Type) { + out = append(out, analysis.ObjectFact{ + Object: key.Obj, + Fact: fact, + }) + } + } + return out + }, + } + + t := time.Now() + res, err := a.Analyzer.Run(a.Pass) + ar.stats.measureAnalyzer(a.Analyzer, ar.pkg.PackageSpec, time.Since(t)) + if err != nil { + return err + } + a.Result = res + return nil +} + +type analysisResult struct { + objFacts []analysis.ObjectFact + pkgFacts []analysis.PackageFact + diagnostics []analysis.Diagnostic + unused unused.SerializedResult +} + +func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (analysisResult, error) { + depObjFacts := map[objectFactKey]analysis.Fact{} + depPkgFacts := map[packageFactKey]analysis.Fact{} + + for _, dep := range pkgAct.deps { + if err := r.loadFacts(pkg.Types, dep.(*packageAction), depObjFacts, depPkgFacts); err != nil { + return analysisResult{}, err + } + } + + // OPT(dh): this computation is the same for all packages + // (depending on act.factsOnly), we should cache it in the runner. + analyzerActionCache := map[*analysis.Analyzer]*analyzerAction{} + root := &analyzerAction{} + for _, a := range r.analyzers { + // When analyzing non-initial packages, we only care about + // analyzers that produce facts. 
+ if !pkgAct.factsOnly || len(a.FactTypes) > 0 { + a := newAnalyzerAction(a, analyzerActionCache) + root.deps = append(root.deps, a) + a.triggers = append(a.triggers, root) + } + } + root.pending = uint32(len(root.deps)) + all := actionList(root) + + ar := &analyzerRunner{ + pkg: pkg, + factsOnly: pkgAct.factsOnly, + depObjFacts: depObjFacts, + depPkgFacts: depPkgFacts, + stats: &r.Stats, + } + queue := make(chan action, len(all)) + for _, a := range all { + if len(a.Deps()) == 0 { + queue <- a + } + } + + for item := range queue { + b := r.semaphore.AcquireMaybe() + if b { + go genericHandle(item, root, queue, &r.semaphore, ar.do) + } else { + // the semaphore is exhausted; run the analysis under the + // token we've acquired for analyzing the package. + genericHandle(item, root, queue, nil, ar.do) + } + } + + var unusedResult unused.SerializedResult + for _, a := range all { + a := a.(*analyzerAction) + + if a != root && a.Analyzer.Name == "U1000" { + // TODO(dh): figure out a clean abstraction, instead of + // special-casing U1000. + unusedResult = unused.Serialize(a.Pass, a.Result.(unused.Result), pkg.Fset) + } + + for key, fact := range a.ObjectFacts { + depObjFacts[key] = fact + } + for key, fact := range a.PackageFacts { + depPkgFacts[key] = fact + } + } + + // OPT(dh): cull objects not reachable via the exported closure + objFacts := make([]analysis.ObjectFact, 0, len(depObjFacts)) + pkgFacts := make([]analysis.PackageFact, 0, len(depPkgFacts)) + for key, fact := range depObjFacts { + objFacts = append(objFacts, analysis.ObjectFact{Object: key.Obj, Fact: fact}) + } + for key, fact := range depPkgFacts { + pkgFacts = append(pkgFacts, analysis.PackageFact{Package: key.Pkg, Fact: fact}) + } + + var diags []analysis.Diagnostic + for _, a := range root.deps { + a := a.(*analyzerAction) + diags = append(diags, a.Diagnostics...) 
+ } + return analysisResult{ + objFacts: objFacts, + pkgFacts: pkgFacts, + diagnostics: diags, + unused: unusedResult, + }, nil +} + +func actionList(root action) []action { + seen := map[action]struct{}{} + all := []action{} + var walk func(action) + walk = func(a action) { + if _, ok := seen[a]; ok { + return + } + seen[a] = struct{}{} + for _, a1 := range a.Deps() { + walk(a1) + } + all = append(all, a) + } + walk(root) + return all +} + +func registerGobTypes(analyzers []*analysis.Analyzer) { + for _, a := range analyzers { + for _, typ := range a.FactTypes { + // FIXME(dh): use RegisterName so we can work around collisions + // in names. For pointer-types, gob incorrectly qualifies + // type names with the package name, not the import path. + gob.Register(typ) + } + } +} + +func allAnalyzers(analyzers []*analysis.Analyzer) []*analysis.Analyzer { + seen := map[*analysis.Analyzer]struct{}{} + out := make([]*analysis.Analyzer, 0, len(analyzers)) + var dfs func(*analysis.Analyzer) + dfs = func(a *analysis.Analyzer) { + if _, ok := seen[a]; ok { + return + } + seen[a] = struct{}{} + out = append(out, a) + for _, dep := range a.Requires { + dfs(dep) + } + } + for _, a := range analyzers { + dfs(a) + } + return out +} + +// Run loads the packages specified by patterns, runs analyzers on +// them and returns the results. Each result corresponds to a single +// package. Results will be returned for all packages, including +// dependencies. Errors specific to packages will be reported in the +// respective results. +// +// If cfg is nil, a default config will be used. Otherwise, cfg will +// be used, with the exception of the Mode field. +// +// Run can be called multiple times on the same Runner and it is safe +// for concurrent use. All runs will share the same semaphore. 
+func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patterns []string) ([]Result, error) { + analyzers = allAnalyzers(analyzers) + registerGobTypes(analyzers) + + for _, a := range analyzers { + flag := a.Flags.Lookup("go") + if flag == nil { + continue + } + // OPT(dh): this is terrible + flag.Value.Set(fmt.Sprintf("1.%d", r.GoVersion)) + } + + r.Stats.setState(StateLoadPackageGraph) + lpkgs, err := loader.Graph(cfg, patterns...) + if err != nil { + return nil, err + } + r.Stats.setInitialPackages(len(lpkgs)) + + r.Stats.setState(StateBuildActionGraph) + packageActionCache := map[*loader.PackageSpec]*packageAction{} + root := &packageAction{} + for _, lpkg := range lpkgs { + a := newPackageActionRoot(lpkg, packageActionCache) + root.deps = append(root.deps, a) + a.triggers = append(a.triggers, root) + } + root.pending = uint32(len(root.deps)) + + all := actionList(root) + queue := make(chan action) + r.Stats.setTotalPackages(len(all) - 1) + + r.Stats.setState(StateProcessing) + go func() { + for _, a := range all { + if len(a.Deps()) == 0 { + queue <- a + } + } + }() + + sr := newSubrunner(r, analyzers) + for item := range queue { + r.semaphore.Acquire() + go genericHandle(item, root, queue, &r.semaphore, func(act action) error { + return sr.do(act) + }) + } + + r.Stats.setState(StateFinalizing) + out := make([]Result, len(all)-1) + for i, item := range all { + item := item.(*packageAction) + if item.Package == nil { + continue + } + out[i] = Result{ + Package: item.Package, + Config: item.cfg, + Failed: item.failed, + Errors: item.errors, + diagnostics: item.diagnostics, + directives: item.directives, + unused: item.unused, + } + } + return out, nil +} diff --git a/runner/stats.go b/runner/stats.go new file mode 100644 index 000000000..ce3d589df --- /dev/null +++ b/runner/stats.go @@ -0,0 +1,48 @@ +package runner + +import ( + "sync/atomic" + "time" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/loader" +) + +const ( + 
StateInitializing = iota + StateLoadPackageGraph + StateBuildActionGraph + StateProcessing + StateFinalizing +) + +type Stats struct { + state uint32 + initialPackages uint32 + totalPackages uint32 + processedPackages uint32 + processedInitialPackages uint32 + + // optional function to call every time an analyzer has finished analyzing a package. + PrintAnalyzerMeasurement func(*analysis.Analyzer, *loader.PackageSpec, time.Duration) +} + +func (s *Stats) setState(state uint32) { atomic.StoreUint32(&s.state, state) } +func (s *Stats) State() int { return int(atomic.LoadUint32(&s.state)) } +func (s *Stats) setInitialPackages(n int) { atomic.StoreUint32(&s.initialPackages, uint32(n)) } +func (s *Stats) InitialPackages() int { return int(atomic.LoadUint32(&s.initialPackages)) } +func (s *Stats) setTotalPackages(n int) { atomic.StoreUint32(&s.totalPackages, uint32(n)) } +func (s *Stats) TotalPackages() int { return int(atomic.LoadUint32(&s.totalPackages)) } + +func (s *Stats) finishPackage() { atomic.AddUint32(&s.processedPackages, 1) } +func (s *Stats) finishInitialPackage() { atomic.AddUint32(&s.processedInitialPackages, 1) } +func (s *Stats) ProcessedPackages() int { return int(atomic.LoadUint32(&s.processedPackages)) } +func (s *Stats) ProcessedInitialPackages() int { + return int(atomic.LoadUint32(&s.processedInitialPackages)) +} + +func (s *Stats) measureAnalyzer(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) { + if s.PrintAnalyzerMeasurement != nil { + s.PrintAnalyzerMeasurement(analysis, pkg, d) + } +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 7d7c5021d..1e9f5fcb3 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -28,7 +28,6 @@ import ( "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/ir" "honnef.co/go/tools/ir/irutil" - "honnef.co/go/tools/lint" . 
"honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/pattern" "honnef.co/go/tools/printf" @@ -2396,7 +2395,7 @@ func CheckCyclicFinalizer(pass *analysis.Pass) (interface{}, error) { } for _, b := range mc.Bindings { if b == v { - pos := lint.DisplayPosition(pass.Fset, mc.Fn.Pos()) + pos := report.DisplayPosition(pass.Fset, mc.Fn.Pos()) report.Report(pass, site, fmt.Sprintf("the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos)) } } @@ -2957,7 +2956,7 @@ func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, e return } - r, ok := rules[lint.FuncName(obj)] + r, ok := rules[code.FuncName(obj)] if !ok { return } diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go index 0f93f4436..7d8e6f3e0 100644 --- a/stylecheck/analysis.go +++ b/stylecheck/analysis.go @@ -1,12 +1,13 @@ package stylecheck import ( - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" "honnef.co/go/tools/config" "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/lint/lintutil" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" ) var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ @@ -38,8 +39,7 @@ var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer Requires: []*analysis.Analyzer{inspect.Analyzer}, }, "ST1012": { - Run: CheckErrorVarNames, - Requires: []*analysis.Analyzer{config.Analyzer}, + Run: CheckErrorVarNames, }, "ST1013": { Run: CheckHTTPStatusCodes, @@ -64,7 +64,7 @@ var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer }, "ST1019": { Run: CheckDuplicatedImports, - Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer}, + Requires: []*analysis.Analyzer{facts.Generated}, }, "ST1020": { Run: CheckExportedFunctionDocs, diff --git a/unused/edge.go b/unused/edge.go index 02e0d09cf..5fcf1465a 100644 --- a/unused/edge.go +++ 
b/unused/edge.go @@ -51,4 +51,5 @@ const ( edgeUnsafeConversion edgeUsedConstant edgeVarDecl + edgeIgnored ) diff --git a/unused/implements.go b/unused/implements.go index 835baac69..f62018572 100644 --- a/unused/implements.go +++ b/unused/implements.go @@ -37,7 +37,7 @@ func sameId(obj types.Object, pkg *types.Package, name string) bool { return pkg.Path() == obj.Pkg().Path() } -func (g *Graph) implements(V types.Type, T *types.Interface, msV *types.MethodSet) ([]*types.Selection, bool) { +func (g *graph) implements(V types.Type, T *types.Interface, msV *types.MethodSet) ([]*types.Selection, bool) { // fast path for common case if T.Empty() { return nil, true diff --git a/unused/testdata/src/alias/alias.go b/unused/testdata/src/alias/alias.go index 911501e59..5d05524db 100644 --- a/unused/testdata/src/alias/alias.go +++ b/unused/testdata/src/alias/alias.go @@ -1,15 +1,15 @@ package main -type t1 struct{} -type t2 struct{} // want `t2` -type t3 struct{} +type t1 struct{} // used +type t2 struct{} // unused +type t3 struct{} // used -type alias1 = t1 -type alias2 = t2 // want `alias2` -type alias3 = t3 -type alias4 = int +type alias1 = t1 // used +type alias2 = t2 // unused +type alias3 = t3 // used +type alias4 = int // used -func main() { +func main() { // used var _ alias1 var _ t3 } diff --git a/unused/testdata/src/anonymous/anonymous.go b/unused/testdata/src/anonymous/anonymous.go index d0577a737..70ac16184 100644 --- a/unused/testdata/src/anonymous/anonymous.go +++ b/unused/testdata/src/anonymous/anonymous.go @@ -2,17 +2,17 @@ package pkg import "fmt" -type Node interface { - position() int +type Node interface { // used + position() int // used } -type noder struct{} +type noder struct{} // used -func (noder) position() int { panic("unreachable") } +func (noder) position() int { panic("unreachable") } // used -func Fn() { +func Fn() { // used nodes := []Node{struct { - noder + noder // used }{}} fmt.Println(nodes) } diff --git 
a/unused/testdata/src/blank/blank.go b/unused/testdata/src/blank/blank.go index ee707b626..fe61bd2f7 100644 --- a/unused/testdata/src/blank/blank.go +++ b/unused/testdata/src/blank/blank.go @@ -2,28 +2,28 @@ package pkg import _ "fmt" -type t1 struct{} // want `t1` -type t2 struct { - _ int +type t1 struct{} // unused +type t2 struct { // used + _ int // used } -type t3 struct{} -type t4 struct{} -type t5 struct{} +type t3 struct{} // used +type t4 struct{} // used +type t5 struct{} // used var _ = t2{} -func fn1() { // want `fn1` +func fn1() { // unused _ = t1{} var _ = t1{} } -func fn2() { +func fn2() { // used _ = t3{} var _ t4 var _ *t5 = nil } -func init() { +func init() { // used fn2() } diff --git a/unused/testdata/src/cgo/cgo.go b/unused/testdata/src/cgo/cgo.go index 4b852d173..8506e9e05 100644 --- a/unused/testdata/src/cgo/cgo.go +++ b/unused/testdata/src/cgo/cgo.go @@ -1,6 +1,6 @@ package pkg //go:cgo_export_dynamic -func foo() {} +func foo() {} // used -func bar() {} // want `bar` +func bar() {} // unused diff --git a/unused/testdata/src/consts/consts.go b/unused/testdata/src/consts/consts.go index 1cab7ddde..7d3636e90 100644 --- a/unused/testdata/src/consts/consts.go +++ b/unused/testdata/src/consts/consts.go @@ -1,35 +1,35 @@ package pkg -const c1 = 1 +const c1 = 1 // used -const c2 = 1 -const c3 = 1 -const c4 = 1 -const C5 = 1 +const c2 = 1 // used +const c3 = 1 // used +const c4 = 1 // used +const C5 = 1 // used const ( - c6 = 0 - c7 - c8 + c6 = 0 // used + c7 // used + c8 // used - c9 // want `c9` - c10 // want `c10` - c11 // want `c11` + c9 // unused + c10 // unused + c11 // unused ) var _ = []int{c3: 1} -type T1 struct { - F1 [c1]int +type T1 struct { // used + F1 [c1]int // used } -func init() { +func init() { // used _ = []int{c2: 1} var _ [c4]int _ = c7 } -func Fn() { - const X = 1 // want `X` +func Fn() { // used + const X = 1 // unused } diff --git a/unused/testdata/src/conversion/conversion.go b/unused/testdata/src/conversion/conversion.go 
index 0821c67da..c487a93fa 100644 --- a/unused/testdata/src/conversion/conversion.go +++ b/unused/testdata/src/conversion/conversion.go @@ -5,57 +5,57 @@ import ( "unsafe" ) -type t1 struct { - a int - b int +type t1 struct { // used + a int // used + b int // used } -type t2 struct { - a int - b int +type t2 struct { // used + a int // used + b int // used } -type t3 struct { - a int - b int // want `b` +type t3 struct { // used + a int // used + b int // unused } -type t4 struct { - a int - b int // want `b` +type t4 struct { // used + a int // used + b int // unused } -type t5 struct { - a int - b int +type t5 struct { // used + a int // used + b int // used } -type t6 struct { - a int - b int +type t6 struct { // used + a int // used + b int // used } -type t7 struct { - a int - b int +type t7 struct { // used + a int // used + b int // used } -type t8 struct { - a int - b int +type t8 struct { // used + a int // used + b int // used } -type t9 struct { - Offset int64 - Err error +type t9 struct { // used + Offset int64 // used + Err error // used } -type t10 struct { - a int - b int +type t10 struct { // used + a int // used + b int // used } -func fn() { +func fn() { // used // All fields in t2 used because they're initialised in t1 v1 := t1{0, 1} v2 := t2(v1) @@ -89,4 +89,4 @@ func fn() { _ = v10 } -func init() { fn() } +func init() { fn() } // used diff --git a/unused/testdata/src/cyclic/cyclic.go b/unused/testdata/src/cyclic/cyclic.go index b9dfc952d..0e259575a 100644 --- a/unused/testdata/src/cyclic/cyclic.go +++ b/unused/testdata/src/cyclic/cyclic.go @@ -1,9 +1,9 @@ package pkg -func a() { // want `a` +func a() { // unused b() } -func b() { // want `b` +func b() { // unused a() } diff --git a/unused/testdata/src/defer/defer.go b/unused/testdata/src/defer/defer.go index a6cfdee73..e24592bbe 100644 --- a/unused/testdata/src/defer/defer.go +++ b/unused/testdata/src/defer/defer.go @@ -1,13 +1,13 @@ package pkg -type t struct{} +type t struct{} // used -func 
(t) fn1() {} -func (t) fn2() {} -func fn1() {} -func fn2() {} +func (t) fn1() {} // used +func (t) fn2() {} // used +func fn1() {} // used +func fn2() {} // used -func Fn() { +func Fn() { // used var v t defer fn1() defer v.fn1() diff --git a/unused/testdata/src/elem/elem.go b/unused/testdata/src/elem/elem.go index 24cbf03cc..e6e04830e 100644 --- a/unused/testdata/src/elem/elem.go +++ b/unused/testdata/src/elem/elem.go @@ -2,15 +2,17 @@ package pkg -type t15 struct{ f151 int } -type a2 [1]t15 +type t15 struct { // used + f151 int // used +} +type a2 [1]t15 // used -type t16 struct{} -type a3 [1][1]t16 +type t16 struct{} // used +type a3 [1][1]t16 // used -func foo() { +func foo() { // used _ = a2{0: {1}} _ = a3{{{}}} } -func init() { foo() } +func init() { foo() } // used diff --git a/unused/testdata/src/embedded_call/embedded_call.go b/unused/testdata/src/embedded_call/embedded_call.go index 196ac0dec..a66cba503 100644 --- a/unused/testdata/src/embedded_call/embedded_call.go +++ b/unused/testdata/src/embedded_call/embedded_call.go @@ -1,20 +1,22 @@ package pkg -var t1 struct { - t2 - t3 - t4 +var t1 struct { // used + t2 // used + t3 // used + t4 // used } -type t2 struct{} -type t3 struct{} -type t4 struct{ t5 } -type t5 struct{} +type t2 struct{} // used +type t3 struct{} // used +type t4 struct { // used + t5 // used +} +type t5 struct{} // used -func (t2) foo() {} -func (t3) bar() {} -func (t5) baz() {} -func init() { +func (t2) foo() {} // used +func (t3) bar() {} // used +func (t5) baz() {} // used +func init() { // used t1.foo() _ = t1.bar t1.baz() diff --git a/unused/testdata/src/embedding/embedding.go b/unused/testdata/src/embedding/embedding.go index b907e2918..535eac28d 100644 --- a/unused/testdata/src/embedding/embedding.go +++ b/unused/testdata/src/embedding/embedding.go @@ -1,77 +1,87 @@ package pkg -type I interface { - f1() - f2() +type I interface { // used + f1() // used + f2() // used } -func init() { +func init() { // used var _ I } -type t1 
struct{} -type T2 struct{ t1 } +type t1 struct{} // used +type T2 struct { // used + t1 // used +} -func (t1) f1() {} -func (T2) f2() {} +func (t1) f1() {} // used +func (T2) f2() {} // used -func Fn() { +func Fn() { // used var v T2 _ = v.t1 } -type I2 interface { - f3() - f4() +type I2 interface { // used + f3() // used + f4() // used } -type t3 struct{} -type t4 struct { - x int // want `x` - y int // want `y` - t3 +type t3 struct{} // used +type t4 struct { // used + x int // unused + y int // unused + t3 // used } -func (*t3) f3() {} -func (*t4) f4() {} +func (*t3) f3() {} // used +func (*t4) f4() {} // used -func init() { +func init() { // used var i I2 = &t4{} i.f3() i.f4() } -type i3 interface { - F() +type i3 interface { // used + F() // used } -type I4 interface { +type I4 interface { // used i3 } -type T5 struct { - t6 +type T5 struct { // used + t6 // used } -type t6 struct { - F int +type t6 struct { // used + F int // used } -type t7 struct{ X int } -type t8 struct{ t7 } -type t9 struct{ t8 } +type t7 struct { // used + X int // used +} +type t8 struct { // used + t7 // used +} +type t9 struct { // used + t8 // used +} var _ = t9{} -type t10 struct{} +type t10 struct{} // used -func (*t10) Foo() {} +func (*t10) Foo() {} // used -type t11 struct{ t10 } +type t11 struct { // used + t10 // used +} var _ = t11{} -type i5 interface{} -type I6 interface { +type i5 interface{} // used +type I6 interface { // used i5 } diff --git a/unused/testdata/src/embedding2/embedding2.go b/unused/testdata/src/embedding2/embedding2.go index 7efb67429..ace443bbd 100644 --- a/unused/testdata/src/embedding2/embedding2.go +++ b/unused/testdata/src/embedding2/embedding2.go @@ -1,28 +1,28 @@ package main -type AA interface { - A() +type AA interface { // used + A() // used } -type BB interface { +type BB interface { // used AA } -type CC interface { +type CC interface { // used BB - C() + C() // used } -func c(cc CC) { +func c(cc CC) { // used cc.A() } -type z struct{} +type z 
struct{} // used -func (z) A() {} -func (z) B() {} -func (z) C() {} +func (z) A() {} // used +func (z) B() {} // used +func (z) C() {} // used -func main() { +func main() { // used c(z{}) } diff --git a/unused/testdata/src/exported_fields/exported_fields.go b/unused/testdata/src/exported_fields/exported_fields.go index 2c909a8c4..1c102e174 100644 --- a/unused/testdata/src/exported_fields/exported_fields.go +++ b/unused/testdata/src/exported_fields/exported_fields.go @@ -1,36 +1,36 @@ package pkg -type t1 struct { - F1 int +type t1 struct { // used + F1 int // used } -type T2 struct { - F2 int +type T2 struct { // used + F2 int // used } -var v struct { - T3 +var v struct { // used + T3 // used } -type T3 struct{} +type T3 struct{} // used -func (T3) Foo() {} +func (T3) Foo() {} // used -func init() { +func init() { // used v.Foo() } -func init() { +func init() { // used _ = t1{} } -type codeResponse struct { - Tree *codeNode `json:"tree"` +type codeResponse struct { // used + Tree *codeNode `json:"tree"` // used } -type codeNode struct { +type codeNode struct { // used } -func init() { +func init() { // used _ = codeResponse{} } diff --git a/unused/testdata/src/exported_fields_main/exported_fields_main.go b/unused/testdata/src/exported_fields_main/exported_fields_main.go index ffb99d990..0a5f85757 100644 --- a/unused/testdata/src/exported_fields_main/exported_fields_main.go +++ b/unused/testdata/src/exported_fields_main/exported_fields_main.go @@ -1,14 +1,14 @@ package main -type t1 struct { - F1 int +type t1 struct { // used + F1 int // used } -type T2 struct { - F2 int +type T2 struct { // used + F2 int // used } -func init() { +func init() { // used _ = t1{} _ = T2{} } diff --git a/unused/testdata/src/exported_method_test/exported_method_test.go b/unused/testdata/src/exported_method_test/exported_method_test.go index 346056d68..698383a69 100644 --- a/unused/testdata/src/exported_method_test/exported_method_test.go +++ 
b/unused/testdata/src/exported_method_test/exported_method_test.go @@ -7,18 +7,18 @@ import ( "testing" ) -type countReadSeeker struct { - io.ReadSeeker - N int64 +type countReadSeeker struct { // used_test + io.ReadSeeker // used_test + N int64 // used_test } -func (rs *countReadSeeker) Read(buf []byte) (int, error) { +func (rs *countReadSeeker) Read(buf []byte) (int, error) { // used_test n, err := rs.ReadSeeker.Read(buf) rs.N += int64(n) return n, err } -func TestFoo(t *testing.T) { +func TestFoo(t *testing.T) { // used_test r := bytes.NewReader([]byte("Hello, world!")) cr := &countReadSeeker{ReadSeeker: r} ioutil.ReadAll(cr) @@ -27,12 +27,12 @@ func TestFoo(t *testing.T) { } } -var sink int +var sink int // used_test -func BenchmarkFoo(b *testing.B) { +func BenchmarkFoo(b *testing.B) { // used_test for i := 0; i < b.N; i++ { sink = fn() } } -func fn() int { return 0 } +func fn() int { return 0 } // used_test diff --git a/unused/testdata/src/fields/fields.go b/unused/testdata/src/fields/fields.go index 8b07c8152..41c9080f7 100644 --- a/unused/testdata/src/fields/fields.go +++ b/unused/testdata/src/fields/fields.go @@ -2,44 +2,98 @@ package pkg -type t1 struct{ f11, f12 int } -type t2 struct{ f21, f22 int } -type t3 struct{ f31 t4 } -type t4 struct{ f41 int } -type t5 struct{ f51 int } -type t6 struct{ f61 int } -type t7 struct{ f71 int } -type m1 map[string]t7 -type t8 struct{ f81 int } -type t9 struct{ f91 int } -type t10 struct{ f101 int } -type t11 struct{ f111 int } -type s1 []t11 -type t12 struct{ f121 int } -type s2 []t12 -type t13 struct{ f131 int } -type t14 struct{ f141 int } -type a1 [1]t14 -type t15 struct{ f151 int } -type a2 [1]t15 -type t16 struct{ f161 int } -type t17 struct{ f171, f172 int } // want `t17` -type t18 struct{ f181, f182, f183 int } // want `f182` `f183` +type t1 struct { // used + f11 int // used + f12 int // used +} +type t2 struct { // used + f21 int // used + f22 int // used +} +type t3 struct { // used + f31 t4 // used +} +type 
t4 struct { // used + f41 int // used +} +type t5 struct { // used + f51 int // used +} +type t6 struct { // used + f61 int // used +} +type t7 struct { // used + f71 int // used +} +type m1 map[string]t7 // used +type t8 struct { // used + f81 int // used +} +type t9 struct { // used + f91 int // used +} +type t10 struct { // used + f101 int // used +} +type t11 struct { // used + f111 int // used +} +type s1 []t11 // used +type t12 struct { // used + f121 int // used +} +type s2 []t12 // used +type t13 struct { // used + f131 int // used +} +type t14 struct { // used + f141 int // used +} +type a1 [1]t14 // used +type t15 struct { // used + f151 int // used +} +type a2 [1]t15 // used +type t16 struct { // used + f161 int // used +} +type t17 struct { // unused + f171 int + f172 int +} +type t18 struct { // used + f181 int // used + f182 int // unused + f183 int // unused +} -type t19 struct{ f191 int } -type m2 map[string]t19 +type t19 struct { // used + f191 int // used +} +type m2 map[string]t19 // used -type t20 struct{ f201 int } -type m3 map[string]t20 +type t20 struct { // used + f201 int // used +} +type m3 map[string]t20 // used -type t21 struct{ f211, f212 int } // want `f211` +type t21 struct { // used + f211 int // unused + f212 int // used +} +type t22 struct { // unused + f221 int + f222 int +} -func foo() { +func foo() { // used _ = t10{1} _ = t21{f212: 1} _ = []t1{{1, 2}} _ = t2{1, 2} - _ = []struct{ a int }{{1}} + _ = []struct { + a int // used + }{{1}} // XXX // _ = []struct{ foo struct{ bar int } }{{struct{ bar int }{1}}} @@ -57,11 +111,19 @@ func foo() { _ = a1{{1}} _ = a2{0: {1}} _ = map[[1]t16]int{{{1}}: 1} - y := struct{ x int }{} + y := struct { + x int // used + }{} _ = y _ = t18{f181: 1} _ = []m2{{"a": {1}}} _ = [][]m3{{{"a": {1}}}} } -func init() { foo() } +func init() { foo() } // used + +func superUnused() { // unused + var _ struct { + x int + } +} diff --git a/unused/testdata/src/functions/functions.go 
b/unused/testdata/src/functions/functions.go index cb74a895f..56f6c15da 100644 --- a/unused/testdata/src/functions/functions.go +++ b/unused/testdata/src/functions/functions.go @@ -1,35 +1,35 @@ package main -type state func() state +type state func() state // used -func a() state { +func a() state { // used return a } -func main() { +func main() { // used st := a _ = st() } -type t1 struct{} // want `t1` -type t2 struct{} -type t3 struct{} +type t1 struct{} // unused +type t2 struct{} // used +type t3 struct{} // used -func fn1() t1 { return t1{} } // want `fn1` -func fn2() (x t2) { return } -func fn3() *t3 { return nil } +func fn1() t1 { return t1{} } // unused +func fn2() (x t2) { return } // used +func fn3() *t3 { return nil } // used -func fn4() { - const x = 1 - const y = 2 // want `y` - type foo int // want `foo` - type bar int +func fn4() { // used + const x = 1 // used + const y = 2 // unused + type foo int // unused + type bar int // used _ = x _ = bar(0) } -func init() { +func init() { // used fn2() fn3() fn4() diff --git a/unused/testdata/src/ignored/ignored.go b/unused/testdata/src/ignored/ignored.go new file mode 100644 index 000000000..b64c8d64c --- /dev/null +++ b/unused/testdata/src/ignored/ignored.go @@ -0,0 +1,56 @@ +package pkg + +//lint:ignore U1000 consider yourself used +type t1 struct{} // used +type t2 struct{} // used +type t3 struct{} // used + +func (t1) fn1() {} // used +func (t1) fn2() {} // used +func (t1) fn3() {} // used + +//lint:ignore U1000 be gone +func (t2) fn1() {} // used +func (t2) fn2() {} // unused +func (t2) fn3() {} // unused + +func (t3) fn1() {} // unused +func (t3) fn2() {} // unused +func (t3) fn3() {} // unused + +//lint:ignore U1000 consider yourself used +func fn() { // used + var _ t2 + var _ t3 +} + +//lint:ignore U1000 bye +type t4 struct { // used + x int // used +} + +func (t4) bar() {} // used + +//lint:ignore U1000 consider yourself used +type t5 map[int]struct { // used + y int // used +} + +//lint:ignore 
U1000 consider yourself used +type t6 interface { // used + foo() // used +} + +//lint:ignore U1000 cpnsider yourself used +type t7 = struct { // used + z int // used +} + +//lint:ignore U1000 consider yourself used +type t8 struct{} // used + +func (t8) fn() { // used + otherFn() +} + +func otherFn() {} // used diff --git a/unused/testdata/src/interfaces/interfaces.go b/unused/testdata/src/interfaces/interfaces.go index 59b1be73e..4301a89bc 100644 --- a/unused/testdata/src/interfaces/interfaces.go +++ b/unused/testdata/src/interfaces/interfaces.go @@ -1,39 +1,46 @@ package pkg -type I interface { - fn1() +type I interface { // used + fn1() // used } -type t struct{} +type t struct{} // used -func (t) fn1() {} -func (t) fn2() {} // want `fn2` +func (t) fn1() {} // used +func (t) fn2() {} // unused -func init() { +func init() { // used _ = t{} } -type I1 interface { - Foo() +type I1 interface { // used + Foo() // used } -type I2 interface { - Foo() +type I2 interface { // used + Foo() // used + bar() // used +} + +type i3 interface { // unused + foo() bar() } -type t1 struct{} -type t2 struct{} -type t3 struct{} -type t4 struct{ t3 } +type t1 struct{} // used +type t2 struct{} // used +type t3 struct{} // used +type t4 struct { // used + t3 // used +} -func (t1) Foo() {} -func (t2) Foo() {} -func (t2) bar() {} -func (t3) Foo() {} -func (t3) bar() {} +func (t1) Foo() {} // used +func (t2) Foo() {} // used +func (t2) bar() {} // used +func (t3) Foo() {} // used +func (t3) bar() {} // used -func Fn() { +func Fn() { // used var v1 t1 var v2 t2 var v3 t3 diff --git a/unused/testdata/src/interfaces2/interfaces.go b/unused/testdata/src/interfaces2/interfaces.go index d038ef699..15407b6dc 100644 --- a/unused/testdata/src/interfaces2/interfaces.go +++ b/unused/testdata/src/interfaces2/interfaces.go @@ -1,12 +1,14 @@ package pkg -type I interface { - foo() +type I interface { // used + foo() // used } -type T struct{} +type T struct{} // used -func (T) foo() {} -func (T) 
bar() {} // want `bar` +func (T) foo() {} // used +func (T) bar() {} // unused -var _ struct{ T } +var _ struct { + T // used +} diff --git a/unused/testdata/src/linkname/linkname.go b/unused/testdata/src/linkname/linkname.go index 1423a2148..eb33d6ae7 100644 --- a/unused/testdata/src/linkname/linkname.go +++ b/unused/testdata/src/linkname/linkname.go @@ -6,22 +6,22 @@ import _ "unsafe" //go:linkname ol other4 //go:linkname foo other1 -func foo() {} +func foo() {} // used //go:linkname bar other2 -var bar int +var bar int // used var ( - baz int // want `baz` + baz int // unused //go:linkname qux other3 - qux int + qux int // used ) //go:linkname fisk other3 var ( - fisk int + fisk int // used ) -var ol int +var ol int // used //go:linkname doesnotexist other5 diff --git a/unused/testdata/src/main/main.go b/unused/testdata/src/main/main.go index ae5c913ae..c688829e1 100644 --- a/unused/testdata/src/main/main.go +++ b/unused/testdata/src/main/main.go @@ -1,14 +1,15 @@ package main -func Fn1() {} -func Fn2() {} // want `Fn2` +func Fn1() {} // used +func Fn2() {} // used +func fn3() {} // unused -const X = 1 // want `X` +const X = 1 // used -var Y = 2 // want `Y` +var Y = 2 // used -type Z struct{} // want `Z` +type Z struct{} // used -func main() { +func main() { // used Fn1() } diff --git a/unused/testdata/src/mapslice/mapslice.go b/unused/testdata/src/mapslice/mapslice.go index 2769b2c21..f19c64ee9 100644 --- a/unused/testdata/src/mapslice/mapslice.go +++ b/unused/testdata/src/mapslice/mapslice.go @@ -1,8 +1,8 @@ package pkg -type M map[int]int +type M map[int]int // used -func Fn() { +func Fn() { // used var n M _ = []M{n} } diff --git a/unused/testdata/src/methods/methods.go b/unused/testdata/src/methods/methods.go index 0eaf6ee7f..f590584de 100644 --- a/unused/testdata/src/methods/methods.go +++ b/unused/testdata/src/methods/methods.go @@ -1,14 +1,16 @@ package pkg -type t1 struct{} -type t2 struct{ t3 } -type t3 struct{} +type t1 struct{} // used +type t2 
struct { // used + t3 // used +} +type t3 struct{} // used -func (t1) Foo() {} -func (t3) Foo() {} -func (t3) foo() {} // want `foo` +func (t1) Foo() {} // used +func (t3) Foo() {} // used +func (t3) foo() {} // unused -func init() { +func init() { // used _ = t1{} _ = t2{} } diff --git a/unused/testdata/src/named/named.go b/unused/testdata/src/named/named.go index 7105f0a0e..dbaf30c37 100644 --- a/unused/testdata/src/named/named.go +++ b/unused/testdata/src/named/named.go @@ -1,4 +1,4 @@ package pkg -type t1 struct{} -type T2 t1 +type t1 struct{} // used +type T2 t1 // used diff --git a/unused/testdata/src/nested/nested.go b/unused/testdata/src/nested/nested.go index 7e108a28c..03aeb61dc 100644 --- a/unused/testdata/src/nested/nested.go +++ b/unused/testdata/src/nested/nested.go @@ -1,11 +1,11 @@ package pkg -type t struct{} // want `t` +type t1 struct{} // unused -func (t) fragment() {} +func (t1) fragment() {} // unused -func fn() bool { // want `fn` - var v interface{} = t{} +func fn1() bool { // unused + var v interface{} = t1{} switch obj := v.(type) { case interface { fragment() @@ -14,3 +14,18 @@ func fn() bool { // want `fn` } return false } + +type t2 struct{} // used + +func (t2) fragment() {} // used + +func Fn() bool { // used + var v interface{} = t2{} + switch obj := v.(type) { + case interface { + fragment() // used + }: + obj.fragment() + } + return false +} diff --git a/unused/testdata/src/nocopy-main/nocopy-main.go b/unused/testdata/src/nocopy-main/nocopy-main.go index 369a5d503..771c6a882 100644 --- a/unused/testdata/src/nocopy-main/nocopy-main.go +++ b/unused/testdata/src/nocopy-main/nocopy-main.go @@ -1,24 +1,26 @@ package main -type myNoCopy1 struct{} -type myNoCopy2 struct{} -type locker struct{} // want `locker` -type someStruct struct{ x int } // want `someStruct` +type myNoCopy1 struct{} // used +type myNoCopy2 struct{} // used +type locker struct{} // unused +type someStruct struct { // unused + x int +} -func (myNoCopy1) Lock() {} -func 
(recv myNoCopy2) Lock() {} -func (locker) Lock() {} -func (locker) Unlock() {} -func (someStruct) Lock() {} +func (myNoCopy1) Lock() {} // used +func (recv myNoCopy2) Lock() {} // used +func (locker) Lock() {} // unused +func (locker) Unlock() {} // unused +func (someStruct) Lock() {} // unused -type T struct { - noCopy1 myNoCopy1 - noCopy2 myNoCopy2 - field1 someStruct // want `field1` - field2 locker // want `field2` - field3 int // want `field3` +type T struct { // used + noCopy1 myNoCopy1 // used + noCopy2 myNoCopy2 // used + field1 someStruct // unused + field2 locker // unused + field3 int // unused } -func main() { +func main() { // used _ = T{} } diff --git a/unused/testdata/src/nocopy/nocopy.go b/unused/testdata/src/nocopy/nocopy.go index 98e46d4eb..0f0ba4497 100644 --- a/unused/testdata/src/nocopy/nocopy.go +++ b/unused/testdata/src/nocopy/nocopy.go @@ -1,20 +1,22 @@ package bar -type myNoCopy1 struct{} -type myNoCopy2 struct{} -type locker struct{} // want `locker` -type someStruct struct{ x int } // want `someStruct` +type myNoCopy1 struct{} // used +type myNoCopy2 struct{} // used +type locker struct{} // unused +type someStruct struct { // unused + x int +} -func (myNoCopy1) Lock() {} -func (recv myNoCopy2) Lock() {} -func (locker) Lock() {} -func (locker) Unlock() {} -func (someStruct) Lock() {} +func (myNoCopy1) Lock() {} // used +func (recv myNoCopy2) Lock() {} // used +func (locker) Lock() {} // unused +func (locker) Unlock() {} // unused +func (someStruct) Lock() {} // unused -type T struct { - noCopy1 myNoCopy1 - noCopy2 myNoCopy2 - field1 someStruct // want `field1` - field2 locker // want `field2` - field3 int // want `field3` +type T struct { // used + noCopy1 myNoCopy1 // used + noCopy2 myNoCopy2 // used + field1 someStruct // unused + field2 locker // unused + field3 int // unused } diff --git a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go index 
fb577f97c..cbf99f213 100644 --- a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go +++ b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go @@ -1,17 +1,17 @@ package pkg -func init() { +func init() { // used var p P _ = p.n } -type T0 struct { - m int // want `m` - n int +type T0 struct { // used + m int // unused + n int // used } -type T1 struct { - T0 +type T1 struct { // used + T0 // used } -type P *T1 +type P *T1 // used diff --git a/unused/testdata/src/quiet/quiet.go b/unused/testdata/src/quiet/quiet.go index 82f8479b8..150f82769 100644 --- a/unused/testdata/src/quiet/quiet.go +++ b/unused/testdata/src/quiet/quiet.go @@ -1,34 +1,34 @@ package pkg -type iface interface { // want `iface` +type iface interface { // unused foo() } -type t1 struct{} // want `t1` -func (t1) foo() {} +type t1 struct{} // unused +func (t1) foo() {} // unused -type t2 struct{} +type t2 struct{} // used -func (t t2) bar(arg int) (ret int) { return 0 } // want `bar` +func (t t2) bar(arg int) (ret int) { return 0 } // unused -func init() { +func init() { // used _ = t2{} } -type t3 struct { // want `t3` +type t3 struct { // unused a int b int } -type T struct{} +type T struct{} // used -func fn1() { // want `fn1` +func fn1() { // unused meh := func(arg T) { } meh(T{}) } -type localityList []int // want `localityList` +type localityList []int // unused -func (l *localityList) Fn1() {} -func (l *localityList) Fn2() {} +func (l *localityList) Fn1() {} // unused +func (l *localityList) Fn2() {} // unused diff --git a/unused/testdata/src/selectors/selectors.go b/unused/testdata/src/selectors/selectors.go index 9ab337888..91743b320 100644 --- a/unused/testdata/src/selectors/selectors.go +++ b/unused/testdata/src/selectors/selectors.go @@ -1,14 +1,14 @@ package pkg -type t struct { - f int +type t struct { // used + f int // used } -func fn(v *t) { +func fn(v *t) { // used println(v.f) } -func init() { +func init() { // used var v t fn(&v) } diff --git 
a/unused/testdata/src/switch_interface/switch_interface.go b/unused/testdata/src/switch_interface/switch_interface.go index 99c2ce858..7fd63d544 100644 --- a/unused/testdata/src/switch_interface/switch_interface.go +++ b/unused/testdata/src/switch_interface/switch_interface.go @@ -1,19 +1,19 @@ package pkg -type t struct{} +type t struct{} // used -func (t) fragment() {} +func (t) fragment() {} // used -func fn() bool { +func fn() bool { // used var v interface{} = t{} switch obj := v.(type) { case interface { - fragment() + fragment() // used }: obj.fragment() } return false } -var x = fn() +var x = fn() // used var _ = x diff --git a/unused/testdata/src/tests-main/main_test.go b/unused/testdata/src/tests-main/main_test.go index fffcc5f62..7e644de94 100644 --- a/unused/testdata/src/tests-main/main_test.go +++ b/unused/testdata/src/tests-main/main_test.go @@ -4,8 +4,8 @@ import ( "testing" ) -type t1 struct{} +type t1 struct{} // used_test -func TestFoo(t *testing.T) { +func TestFoo(t *testing.T) { // used_test _ = t1{} } diff --git a/unused/testdata/src/tests/tests.go b/unused/testdata/src/tests/tests.go index ca2d5b3cd..4a5e69f59 100644 --- a/unused/testdata/src/tests/tests.go +++ b/unused/testdata/src/tests/tests.go @@ -1,3 +1,3 @@ package pkg -func fn() {} +func fn() {} // unused used_test diff --git a/unused/testdata/src/tests/tests_test.go b/unused/testdata/src/tests/tests_test.go index 4025030d5..74c5091da 100644 --- a/unused/testdata/src/tests/tests_test.go +++ b/unused/testdata/src/tests/tests_test.go @@ -2,6 +2,6 @@ package pkg import "testing" -func TestFn(t *testing.T) { +func TestFn(t *testing.T) { // used_test fn() } diff --git a/unused/testdata/src/type-dedup/dedup.go b/unused/testdata/src/type-dedup/dedup.go index 53cf2f989..83f3792be 100644 --- a/unused/testdata/src/type-dedup/dedup.go +++ b/unused/testdata/src/type-dedup/dedup.go @@ -1,16 +1,16 @@ package pkg -type t1 struct { - a int - b int // want `b` +type t1 struct { // used + a int // used + 
b int // unused } -type t2 struct { - a int // want `a` - b int +type t2 struct { // used + a int // unused + b int // used } -func Fn() { +func Fn() { // used x := t1{} y := t2{} println(x.a) diff --git a/unused/testdata/src/type-dedup2/dedup.go b/unused/testdata/src/type-dedup2/dedup.go index 56c7dc951..38ebc7f71 100644 --- a/unused/testdata/src/type-dedup2/dedup.go +++ b/unused/testdata/src/type-dedup2/dedup.go @@ -1,23 +1,23 @@ package pkg -func fn1(t struct { - a int - b int +func fn1(t struct { // used + a int // used + b int // used }) { println(t.a) fn2(t) } -func fn2(t struct { - a int - b int +func fn2(t struct { // used + a int // used + b int // used }) { println(t.b) } -func Fn() { +func Fn() { // used fn1(struct { - a int - b int + a int // used + b int // used }{}) } diff --git a/unused/testdata/src/type-dedup3/dedup.go b/unused/testdata/src/type-dedup3/dedup.go index 095e95f86..e796b6a20 100644 --- a/unused/testdata/src/type-dedup3/dedup.go +++ b/unused/testdata/src/type-dedup3/dedup.go @@ -1,23 +1,23 @@ package pkg -func fn1(t struct { - a int - b int +func fn1(t struct { // used + a int // used + b int // used }) { fn2(t) } -func fn2(t struct { - a int - b int +func fn2(t struct { // used + a int // used + b int // used }) { println(t.a) println(t.b) } -func Fn() { +func Fn() { // used fn1(struct { - a int - b int + a int // used + b int // used }{1, 2}) } diff --git a/unused/testdata/src/types/types.go b/unused/testdata/src/types/types.go index 393df3fa5..e8c010441 100644 --- a/unused/testdata/src/types/types.go +++ b/unused/testdata/src/types/types.go @@ -2,16 +2,16 @@ package pkg import "reflect" -type wkt interface { - XXX_WellKnownType() string +type wkt interface { // used + XXX_WellKnownType() string // used } -var typeOfWkt = reflect.TypeOf((*wkt)(nil)).Elem() +var typeOfWkt = reflect.TypeOf((*wkt)(nil)).Elem() // used -func Fn() { +func Fn() { // used _ = typeOfWkt } -type t *int +type t *int // used var _ t diff --git 
a/unused/testdata/src/unused-argument/unused-argument.go b/unused/testdata/src/unused-argument/unused-argument.go index 423592692..2dc76bc4f 100644 --- a/unused/testdata/src/unused-argument/unused-argument.go +++ b/unused/testdata/src/unused-argument/unused-argument.go @@ -1,10 +1,10 @@ package main -type t1 struct{} -type t2 struct{} +type t1 struct{} // used +type t2 struct{} // used -func (t1) foo(arg *t2) {} +func (t1) foo(arg *t2) {} // used -func init() { +func init() { // used t1{}.foo(nil) } diff --git a/unused/testdata/src/unused_type/unused_type.go b/unused/testdata/src/unused_type/unused_type.go index 0881ffe61..94672c16b 100644 --- a/unused/testdata/src/unused_type/unused_type.go +++ b/unused/testdata/src/unused_type/unused_type.go @@ -1,17 +1,17 @@ package pkg -type t1 struct{} // want `t1` +type t1 struct{} // unused -func (t1) Fn() {} +func (t1) Fn() {} // unused -type t2 struct{} +type t2 struct{} // used -func (*t2) Fn() {} +func (*t2) Fn() {} // used -func init() { +func init() { // used (*t2).Fn(nil) } -type t3 struct{} // want `t3` +type t3 struct{} // unused -func (t3) fn() +func (t3) fn() // unused diff --git a/unused/testdata/src/variables/variables.go b/unused/testdata/src/variables/variables.go index d5129a833..658694470 100644 --- a/unused/testdata/src/variables/variables.go +++ b/unused/testdata/src/variables/variables.go @@ -1,22 +1,23 @@ package pkg -var a byte -var b [16]byte +var a byte // used +var b [16]byte // used -type t1 struct{} -type t2 struct{} -type t3 struct{} -type t4 struct{} -type t5 struct{} +type t1 struct{} // used +type t2 struct{} // used +type t3 struct{} // used +type t4 struct{} // used +type t5 struct{} // used -type iface interface{} +type iface interface{} // used -var x t1 -var y = t2{} -var j, k = t3{}, t4{} -var l iface = t5{} +var x t1 // used +var y = t2{} // used +var j = t3{} // used +var k = t4{} // used +var l iface = t5{} // used -func Fn() { +func Fn() { // used println(a) _ = b[:] diff --git 
a/unused/testdata/src/variables/vartype.go b/unused/testdata/src/variables/vartype.go index ede73ffa5..744d20f7f 100644 --- a/unused/testdata/src/variables/vartype.go +++ b/unused/testdata/src/variables/vartype.go @@ -1,10 +1,10 @@ package pkg -type t181025 struct{} +type t181025 struct{} // used -func (t181025) F() {} +func (t181025) F() {} // used // package-level variable after function declaration used to trigger a // bug in unused. -var V181025 t181025 +var V181025 t181025 // used diff --git a/unused/unused.go b/unused/unused.go index 0df5fc8ff..1033c581c 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -6,18 +6,20 @@ import ( "go/token" "go/types" "io" + "reflect" "strings" - "sync" - "sync/atomic" "golang.org/x/tools/go/analysis" "honnef.co/go/tools/code" + "honnef.co/go/tools/facts" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/ir" - "honnef.co/go/tools/lint" + "honnef.co/go/tools/report" ) +var Debug io.Writer + // The graph we construct omits nodes along a path that do not // contribute any new information to the solution. For example, the // full graph for a function with a receiver would be Func -> @@ -36,10 +38,10 @@ import ( /* - packages use: - - (1.1) exported named types (unless in package main) - - (1.2) exported functions (unless in package main) - - (1.3) exported variables (unless in package main) - - (1.4) exported constants (unless in package main) + - (1.1) exported named types + - (1.2) exported functions + - (1.3) exported variables + - (1.4) exported constants - (1.5) init functions - (1.6) functions exported to cgo - (1.7) the main function iff in the main package @@ -137,14 +139,6 @@ import ( positives. Thus, we only accurately track fields of named struct types, and assume that unnamed struct types use all their fields. 
- -- Differences in whole program mode: - - (e2) types aim to implement all exported interfaces from all packages - - (e3) exported identifiers aren't automatically used. for fields and - methods this poses extra issues due to reflection. We assume - that all exported fields are used. We also maintain a list of - known reflection-based method callers. - */ func assert(b bool) { @@ -153,24 +147,6 @@ func assert(b bool) { } } -func typString(obj types.Object) string { - switch obj := obj.(type) { - case *types.Func: - return "func" - case *types.Var: - if obj.IsField() { - return "field" - } - return "var" - case *types.Const: - return "const" - case *types.TypeName: - return "type" - default: - return "identifier" - } -} - // /usr/lib/go/src/runtime/proc.go:433:6: func badmorestackg0 is unused (U1000) // Functions defined in the Go runtime that may be called through @@ -421,79 +397,78 @@ type pkg struct { TypesSizes types.Sizes IR *ir.Package SrcFuncs []*ir.Function + Directives []facts.Directive } -type Checker struct { - WholeProgram bool - Debug io.Writer - - mu sync.Mutex - initialPackages map[*types.Package]struct{} - allPackages map[*types.Package]struct{} - graph *Graph +// TODO(dh): should we return a map instead of two slices? 
+type Result struct { + Used []types.Object + Unused []types.Object } -func NewChecker(wholeProgram bool) *Checker { - return &Checker{ - initialPackages: map[*types.Package]struct{}{}, - allPackages: map[*types.Package]struct{}{}, - WholeProgram: wholeProgram, - } +type SerializedResult struct { + Used []SerializedObject + Unused []SerializedObject } -func (c *Checker) Analyzer() *analysis.Analyzer { - name := "U1000" - if c.WholeProgram { - name = "U1001" - } - return &analysis.Analyzer{ - Name: name, - Doc: "Unused code", - Run: c.Run, - Requires: []*analysis.Analyzer{buildir.Analyzer}, - } +var Analyzer = &analysis.Analyzer{ + Name: "U1000", + Doc: "Unused code", + Run: run, + Requires: []*analysis.Analyzer{buildir.Analyzer, facts.Generated, facts.Directives}, + ResultType: reflect.TypeOf(Result{}), } -func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { - c.mu.Lock() - if c.graph == nil { - c.graph = NewGraph() - c.graph.wholeProgram = c.WholeProgram - c.graph.fset = pass.Fset - } +type SerializedObject struct { + Name string + Position token.Position + DisplayPosition token.Position + Kind string + InGenerated bool +} - var visit func(pkg *types.Package) - visit = func(pkg *types.Package) { - if _, ok := c.allPackages[pkg]; ok { - return - } - c.allPackages[pkg] = struct{}{} - for _, imp := range pkg.Imports() { - visit(imp) +func typString(obj types.Object) string { + switch obj := obj.(type) { + case *types.Func: + return "func" + case *types.Var: + if obj.IsField() { + return "field" } + return "var" + case *types.Const: + return "const" + case *types.TypeName: + return "type" + default: + return "identifier" } - visit(pass.Pkg) +} - c.initialPackages[pass.Pkg] = struct{}{} - c.mu.Unlock() +func Serialize(pass *analysis.Pass, res Result, fset *token.FileSet) SerializedResult { + // OPT(dh): there's no point in serializing Used objects that are + // always used, such as exported names, blank identifiers, or + // anonymous struct fields. 
Used only exists to overrule Unused of + // a different package. If something can never be unused, then its + // presence in Used is useless. + // + // I'm not sure if this should happen when serializing, or when + // returning Result. - irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR) - pkg := &pkg{ - Fset: pass.Fset, - Files: pass.Files, - Pkg: pass.Pkg, - TypesInfo: pass.TypesInfo, - TypesSizes: pass.TypesSizes, - IR: irpkg.Pkg, - SrcFuncs: irpkg.SrcFuncs, + out := SerializedResult{ + Used: make([]SerializedObject, len(res.Used)), + Unused: make([]SerializedObject, len(res.Unused)), } - - c.processPkg(c.graph, pkg) - - return nil, nil + for i, obj := range res.Used { + out.Used[i] = serializeObject(pass, fset, obj) + } + for i, obj := range res.Unused { + out.Unused[i] = serializeObject(pass, fset, obj) + } + return out } -func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Problem { +func serializeObject(pass *analysis.Pass, fset *token.FileSet, obj types.Object) SerializedObject { name := obj.Name() if sig, ok := obj.Type().(*types.Signature); ok && sig.Recv() != nil { switch sig.Recv().Type().(type) { @@ -506,305 +481,159 @@ func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Prob } } } - - checkName := "U1000" - if c.WholeProgram { - checkName = "U1001" - } - return lint.Problem{ - Pos: lint.DisplayPosition(fset, obj.Pos()), - Message: fmt.Sprintf("%s %s is unused", typString(obj), name), - Check: checkName, + return SerializedObject{ + Name: name, + Position: fset.PositionFor(obj.Pos(), false), + DisplayPosition: report.DisplayPosition(fset, obj.Pos()), + Kind: typString(obj), + InGenerated: code.IsGenerated(pass, obj.Pos()), } } -func (c *Checker) Result() []types.Object { - out := c.results() - - out2 := make([]types.Object, 0, len(out)) - for _, v := range out { - if _, ok := c.initialPackages[v.Pkg()]; !ok { - continue - } - out2 = append(out2, v) - } - - return out2 +type checker struct { + graph 
*graph } -func (c *Checker) debugf(f string, v ...interface{}) { - if c.Debug != nil { - fmt.Fprintf(c.Debug, f, v...) +func debugf(f string, v ...interface{}) { + if Debug != nil { + fmt.Fprintf(Debug, f, v...) } } -func (graph *Graph) quieten(node *Node) { - if node.seen { - return - } - switch obj := node.obj.(type) { - case *types.Named: - for i := 0; i < obj.NumMethods(); i++ { - m := obj.Method(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true - } - } - case *types.Struct: - for i := 0; i < obj.NumFields(); i++ { - if node, ok := graph.nodeMaybe(obj.Field(i)); ok { - node.quiet = true - } - } - case *types.Interface: - for i := 0; i < obj.NumExplicitMethods(); i++ { - m := obj.ExplicitMethod(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true - } - } +func run(pass *analysis.Pass) (interface{}, error) { + irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR) + dirs := pass.ResultOf[facts.Directives].([]facts.Directive) + pkg := &pkg{ + Fset: pass.Fset, + Files: pass.Files, + Pkg: pass.Pkg, + TypesInfo: pass.TypesInfo, + TypesSizes: pass.TypesSizes, + IR: irpkg.Pkg, + SrcFuncs: irpkg.SrcFuncs, + Directives: dirs, } -} -func (c *Checker) results() []types.Object { - if c.graph == nil { - // We never analyzed any packages - return nil + c := &checker{ + graph: newGraph(pkg), } - var out []types.Object + c.graph.entry(pkg) + used, unused := c.results() - if c.WholeProgram { - var ifaces []*types.Interface - var notIfaces []types.Type - - // implement as many interfaces as possible - c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { - switch t := t.(type) { - case *types.Interface: - if t.NumMethods() > 0 { - ifaces = append(ifaces, t) - } - default: - if _, ok := t.Underlying().(*types.Interface); !ok { - notIfaces = append(notIfaces, t) - } - } - }) - - for pkg := range c.allPackages { - for _, iface := range interfacesFromExportData(pkg) { - if iface.NumMethods() > 0 { - ifaces = append(ifaces, iface) - } - } - } - - ctx 
:= &context{ - g: c.graph, - seenTypes: &c.graph.seenTypes, - } - // (8.0) handle interfaces - // (e2) types aim to implement all exported interfaces from all packages - for _, t := range notIfaces { - // OPT(dh): it is unfortunate that we do not have access - // to a populated method set at this point. - ms := types.NewMethodSet(t) - for _, iface := range ifaces { - if sels, ok := c.graph.implements(t, iface, ms); ok { - for _, sel := range sels { - c.graph.useMethod(ctx, t, sel, t, edgeImplements) - } - } - } - } - } - - if c.Debug != nil { - debugNode := func(node *Node) { - if node.obj == nil { - c.debugf("n%d [label=\"Root\"];\n", node.id) + if Debug != nil { + debugNode := func(n *node) { + if n.obj == nil { + debugf("n%d [label=\"Root\"];\n", n.id) } else { - c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.obj, node.obj)) + color := "red" + if n.seen { + color = "green" + } + debugf("n%d [label=%q, color=%q];\n", n.id, fmt.Sprintf("(%T) %s", n.obj, n.obj), color) } - for _, e := range node.used { + for _, e := range n.used { for i := edgeKind(1); i < 64; i++ { if e.kind.is(1 << i) { - c.debugf("n%d -> n%d [label=%q];\n", node.id, e.node.id, edgeKind(1< n%d [label=%q];\n", n.id, e.node.id, edgeKind(1< 1 { - cg := &ConstGroup{} - ctx.see(cg) + cg := &constGroup{} + g.see(cg) for _, spec := range specs { for _, name := range spec.(*ast.ValueSpec).Names { obj := pkg.TypesInfo.ObjectOf(name) // (10.1) const groups - ctx.seeAndUse(obj, cg, edgeConstGroup) - ctx.use(cg, obj, edgeConstGroup) + g.seeAndUse(obj, cg, edgeConstGroup) + g.use(cg, obj, edgeConstGroup) } } } @@ -1238,16 +1004,16 @@ func (g *Graph) entry(pkg *pkg) { for _, name := range v.Names { T := pkg.TypesInfo.TypeOf(name) if fn != nil { - ctx.seeAndUse(T, fn, edgeVarDecl) + g.seeAndUse(T, fn, edgeVarDecl) } else { // TODO(dh): we likely want to make // the type used by the variable, not // the package containing the // variable. 
But then we have to take // special care of blank identifiers. - ctx.seeAndUse(T, nil, edgeVarDecl) + g.seeAndUse(T, nil, edgeVarDecl) } - g.typ(ctx, T, nil) + g.typ(T, nil) } } case token.TYPE: @@ -1261,11 +1027,11 @@ func (g *Graph) entry(pkg *pkg) { v := spec.(*ast.TypeSpec) T := pkg.TypesInfo.TypeOf(v.Type) obj := pkg.TypesInfo.ObjectOf(v.Name) - ctx.see(obj) - ctx.see(T) - ctx.use(T, obj, edgeType) - g.typ(ctx, obj.Type(), nil) - g.typ(ctx, T, nil) + g.see(obj) + g.see(T) + g.use(T, obj, edgeType) + g.typ(obj.Type(), nil) + g.typ(T, nil) if v.Assign != 0 { aliasFor := obj.(*types.TypeName).Type() @@ -1276,10 +1042,10 @@ func (g *Graph) entry(pkg *pkg) { // just mark the alias used. // // FIXME(dh): what about aliases declared inside functions? - ctx.use(obj, nil, edgeAlias) + g.use(obj, nil, edgeAlias) } else { - ctx.see(aliasFor) - ctx.seeAndUse(obj, aliasFor, edgeAlias) + g.see(aliasFor) + g.seeAndUse(obj, aliasFor, edgeAlias) } } } @@ -1295,16 +1061,16 @@ func (g *Graph) entry(pkg *pkg) { // nothing to do, we collect all constants from Defs case *ir.Global: if m.Object() != nil { - ctx.see(m.Object()) - if g.trackExportedIdentifier(ctx, m.Object()) { - // (1.3) packages use exported variables (unless in package main) - ctx.use(m.Object(), nil, edgeExportedVariable) + g.see(m.Object()) + if m.Object().Exported() { + // (1.3) packages use exported variables + g.use(m.Object(), nil, edgeExportedVariable) } } case *ir.Function: mObj := owningObject(m) if mObj != nil { - ctx.see(mObj) + g.see(mObj) } //lint:ignore SA9003 handled implicitly if m.Name() == "init" { @@ -1315,17 +1081,17 @@ func (g *Graph) entry(pkg *pkg) { // be owned by the package. } // This branch catches top-level functions, not methods. 
- if m.Object() != nil && g.trackExportedIdentifier(ctx, m.Object()) { - // (1.2) packages use exported functions (unless in package main) - ctx.use(mObj, nil, edgeExportedFunction) + if m.Object() != nil && m.Object().Exported() { + // (1.2) packages use exported functions + g.use(mObj, nil, edgeExportedFunction) } if m.Name() == "main" && pkg.Pkg.Name() == "main" { // (1.7) packages use the main function iff in the main package - ctx.use(mObj, nil, edgeMainFunction) + g.use(mObj, nil, edgeMainFunction) } if pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { // (9.8) runtime functions that may be called from user code via the compiler - ctx.use(mObj, nil, edgeRuntimeFunction) + g.use(mObj, nil, edgeRuntimeFunction) } if m.Source() != nil { doc := m.Source().(*ast.FuncDecl).Doc @@ -1333,57 +1099,102 @@ func (g *Graph) entry(pkg *pkg) { for _, cmt := range doc.List { if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { // (1.6) packages use functions exported to cgo - ctx.use(mObj, nil, edgeCgoExported) + g.use(mObj, nil, edgeCgoExported) } } } } - g.function(ctx, m) + g.function(m) case *ir.Type: if m.Object() != nil { - ctx.see(m.Object()) - if g.trackExportedIdentifier(ctx, m.Object()) { - // (1.1) packages use exported named types (unless in package main) - ctx.use(m.Object(), nil, edgeExportedType) + g.see(m.Object()) + if m.Object().Exported() { + // (1.1) packages use exported named types + g.use(m.Object(), nil, edgeExportedType) } } - g.typ(ctx, m.Type(), nil) + g.typ(m.Type(), nil) default: panic(fmt.Sprintf("unreachable: %T", m)) } } - if !g.wholeProgram { - // When not in whole program mode we reset seenTypes after each package, - // which means g.seenTypes only contains types of - // interest to us. 
In whole program mode, we're better off - // processing all interfaces at once, globally, both for - // performance reasons and because in whole program mode we - // actually care about all interfaces, not just the subset - // that has unexported methods. - - var ifaces []*types.Interface - var notIfaces []types.Type - - ctx.seenTypes.Iterate(func(t types.Type, _ interface{}) { - switch t := t.(type) { - case *types.Interface: - // OPT(dh): (8.1) we only need interfaces that have unexported methods - ifaces = append(ifaces, t) - default: - if _, ok := t.Underlying().(*types.Interface); !ok { - notIfaces = append(notIfaces, t) + var ifaces []*types.Interface + var notIfaces []types.Type + + g.seenTypes.Iterate(func(t types.Type, _ interface{}) { + switch t := t.(type) { + case *types.Interface: + // OPT(dh): (8.1) we only need interfaces that have unexported methods + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } + } + }) + + // (8.0) handle interfaces + for _, t := range notIfaces { + ms := pkg.IR.Prog.MethodSets.MethodSet(t) + for _, iface := range ifaces { + if sels, ok := g.implements(t, iface, ms); ok { + for _, sel := range sels { + g.useMethod(t, sel, t, edgeImplements) } } - }) + } + } - // (8.0) handle interfaces - for _, t := range notIfaces { - ms := pkg.IR.Prog.MethodSets.MethodSet(t) - for _, iface := range ifaces { - if sels, ok := g.implements(t, iface, ms); ok { - for _, sel := range sels { - g.useMethod(ctx, t, sel, t, edgeImplements) + type ignoredKey struct { + file string + line int + } + ignores := map[ignoredKey]struct{}{} + for _, dir := range g.pkg.Directives { + if dir.Command != "ignore" { + continue + } + if len(dir.Arguments) == 0 { + continue + } + for _, check := range strings.Split(dir.Arguments[0], ",") { + if check == "U1000" { + pos := g.pkg.Fset.PositionFor(dir.Node.Pos(), false) + key := ignoredKey{ + pos.Filename, + pos.Line, + } + ignores[key] 
= struct{}{} + break + } + } + } + + if len(ignores) > 0 { + // all objects annotated with a //lint:ignore U1000 are considered used + for obj := range g.Nodes { + if obj, ok := obj.(types.Object); ok { + pos := g.pkg.Fset.PositionFor(obj.Pos(), false) + key := ignoredKey{ + pos.Filename, + pos.Line, + } + if _, ok := ignores[key]; ok { + g.use(obj, nil, edgeIgnored) + + // use methods and fields of ignored types + if obj, ok := obj.(*types.TypeName); ok { + if typ, ok := obj.Type().(*types.Named); ok { + for i := 0; i < typ.NumMethods(); i++ { + g.use(typ.Method(i), nil, edgeIgnored) + } + } + if typ, ok := obj.Type().Underlying().(*types.Struct); ok { + for i := 0; i < typ.NumFields(); i++ { + g.use(typ.Field(i), nil, edgeIgnored) + } + } } } } @@ -1391,7 +1202,7 @@ func (g *Graph) entry(pkg *pkg) { } } -func (g *Graph) useMethod(ctx *context, t types.Type, sel *types.Selection, by interface{}, kind edgeKind) { +func (g *graph) useMethod(t types.Type, sel *types.Selection, by interface{}, kind edgeKind) { obj := sel.Obj() path := sel.Index() assert(obj != nil) @@ -1400,12 +1211,12 @@ func (g *Graph) useMethod(ctx *context, t types.Type, sel *types.Selection, by i for _, idx := range path[:len(path)-1] { next := base.Field(idx) // (6.3) structs use embedded fields that help implement interfaces - ctx.see(base) - ctx.seeAndUse(next, base, edgeProvidesMethod) + g.see(base) + g.seeAndUse(next, base, edgeProvidesMethod) base, _ = code.Dereference(next.Type()).Underlying().(*types.Struct) } } - ctx.seeAndUse(obj, by, kind) + g.seeAndUse(obj, by, kind) } func owningObject(fn *ir.Function) types.Object { @@ -1418,94 +1229,77 @@ func owningObject(fn *ir.Function) types.Object { return nil } -func (g *Graph) function(ctx *context, fn *ir.Function) { - if fn.Package() != nil && fn.Package() != ctx.pkg.IR { +func (g *graph) function(fn *ir.Function) { + if fn.Package() != nil && fn.Package() != g.pkg.IR { return } name := fn.RelString(nil) - if _, ok := ctx.seenFns[name]; ok 
{ + if _, ok := g.seenFns[name]; ok { return } - ctx.seenFns[name] = struct{}{} + g.seenFns[name] = struct{}{} // (4.1) functions use all their arguments, return parameters and receivers - g.signature(ctx, fn.Signature, owningObject(fn)) - g.instructions(ctx, fn) + g.signature(fn.Signature, owningObject(fn)) + g.instructions(fn) for _, anon := range fn.AnonFuncs { // (4.2) functions use anonymous functions defined beneath them // // This fact is expressed implicitly. Anonymous functions have // no types.Object, so their owner is the surrounding // function. - g.function(ctx, anon) + g.function(anon) } } -func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { - if g.wholeProgram { - g.mu.Lock() - } - if ctx.seenTypes.At(t) != nil { - if g.wholeProgram { - g.mu.Unlock() - } +func (g *graph) typ(t types.Type, parent types.Type) { + if g.seenTypes.At(t) != nil { return } - if g.wholeProgram { - g.mu.Unlock() - } + if t, ok := t.(*types.Named); ok && t.Obj().Pkg() != nil { - if t.Obj().Pkg() != ctx.pkg.Pkg { + if t.Obj().Pkg() != g.pkg.Pkg { return } } - if g.wholeProgram { - g.mu.Lock() - } - ctx.seenTypes.Set(t, struct{}{}) - if g.wholeProgram { - g.mu.Unlock() - } + g.seenTypes.Set(t, struct{}{}) if isIrrelevant(t) { return } - ctx.see(t) + g.see(t) switch t := t.(type) { case *types.Struct: for i := 0; i < t.NumFields(); i++ { - ctx.see(t.Field(i)) + g.see(t.Field(i)) if t.Field(i).Exported() { // (6.2) structs use exported fields - ctx.use(t.Field(i), t, edgeExportedField) + g.use(t.Field(i), t, edgeExportedField) } else if t.Field(i).Name() == "_" { - ctx.use(t.Field(i), t, edgeBlankField) + g.use(t.Field(i), t, edgeBlankField) } else if isNoCopyType(t.Field(i).Type()) { // (6.1) structs use fields of type NoCopy sentinel - ctx.use(t.Field(i), t, edgeNoCopySentinel) + g.use(t.Field(i), t, edgeNoCopySentinel) } else if parent == nil { // (11.1) anonymous struct types use all their fields. 
- ctx.use(t.Field(i), t, edgeAnonymousStruct) + g.use(t.Field(i), t, edgeAnonymousStruct) } if t.Field(i).Anonymous() { - // (e3) exported identifiers aren't automatically used. - if !g.wholeProgram { - // does the embedded field contribute exported methods to the method set? - T := t.Field(i).Type() - if _, ok := T.Underlying().(*types.Pointer); !ok { - // An embedded field is addressable, so check - // the pointer type to get the full method set - T = types.NewPointer(T) - } - ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(T) - for j := 0; j < ms.Len(); j++ { - if ms.At(j).Obj().Exported() { - // (6.4) structs use embedded fields that have exported methods (recursively) - ctx.use(t.Field(i), t, edgeExtendsExportedMethodSet) - break - } + // does the embedded field contribute exported methods to the method set? + T := t.Field(i).Type() + if _, ok := T.Underlying().(*types.Pointer); !ok { + // An embedded field is addressable, so check + // the pointer type to get the full method set + T = types.NewPointer(T) + } + ms := g.pkg.IR.Prog.MethodSets.MethodSet(T) + for j := 0; j < ms.Len(); j++ { + if ms.At(j).Obj().Exported() { + // (6.4) structs use embedded fields that have exported methods (recursively) + g.use(t.Field(i), t, edgeExtendsExportedMethodSet) + break } } @@ -1534,115 +1328,115 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { // does the embedded field contribute exported fields? 
if hasExportedField(t.Field(i).Type()) { // (6.5) structs use embedded structs that have exported fields (recursively) - ctx.use(t.Field(i), t, edgeExtendsExportedFields) + g.use(t.Field(i), t, edgeExtendsExportedFields) } } - g.variable(ctx, t.Field(i)) + g.variable(t.Field(i)) } case *types.Basic: // Nothing to do case *types.Named: // (9.3) types use their underlying and element types - ctx.seeAndUse(t.Underlying(), t, edgeUnderlyingType) - ctx.seeAndUse(t.Obj(), t, edgeTypeName) - ctx.seeAndUse(t, t.Obj(), edgeNamedType) + g.seeAndUse(t.Underlying(), t, edgeUnderlyingType) + g.seeAndUse(t.Obj(), t, edgeTypeName) + g.seeAndUse(t, t.Obj(), edgeNamedType) // (2.4) named types use the pointer type if _, ok := t.Underlying().(*types.Interface); !ok && t.NumMethods() > 0 { - ctx.seeAndUse(types.NewPointer(t), t, edgePointerType) + g.seeAndUse(types.NewPointer(t), t, edgePointerType) } for i := 0; i < t.NumMethods(); i++ { - ctx.see(t.Method(i)) + g.see(t.Method(i)) // don't use trackExportedIdentifier here, we care about // all exported methods, even in package main or in tests. 
- if t.Method(i).Exported() && !g.wholeProgram { + if t.Method(i).Exported() { // (2.1) named types use exported methods - ctx.use(t.Method(i), t, edgeExportedMethod) + g.use(t.Method(i), t, edgeExportedMethod) } - g.function(ctx, ctx.pkg.IR.Prog.FuncValue(t.Method(i))) + g.function(g.pkg.IR.Prog.FuncValue(t.Method(i))) } - g.typ(ctx, t.Underlying(), t) + g.typ(t.Underlying(), t) case *types.Slice: // (9.3) types use their underlying and element types - ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem(), nil) + g.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(t.Elem(), nil) case *types.Map: // (9.3) types use their underlying and element types - ctx.seeAndUse(t.Elem(), t, edgeElementType) + g.seeAndUse(t.Elem(), t, edgeElementType) // (9.3) types use their underlying and element types - ctx.seeAndUse(t.Key(), t, edgeKeyType) - g.typ(ctx, t.Elem(), nil) - g.typ(ctx, t.Key(), nil) + g.seeAndUse(t.Key(), t, edgeKeyType) + g.typ(t.Elem(), nil) + g.typ(t.Key(), nil) case *types.Signature: - g.signature(ctx, t, nil) + g.signature(t, nil) case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) // (8.3) All interface methods are marked as used - ctx.seeAndUse(m, t, edgeInterfaceMethod) - ctx.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) - g.signature(ctx, m.Type().(*types.Signature), nil) + g.seeAndUse(m, t, edgeInterfaceMethod) + g.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) + g.signature(m.Type().(*types.Signature), nil) } for i := 0; i < t.NumEmbeddeds(); i++ { tt := t.EmbeddedType(i) // (8.4) All embedded interfaces are marked as used - ctx.seeAndUse(tt, t, edgeEmbeddedInterface) + g.seeAndUse(tt, t, edgeEmbeddedInterface) } case *types.Array: // (9.3) types use their underlying and element types - ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem(), nil) + g.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(t.Elem(), nil) case *types.Pointer: // (9.3) types use their underlying and element types 
- ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem(), nil) + g.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(t.Elem(), nil) case *types.Chan: // (9.3) types use their underlying and element types - ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem(), nil) + g.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(t.Elem(), nil) case *types.Tuple: for i := 0; i < t.Len(); i++ { // (9.3) types use their underlying and element types - ctx.seeAndUse(t.At(i).Type(), t, edgeTupleElement|edgeType) - g.typ(ctx, t.At(i).Type(), nil) + g.seeAndUse(t.At(i).Type(), t, edgeTupleElement|edgeType) + g.typ(t.At(i).Type(), nil) } default: panic(fmt.Sprintf("unreachable: %T", t)) } } -func (g *Graph) variable(ctx *context, v *types.Var) { +func (g *graph) variable(v *types.Var) { // (9.2) variables use their types - ctx.seeAndUse(v.Type(), v, edgeType) - g.typ(ctx, v.Type(), nil) + g.seeAndUse(v.Type(), v, edgeType) + g.typ(v.Type(), nil) } -func (g *Graph) signature(ctx *context, sig *types.Signature, fn types.Object) { +func (g *graph) signature(sig *types.Signature, fn types.Object) { var user interface{} = fn if fn == nil { user = sig - ctx.see(sig) + g.see(sig) } if sig.Recv() != nil { - ctx.seeAndUse(sig.Recv().Type(), user, edgeReceiver|edgeType) - g.typ(ctx, sig.Recv().Type(), nil) + g.seeAndUse(sig.Recv().Type(), user, edgeReceiver|edgeType) + g.typ(sig.Recv().Type(), nil) } for i := 0; i < sig.Params().Len(); i++ { param := sig.Params().At(i) - ctx.seeAndUse(param.Type(), user, edgeFunctionArgument|edgeType) - g.typ(ctx, param.Type(), nil) + g.seeAndUse(param.Type(), user, edgeFunctionArgument|edgeType) + g.typ(param.Type(), nil) } for i := 0; i < sig.Results().Len(); i++ { param := sig.Results().At(i) - ctx.seeAndUse(param.Type(), user, edgeFunctionResult|edgeType) - g.typ(ctx, param.Type(), nil) + g.seeAndUse(param.Type(), user, edgeFunctionResult|edgeType) + g.typ(param.Type(), nil) } } -func (g *Graph) instructions(ctx *context, fn *ir.Function) 
{ +func (g *graph) instructions(fn *ir.Function) { fnObj := owningObject(fn) for _, b := range fn.Blocks { for _, instr := range b.Instrs { @@ -1663,17 +1457,17 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) { // (9.5) instructions use their operands // (4.4) functions use functions they return. we assume that someone else will call the returned function if owningObject(v) != nil { - ctx.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand) + g.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand) } - g.function(ctx, v) + g.function(v) case *ir.Const: // (9.6) instructions use their operands' types - ctx.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(ctx, v.Type(), nil) + g.seeAndUse(v.Type(), fnObj, edgeType) + g.typ(v.Type(), nil) case *ir.Global: if v.Object() != nil { // (9.5) instructions use their operands - ctx.seeAndUse(v.Object(), fnObj, edgeInstructionOperand) + g.seeAndUse(v.Object(), fnObj, edgeInstructionOperand) } } }) @@ -1684,8 +1478,8 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) { // (4.8) instructions use their types // (9.4) conversions use the type they convert to - ctx.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(ctx, v.Type(), nil) + g.seeAndUse(v.Type(), fnObj, edgeType) + g.typ(v.Type(), nil) } } switch instr := instr.(type) { @@ -1693,51 +1487,21 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) { st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access - ctx.seeAndUse(field, fnObj, edgeFieldAccess) + g.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.FieldAddr: st := code.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access - ctx.seeAndUse(field, fnObj, edgeFieldAccess) + g.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.Store: // nothing to do, handled generically by operands case *ir.Call: c := instr.Common() if !c.IsInvoke() { // handled 
generically as an instruction operand - - if g.wholeProgram { - // (e3) special case known reflection-based method callers - switch code.CallName(c) { - case "net/rpc.Register", "net/rpc.RegisterName", "(*net/rpc.Server).Register", "(*net/rpc.Server).RegisterName": - var arg ir.Value - switch code.CallName(c) { - case "net/rpc.Register": - arg = c.Args[0] - case "net/rpc.RegisterName": - arg = c.Args[1] - case "(*net/rpc.Server).Register": - arg = c.Args[1] - case "(*net/rpc.Server).RegisterName": - arg = c.Args[2] - } - walkPhi(arg, func(v ir.Value) { - if v, ok := v.(*ir.MakeInterface); ok { - walkPhi(v.X, func(vv ir.Value) { - ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(vv.Type()) - for i := 0; i < ms.Len(); i++ { - if ms.At(i).Obj().Exported() { - g.useMethod(ctx, vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister) - } - } - }) - } - }) - } - } } else { // (4.5) functions use functions/interface methods they call - ctx.seeAndUse(c.Method, fnObj, edgeInterfaceCall) + g.seeAndUse(c.Method, fnObj, edgeInterfaceCall) } case *ir.Return: // nothing to do, handled generically by operands @@ -1754,14 +1518,14 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) { assert(s1.NumFields() == s2.NumFields()) for i := 0; i < s1.NumFields(); i++ { - ctx.see(s1.Field(i)) - ctx.see(s2.Field(i)) + g.see(s1.Field(i)) + g.see(s2.Field(i)) // (5.1) when converting between two equivalent structs, the fields in // either struct use each other. the fields are relevant for the // conversion, but only if the fields are also accessed outside the // conversion. 
- ctx.seeAndUse(s1.Field(i), s2.Field(i), edgeStructConversion) - ctx.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion) + g.seeAndUse(s1.Field(i), s2.Field(i), edgeStructConversion) + g.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion) } } case *ir.MakeInterface: @@ -1777,7 +1541,7 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - ctx.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) + g.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) } } } @@ -1788,7 +1552,7 @@ func (g *Graph) instructions(ctx *context, fn *ir.Function) { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - ctx.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) + g.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) } } } @@ -1930,49 +1694,3 @@ func walkPhi(v ir.Value, fn func(v ir.Value)) { } impl(phi) } - -func interfacesFromExportData(pkg *types.Package) []*types.Interface { - var out []*types.Interface - scope := pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - out = append(out, interfacesFromObject(obj)...) - } - return out -} - -func interfacesFromObject(obj types.Object) []*types.Interface { - var out []*types.Interface - switch obj := obj.(type) { - case *types.Func: - sig := obj.Type().(*types.Signature) - for i := 0; i < sig.Results().Len(); i++ { - out = append(out, interfacesFromObject(sig.Results().At(i))...) - } - for i := 0; i < sig.Params().Len(); i++ { - out = append(out, interfacesFromObject(sig.Params().At(i))...) - } - case *types.TypeName: - if named, ok := obj.Type().(*types.Named); ok { - for i := 0; i < named.NumMethods(); i++ { - out = append(out, interfacesFromObject(named.Method(i))...) 
- } - - if iface, ok := named.Underlying().(*types.Interface); ok { - out = append(out, iface) - } - } - case *types.Var: - // No call to Underlying here. We want unnamed interfaces - // only. Named interfaces are gotten directly from the - // package's scope. - if iface, ok := obj.Type().(*types.Interface); ok { - out = append(out, iface) - } - case *types.Const: - case *types.Builtin: - default: - panic(fmt.Sprintf("unhandled type: %T", obj)) - } - return out -} diff --git a/unused/unused_test.go b/unused/unused_test.go index b4acc0d2f..4b500abf1 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -1,197 +1,176 @@ package unused import ( - "fmt" - "go/parser" - "go/token" "go/types" - "os" - "sort" - "strconv" "strings" "testing" - "text/scanner" - "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/analysistest" - "golang.org/x/tools/go/packages" - "honnef.co/go/tools/lint" ) -// parseExpectations parses the content of a "// want ..." comment -// and returns the expectations, a mixture of diagnostics ("rx") and -// facts (name:"rx"). -func parseExpectations(text string) ([]string, error) { - var scanErr string - sc := new(scanner.Scanner).Init(strings.NewReader(text)) - sc.Error = func(s *scanner.Scanner, msg string) { - scanErr = msg // e.g. 
bad string escape - } - sc.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanRawStrings - - scanRegexp := func(tok rune) (string, error) { - if tok != scanner.String && tok != scanner.RawString { - return "", fmt.Errorf("got %s, want regular expression", - scanner.TokenString(tok)) - } - pattern, _ := strconv.Unquote(sc.TokenText()) // can't fail - return pattern, nil - } +type expectation bool - var expects []string - for { - tok := sc.Scan() - switch tok { - case scanner.String, scanner.RawString: - rx, err := scanRegexp(tok) - if err != nil { - return nil, err - } - expects = append(expects, rx) - - case scanner.EOF: - if scanErr != "" { - return nil, fmt.Errorf("%s", scanErr) - } - return expects, nil +const ( + shouldBeUsed = true + shouldBeUnused = false +) - default: - return nil, fmt.Errorf("unexpected %s", scanner.TokenString(tok)) - } +func (exp expectation) String() string { + switch exp { + case shouldBeUsed: + return "used" + case shouldBeUnused: + return "unused" + default: + panic("unreachable") } } -func check(t *testing.T, fset *token.FileSet, diagnostics []types.Object) { +func check(t *testing.T, res *analysistest.Result) { type key struct { file string line int } - + want := map[key]expectation{} files := map[string]struct{}{} - for _, d := range diagnostics { - files[fset.Position(d.Pos()).Filename] = struct{}{} - } - - want := make(map[key][]string) - // processComment parses expectations out of comments. - processComment := func(filename string, linenum int, text string) { - text = strings.TrimSpace(text) - - // Any comment starting with "want" is treated - // as an expectation, even without following whitespace. 
- if rest := strings.TrimPrefix(text, "want"); rest != text { - expects, err := parseExpectations(rest) - if err != nil { - t.Errorf("%s:%d: in 'want' comment: %s", filename, linenum, err) - return - } - if expects != nil { - want[key{filename, linenum}] = expects - } + isTest := false + for _, f := range res.Pass.Files { + filename := res.Pass.Fset.Position(f.Pos()).Filename + if strings.HasSuffix(filename, "_test.go") { + isTest = true + break } } - - // Extract 'want' comments from Go files. - fset2 := token.NewFileSet() - for f := range files { - af, err := parser.ParseFile(fset2, f, nil, parser.ParseComments) - if err != nil { - t.Fatal(err) + for _, f := range res.Pass.Files { + filename := res.Pass.Fset.Position(f.Pos()).Filename + if !strings.HasSuffix(filename, ".go") { + continue } - for _, cgroup := range af.Comments { + files[filename] = struct{}{} + for _, cgroup := range f.Comments { + commentLoop: for _, c := range cgroup.List { - text := strings.TrimPrefix(c.Text, "//") if text == c.Text { continue // not a //-comment } - // Hack: treat a comment of the form "//...// want..." - // as if it starts at 'want'. - // This allows us to add comments on comments, - // as required when testing the buildtag analyzer. 
- if i := strings.Index(text, "// want"); i >= 0 { - text = text[i+len("// "):] + fields := strings.Fields(text) + posn := res.Pass.Fset.Position(c.Pos()) + for _, field := range fields { + switch field { + case "used", "unused", "used_test", "unused_test": + default: + continue commentLoop + } + } + for _, field := range fields { + switch field { + case "used": + if !isTest { + want[key{posn.Filename, posn.Line}] = shouldBeUsed + } + case "unused": + if !isTest { + want[key{posn.Filename, posn.Line}] = shouldBeUnused + } + case "used_test": + if isTest { + want[key{posn.Filename, posn.Line}] = shouldBeUsed + } + case "unused_test": + if isTest { + want[key{posn.Filename, posn.Line}] = shouldBeUnused + } + } } - - // It's tempting to compute the filename - // once outside the loop, but it's - // incorrect because it can change due - // to //line directives. - posn := fset2.Position(c.Pos()) - processComment(posn.Filename, posn.Line, text) } } } - checkMessage := func(posn token.Position, name, message string) { - k := key{posn.Filename, posn.Line} - expects := want[k] - var unmatched []string - for i, exp := range expects { - if exp == message { - // matched: remove the expectation. - expects[i] = expects[len(expects)-1] - expects = expects[:len(expects)-1] - want[k] = expects - return + checkObjs := func(objs []types.Object, state expectation) { + for _, obj := range objs { + posn := res.Pass.Fset.Position(obj.Pos()) + if _, ok := files[posn.Filename]; !ok { + continue } - unmatched = append(unmatched, fmt.Sprintf("%q", exp)) - } - if unmatched == nil { - t.Errorf("%v: unexpected: %v", posn, message) - } else { - t.Errorf("%v: %q does not match pattern %s", - posn, message, strings.Join(unmatched, " or ")) - } - } - // Check the diagnostics match expectations. - for _, f := range diagnostics { - posn := fset.Position(f.Pos()) - checkMessage(posn, "", f.Name()) - } - - // Reject surplus expectations. 
- // - // Sometimes an Analyzer reports two similar diagnostics on a - // line with only one expectation. The reader may be confused by - // the error message. - // TODO(adonovan): print a better error: - // "got 2 diagnostics here; each one needs its own expectation". - var surplus []string - for key, expects := range want { - for _, exp := range expects { - err := fmt.Sprintf("%s:%d: no diagnostic was reported matching %q", key.file, key.line, exp) - surplus = append(surplus, err) + k := key{posn.Filename, posn.Line} + exp, ok := want[k] + if !ok { + t.Errorf("unexpected %s object at %s", state, posn) + continue + } + delete(want, k) + if state != exp { + t.Errorf("object at %s should be %s but is %s", posn, exp, state) + } } } - sort.Strings(surplus) - for _, err := range surplus { - t.Errorf("%s", err) + ures := res.Result.(Result) + checkObjs(ures.Used, shouldBeUsed) + checkObjs(ures.Unused, shouldBeUnused) + + for key, b := range want { + var exp string + if b { + exp = "used" + } else { + exp = "unused " + } + t.Errorf("did not see expected %s object %s:%d", exp, key.file, key.line) } } func TestAll(t *testing.T) { - c := NewChecker(false) - var stats lint.Stats - r, err := lint.NewRunner(&stats) - if err != nil { - t.Fatal(err) + dirs := []string{ + "tests", + "alias", + "anonymous", + "blank", + "cgo", + "consts", + "conversion", + "cyclic", + "defer", + "elem", + "embedded_call", + "embedding", + "embedding2", + "exported_fields", + "exported_fields_main", + "exported_method_test", + "fields", + "functions", + "ignored", + "interfaces", + "interfaces2", + "linkname", + "main", + "mapslice", + "methods", + "named", + "nested", + "nocopy", + "nocopy-main", + "pointer-type-embedding", + "quiet", + "selectors", + "switch_interface", + "tests", + "tests-main", + "type-dedup", + "type-dedup2", + "type-dedup3", + "types", + "unused-argument", + "unused_type", + "variables", } - dir := analysistest.TestData() - cfg := &packages.Config{ - Dir: dir, - Tests: true, 
- Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"), - } - pkgs, err := r.Run(cfg, []string{"./..."}, []*analysis.Analyzer{c.Analyzer()}, true) - if err != nil { - t.Fatal(err) + results := analysistest.Run(t, analysistest.TestData(), Analyzer, dirs...) + for _, res := range results { + check(t, res) } - - res := c.Result() - check(t, pkgs[0].Fset, res) } From ca7c57230fa34558ce43170ca14a898b15311a84 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 4 May 2020 13:32:05 +0200 Subject: [PATCH 022/111] Remove support for binary export data This is a manual application of 655248709eb08403f2cd91d1d0b783f4d5db38d1 from golang.org/x/tools --- internal/go/gcexportdata/gcexportdata.go | 12 +- internal/go/gcimporter/bimport.go | 1056 ----------------- internal/go/gcimporter/gcimporter.go | 11 +- internal/go/gcimporter/iimport.go | 187 +++ .../testdata/versions/test_go1.11_6b.a | Bin 2426 -> 0 bytes .../testdata/versions/test_go1.11_999b.a | Bin 2600 -> 0 bytes .../testdata/versions/test_go1.7_0.a | Bin 1862 -> 0 bytes .../testdata/versions/test_go1.7_1.a | Bin 2316 -> 0 bytes .../testdata/versions/test_go1.8_4.a | Bin 1658 -> 0 bytes .../testdata/versions/test_go1.8_5.a | Bin 1658 -> 0 bytes 10 files changed, 195 insertions(+), 1071 deletions(-) delete mode 100644 internal/go/gcimporter/bimport.go delete mode 100644 internal/go/gcimporter/testdata/versions/test_go1.11_6b.a delete mode 100644 internal/go/gcimporter/testdata/versions/test_go1.11_999b.a delete mode 100644 internal/go/gcimporter/testdata/versions/test_go1.7_0.a delete mode 100644 internal/go/gcimporter/testdata/versions/test_go1.7_1.a delete mode 100644 internal/go/gcimporter/testdata/versions/test_go1.8_4.a delete mode 100644 internal/go/gcimporter/testdata/versions/test_go1.8_5.a diff --git a/internal/go/gcexportdata/gcexportdata.go b/internal/go/gcexportdata/gcexportdata.go index 03540ebd3..aa8ebe6ac 100644 --- a/internal/go/gcexportdata/gcexportdata.go +++ 
b/internal/go/gcexportdata/gcexportdata.go @@ -109,14 +109,10 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) } - // The indexed export format starts with an 'i'; the older - // binary export format starts with a 'c', 'd', or 'v' - // (from "version"). Select appropriate importer. - if len(data) > 0 && data[0] == 'i' { - _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) - return pkg, err + // The indexed export format starts with an 'i'. + if len(data) == 0 || data[0] != 'i' { + return nil, fmt.Errorf("unknown export data format") } - - _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) return pkg, err } diff --git a/internal/go/gcimporter/bimport.go b/internal/go/gcimporter/bimport.go deleted file mode 100644 index 3176441c4..000000000 --- a/internal/go/gcimporter/bimport.go +++ /dev/null @@ -1,1056 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. 
- -package gcimporter - -import ( - "encoding/binary" - "fmt" - "go/constant" - "go/token" - "go/types" - "sort" - "strconv" - "strings" - "sync" - "unicode" - "unicode/utf8" - "unsafe" -) - -type importer struct { - imports map[string]*types.Package - data []byte - importpath string - buf []byte // for reading strings - version int // export format version - - // object lists - strList []string // in order of appearance - pathList []string // in order of appearance - pkgList []*types.Package // in order of appearance - typList []types.Type // in order of appearance - interfaceList []*types.Interface // for delayed completion only - trackAllTypes bool - - // position encoding - posInfoFormat bool - prevFile string - prevLine int - fake fakeFileSet - - // debugging support - debugFormat bool - read int // bytes read -} - -// BImportData imports a package from the serialized package data -// and returns the number of bytes consumed and a reference to the package. -// If the export data version is not recognized or the format is otherwise -// compromised, an error is returned. -func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { - // catch panics and return them as errors - const currentVersion = 6 - version := -1 // unknown version - defer func() { - if e := recover(); e != nil { - // Return a (possibly nil or incomplete) package unchanged (see #16088). 
- if version > currentVersion { - err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) - } else { - err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) - } - } - }() - - p := importer{ - imports: imports, - data: data, - importpath: path, - version: version, - strList: []string{""}, // empty string is mapped to 0 - pathList: []string{""}, // empty string is mapped to 0 - fake: fakeFileSet{ - fset: fset, - files: make(map[string]*token.File), - }, - } - - // read version info - var versionstr string - if b := p.rawByte(); b == 'c' || b == 'd' { - // Go1.7 encoding; first byte encodes low-level - // encoding format (compact vs debug). - // For backward-compatibility only (avoid problems with - // old installed packages). Newly compiled packages use - // the extensible format string. - // TODO(gri) Remove this support eventually; after Go1.8. - if b == 'd' { - p.debugFormat = true - } - p.trackAllTypes = p.rawByte() == 'a' - p.posInfoFormat = p.int() != 0 - versionstr = p.string() - if versionstr == "v1" { - version = 0 - } - } else { - // Go1.8 extensible encoding - // read version string and extract version number (ignore anything after the version number) - versionstr = p.rawStringln(b) - if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { - if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { - version = v - } - } - } - p.version = version - - // read version specific flags - extend as necessary - switch p.version { - // case currentVersion: - // ... 
- // fallthrough - case currentVersion, 5, 4, 3, 2, 1: - p.debugFormat = p.rawStringln(p.rawByte()) == "debug" - p.trackAllTypes = p.int() != 0 - p.posInfoFormat = p.int() != 0 - case 0: - // Go1.7 encoding format - nothing to do here - default: - errorf("unknown bexport format version %d (%q)", p.version, versionstr) - } - - // --- generic export data --- - - // populate typList with predeclared "known" types - p.typList = append(p.typList, predeclared()...) - - // read package data - pkg = p.pkg() - - // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go) - objcount := 0 - for { - tag := p.tagOrIndex() - if tag == endTag { - break - } - p.obj(tag) - objcount++ - } - - // self-verification - if count := p.int(); count != objcount { - errorf("got %d objects; want %d", objcount, count) - } - - // ignore compiler-specific import data - - // complete interfaces - // TODO(gri) re-investigate if we still need to do this in a delayed fashion - for _, typ := range p.interfaceList { - typ.Complete() - } - - // record all referenced packages as imports - list := append(([]*types.Package)(nil), p.pkgList[1:]...) 
- sort.Sort(byPath(list)) - pkg.SetImports(list) - - // package was imported completely and without errors - pkg.MarkComplete() - - return p.read, pkg, nil -} - -func errorf(format string, args ...interface{}) { - panic(fmt.Sprintf(format, args...)) -} - -func (p *importer) pkg() *types.Package { - // if the package was seen before, i is its index (>= 0) - i := p.tagOrIndex() - if i >= 0 { - return p.pkgList[i] - } - - // otherwise, i is the package tag (< 0) - if i != packageTag { - errorf("unexpected package tag %d version %d", i, p.version) - } - - // read package data - name := p.string() - var path string - if p.version >= 5 { - path = p.path() - } else { - path = p.string() - } - if p.version >= 6 { - p.int() // package height; unused by go/types - } - - // we should never see an empty package name - if name == "" { - errorf("empty package name in import") - } - - // an empty path denotes the package we are currently importing; - // it must be the first package we see - if (path == "") != (len(p.pkgList) == 0) { - errorf("package path %q for pkg index %d", path, len(p.pkgList)) - } - - // if the package was imported before, use that one; otherwise create a new one - if path == "" { - path = p.importpath - } - pkg := p.imports[path] - if pkg == nil { - pkg = types.NewPackage(path, name) - p.imports[path] = pkg - } else if pkg.Name() != name { - errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) - } - p.pkgList = append(p.pkgList, pkg) - - return pkg -} - -// objTag returns the tag value for each object kind. 
-func objTag(obj types.Object) int { - switch obj.(type) { - case *types.Const: - return constTag - case *types.TypeName: - return typeTag - case *types.Var: - return varTag - case *types.Func: - return funcTag - default: - errorf("unexpected object: %v (%T)", obj, obj) // panics - panic("unreachable") - } -} - -func sameObj(a, b types.Object) bool { - // Because unnamed types are not canonicalized, we cannot simply compare types for - // (pointer) identity. - // Ideally we'd check equality of constant values as well, but this is good enough. - return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) -} - -func (p *importer) declare(obj types.Object) { - pkg := obj.Pkg() - if alt := pkg.Scope().Insert(obj); alt != nil { - // This can only trigger if we import a (non-type) object a second time. - // Excluding type aliases, this cannot happen because 1) we only import a package - // once; and b) we ignore compiler-specific export data which may contain - // functions whose inlined function bodies refer to other functions that - // were already imported. - // However, type aliases require reexporting the original type, so we need - // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, - // method importer.obj, switch case importing functions). - // TODO(gri) review/update this comment once the gc compiler handles type aliases. 
- if !sameObj(obj, alt) { - errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) - } - } -} - -func (p *importer) obj(tag int) { - switch tag { - case constTag: - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil, nil) - val := p.value() - p.declare(types.NewConst(pos, pkg, name, typ, val)) - - case aliasTag: - // TODO(gri) verify type alias hookup is correct - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil, nil) - p.declare(types.NewTypeName(pos, pkg, name, typ)) - - case typeTag: - p.typ(nil, nil) - - case varTag: - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil, nil) - p.declare(types.NewVar(pos, pkg, name, typ)) - - case funcTag: - pos := p.pos() - pkg, name := p.qualifiedName() - params, isddd := p.paramList() - result, _ := p.paramList() - sig := types.NewSignature(nil, params, result, isddd) - p.declare(types.NewFunc(pos, pkg, name, sig)) - - default: - errorf("unexpected object tag %d", tag) - } -} - -const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go - -func (p *importer) pos() token.Pos { - if !p.posInfoFormat { - return token.NoPos - } - - file := p.prevFile - line := p.prevLine - delta := p.int() - line += delta - if p.version >= 5 { - if delta == deltaNewFile { - if n := p.int(); n >= 0 { - // file changed - file = p.path() - line = n - } - } - } else { - if delta == 0 { - if n := p.int(); n >= 0 { - // file changed - file = p.prevFile[:n] + p.string() - line = p.int() - } - } - } - p.prevFile = file - p.prevLine = line - - return p.fake.pos(file, line, 0) -} - -// Synthesize a token.Pos -type fakeFileSet struct { - fset *token.FileSet - files map[string]*token.File -} - -type unsafeFile struct { - _ uintptr - _ string - _ int - _ int - mutex sync.Mutex - lines []int -} - -func (f *unsafeFile) SetLines(lines []int) bool { - f.mutex.Lock() - f.lines = lines - f.mutex.Unlock() - return true -} - -func (s *fakeFileSet) pos(file string, line, column int) 
token.Pos { - // TODO(mdempsky): Make use of column. - - // Since we don't know the set of needed file positions, we - // reserve maxlines positions per file. - const maxlines = 64 * 1024 - f := s.files[file] - if f == nil { - f = s.fset.AddFile(file, -1, maxlines) - s.files[file] = f - // Allocate the fake linebreak indices on first use. - // TODO(adonovan): opt: save ~512KB using a more complex scheme? - fakeLinesOnce.Do(func() { - fakeLines = make([]int, maxlines) - for i := range fakeLines { - fakeLines[i] = i - } - }) - (*unsafeFile)(unsafe.Pointer(f)).SetLines(fakeLines) - } - - if line > maxlines { - line = 1 - } - - // Treat the file as if it contained only newlines - // and column=1: use the line number as the offset. - return f.Pos(line - 1) -} - -var ( - fakeLines []int - fakeLinesOnce sync.Once -) - -func (p *importer) qualifiedName() (pkg *types.Package, name string) { - name = p.string() - pkg = p.pkg() - return -} - -func (p *importer) record(t types.Type) { - p.typList = append(p.typList, t) -} - -// A dddSlice is a types.Type representing ...T parameters. -// It only appears for parameter types and does not escape -// the importer. -type dddSlice struct { - elem types.Type -} - -func (t *dddSlice) Underlying() types.Type { return t } -func (t *dddSlice) String() string { return "..." + t.elem.String() } - -// parent is the package which declared the type; parent == nil means -// the package currently imported. The parent package is needed for -// exported struct fields and interface methods which don't contain -// explicit package information in the export data. -// -// A non-nil tname is used as the "owner" of the result type; i.e., -// the result type is the underlying type of tname. tname is used -// to give interface methods a named receiver type where possible. 
-func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type { - // if the type was seen before, i is its index (>= 0) - i := p.tagOrIndex() - if i >= 0 { - return p.typList[i] - } - - // otherwise, i is the type tag (< 0) - switch i { - case namedTag: - // read type object - pos := p.pos() - parent, name := p.qualifiedName() - scope := parent.Scope() - obj := scope.Lookup(name) - - // if the object doesn't exist yet, create and insert it - if obj == nil { - obj = types.NewTypeName(pos, parent, name, nil) - scope.Insert(obj) - } - - if _, ok := obj.(*types.TypeName); !ok { - errorf("pkg = %s, name = %s => %s", parent, name, obj) - } - - // associate new named type with obj if it doesn't exist yet - t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) - - // but record the existing type, if any - tname := obj.Type().(*types.Named) // tname is either t0 or the existing type - p.record(tname) - - // read underlying type - t0.SetUnderlying(p.typ(parent, t0)) - - // interfaces don't have associated methods - if types.IsInterface(t0) { - return tname - } - - // read associated methods - for i := p.int(); i > 0; i-- { - // TODO(gri) replace this with something closer to fieldName - pos := p.pos() - name := p.string() - if !exported(name) { - p.pkg() - } - - recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? 
- params, isddd := p.paramList() - result, _ := p.paramList() - p.int() // go:nointerface pragma - discarded - - sig := types.NewSignature(recv.At(0), params, result, isddd) - t0.AddMethod(types.NewFunc(pos, parent, name, sig)) - } - - return tname - - case arrayTag: - t := new(types.Array) - if p.trackAllTypes { - p.record(t) - } - - n := p.int64() - *t = *types.NewArray(p.typ(parent, nil), n) - return t - - case sliceTag: - t := new(types.Slice) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewSlice(p.typ(parent, nil)) - return t - - case dddTag: - t := new(dddSlice) - if p.trackAllTypes { - p.record(t) - } - - t.elem = p.typ(parent, nil) - return t - - case structTag: - t := new(types.Struct) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewStruct(p.fieldList(parent)) - return t - - case pointerTag: - t := new(types.Pointer) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewPointer(p.typ(parent, nil)) - return t - - case signatureTag: - t := new(types.Signature) - if p.trackAllTypes { - p.record(t) - } - - params, isddd := p.paramList() - result, _ := p.paramList() - *t = *types.NewSignature(nil, params, result, isddd) - return t - - case interfaceTag: - // Create a dummy entry in the type list. This is safe because we - // cannot expect the interface type to appear in a cycle, as any - // such cycle must contain a named type which would have been - // first defined earlier. - // TODO(gri) Is this still true now that we have type aliases? - // See issue #23225. 
- n := len(p.typList) - if p.trackAllTypes { - p.record(nil) - } - - var embeddeds []types.Type - for n := p.int(); n > 0; n-- { - p.pos() - embeddeds = append(embeddeds, p.typ(parent, nil)) - } - - t := newInterface(p.methodList(parent, tname), embeddeds) - p.interfaceList = append(p.interfaceList, t) - if p.trackAllTypes { - p.typList[n] = t - } - return t - - case mapTag: - t := new(types.Map) - if p.trackAllTypes { - p.record(t) - } - - key := p.typ(parent, nil) - val := p.typ(parent, nil) - *t = *types.NewMap(key, val) - return t - - case chanTag: - t := new(types.Chan) - if p.trackAllTypes { - p.record(t) - } - - dir := chanDir(p.int()) - val := p.typ(parent, nil) - *t = *types.NewChan(dir, val) - return t - - default: - errorf("unexpected type tag %d", i) // panics - panic("unreachable") - } -} - -func chanDir(d int) types.ChanDir { - // tag values must match the constants in cmd/compile/internal/gc/go.go - switch d { - case 1 /* Crecv */ : - return types.RecvOnly - case 2 /* Csend */ : - return types.SendOnly - case 3 /* Cboth */ : - return types.SendRecv - default: - errorf("unexpected channel dir %d", d) - return 0 - } -} - -func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { - if n := p.int(); n > 0 { - fields = make([]*types.Var, n) - tags = make([]string, n) - for i := range fields { - fields[i], tags[i] = p.field(parent) - } - } - return -} - -func (p *importer) field(parent *types.Package) (*types.Var, string) { - pos := p.pos() - pkg, name, alias := p.fieldName(parent) - typ := p.typ(parent, nil) - tag := p.string() - - anonymous := false - if name == "" { - // anonymous field - typ must be T or *T and T must be a type name - switch typ := deref(typ).(type) { - case *types.Basic: // basic types are named types - pkg = nil // // objects defined in Universe scope have no package - name = typ.Name() - case *types.Named: - name = typ.Obj().Name() - default: - errorf("named base type expected") - } - anonymous = 
true - } else if alias { - // anonymous field: we have an explicit name because it's an alias - anonymous = true - } - - return types.NewField(pos, pkg, name, typ, anonymous), tag -} - -func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) { - if n := p.int(); n > 0 { - methods = make([]*types.Func, n) - for i := range methods { - methods[i] = p.method(parent, baseType) - } - } - return -} - -func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func { - pos := p.pos() - pkg, name, _ := p.fieldName(parent) - // If we don't have a baseType, use a nil receiver. - // A receiver using the actual interface type (which - // we don't know yet) will be filled in when we call - // types.Interface.Complete. - var recv *types.Var - if baseType != nil { - recv = types.NewVar(token.NoPos, parent, "", baseType) - } - params, isddd := p.paramList() - result, _ := p.paramList() - sig := types.NewSignature(recv, params, result, isddd) - return types.NewFunc(pos, pkg, name, sig) -} - -func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { - name = p.string() - pkg = parent - if pkg == nil { - // use the imported package instead - pkg = p.pkgList[0] - } - if p.version == 0 && name == "_" { - // version 0 didn't export a package for _ fields - return - } - switch name { - case "": - // 1) field name matches base type name and is exported: nothing to do - case "?": - // 2) field name matches base type name and is not exported: need package - name = "" - pkg = p.pkg() - case "@": - // 3) field name doesn't match type name (alias) - name = p.string() - alias = true - fallthrough - default: - if !exported(name) { - pkg = p.pkg() - } - } - return -} - -func (p *importer) paramList() (*types.Tuple, bool) { - n := p.int() - if n == 0 { - return nil, false - } - // negative length indicates unnamed parameters - named := true - if n < 0 { - n = -n - named = false - } - // n > 0 - 
params := make([]*types.Var, n) - isddd := false - for i := range params { - params[i], isddd = p.param(named) - } - return types.NewTuple(params...), isddd -} - -func (p *importer) param(named bool) (*types.Var, bool) { - t := p.typ(nil, nil) - td, isddd := t.(*dddSlice) - if isddd { - t = types.NewSlice(td.elem) - } - - var pkg *types.Package - var name string - if named { - name = p.string() - if name == "" { - errorf("expected named parameter") - } - if name != "_" { - pkg = p.pkg() - } - if i := strings.Index(name, "·"); i > 0 { - name = name[:i] // cut off gc-specific parameter numbering - } - } - - // read and discard compiler-specific info - p.string() - - return types.NewVar(token.NoPos, pkg, name, t), isddd -} - -func exported(name string) bool { - ch, _ := utf8.DecodeRuneInString(name) - return unicode.IsUpper(ch) -} - -func (p *importer) value() constant.Value { - switch tag := p.tagOrIndex(); tag { - case falseTag: - return constant.MakeBool(false) - case trueTag: - return constant.MakeBool(true) - case int64Tag: - return constant.MakeInt64(p.int64()) - case floatTag: - return p.float() - case complexTag: - re := p.float() - im := p.float() - return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) - case stringTag: - return constant.MakeString(p.string()) - case unknownTag: - return constant.MakeUnknown() - default: - errorf("unexpected value tag %d", tag) // panics - panic("unreachable") - } -} - -func (p *importer) float() constant.Value { - sign := p.int() - if sign == 0 { - return constant.MakeInt64(0) - } - - exp := p.int() - mant := []byte(p.string()) // big endian - - // remove leading 0's if any - for len(mant) > 0 && mant[0] == 0 { - mant = mant[1:] - } - - // convert to little endian - // TODO(gri) go/constant should have a more direct conversion function - // (e.g., once it supports a big.Float based implementation) - for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { - mant[i], mant[j] = mant[j], mant[i] - } - - // adjust exponent 
(constant.MakeFromBytes creates an integer value, - // but mant represents the mantissa bits such that 0.5 <= mant < 1.0) - exp -= len(mant) << 3 - if len(mant) > 0 { - for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { - exp++ - } - } - - x := constant.MakeFromBytes(mant) - switch { - case exp < 0: - d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) - x = constant.BinaryOp(x, token.QUO, d) - case exp > 0: - x = constant.Shift(x, token.SHL, uint(exp)) - } - - if sign < 0 { - x = constant.UnaryOp(token.SUB, x, 0) - } - return x -} - -// ---------------------------------------------------------------------------- -// Low-level decoders - -func (p *importer) tagOrIndex() int { - if p.debugFormat { - p.marker('t') - } - - return int(p.rawInt64()) -} - -func (p *importer) int() int { - x := p.int64() - if int64(int(x)) != x { - errorf("exported integer too large") - } - return int(x) -} - -func (p *importer) int64() int64 { - if p.debugFormat { - p.marker('i') - } - - return p.rawInt64() -} - -func (p *importer) path() string { - if p.debugFormat { - p.marker('p') - } - // if the path was seen before, i is its index (>= 0) - // (the empty string is at index 0) - i := p.rawInt64() - if i >= 0 { - return p.pathList[i] - } - // otherwise, i is the negative path length (< 0) - a := make([]string, -i) - for n := range a { - a[n] = p.string() - } - s := strings.Join(a, "/") - p.pathList = append(p.pathList, s) - return s -} - -func (p *importer) string() string { - if p.debugFormat { - p.marker('s') - } - // if the string was seen before, i is its index (>= 0) - // (the empty string is at index 0) - i := p.rawInt64() - if i >= 0 { - return p.strList[i] - } - // otherwise, i is the negative string length (< 0) - if n := int(-i); n <= cap(p.buf) { - p.buf = p.buf[:n] - } else { - p.buf = make([]byte, n) - } - for i := range p.buf { - p.buf[i] = p.rawByte() - } - s := string(p.buf) - p.strList = append(p.strList, s) - return s -} - -func (p *importer) 
marker(want byte) { - if got := p.rawByte(); got != want { - errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) - } - - pos := p.read - if n := int(p.rawInt64()); n != pos { - errorf("incorrect position: got %d; want %d", n, pos) - } -} - -// rawInt64 should only be used by low-level decoders. -func (p *importer) rawInt64() int64 { - i, err := binary.ReadVarint(p) - if err != nil { - errorf("read error: %v", err) - } - return i -} - -// rawStringln should only be used to read the initial version string. -func (p *importer) rawStringln(b byte) string { - p.buf = p.buf[:0] - for b != '\n' { - p.buf = append(p.buf, b) - b = p.rawByte() - } - return string(p.buf) -} - -// needed for binary.ReadVarint in rawInt64 -func (p *importer) ReadByte() (byte, error) { - return p.rawByte(), nil -} - -// byte is the bottleneck interface for reading p.data. -// It unescapes '|' 'S' to '$' and '|' '|' to '|'. -// rawByte should only be used by low-level decoders. -func (p *importer) rawByte() byte { - b := p.data[0] - r := 1 - if b == '|' { - b = p.data[1] - r = 2 - switch b { - case 'S': - b = '$' - case '|': - // nothing to do - default: - errorf("unexpected escape sequence in export data") - } - } - p.data = p.data[r:] - p.read += r - return b - -} - -// ---------------------------------------------------------------------------- -// Export format - -// Tags. Must be < 0. 
-const ( - // Objects - packageTag = -(iota + 1) - constTag - typeTag - varTag - funcTag - endTag - - // Types - namedTag - arrayTag - sliceTag - dddTag - structTag - pointerTag - signatureTag - interfaceTag - mapTag - chanTag - - // Values - falseTag - trueTag - int64Tag - floatTag - fractionTag // not used by gc - complexTag - stringTag - nilTag // only used by gc (appears in exported inlined function bodies) - unknownTag // not used by gc (only appears in packages with errors) - - // Type aliases - aliasTag -) - -var predeclOnce sync.Once -var predecl []types.Type // initialized lazily - -func predeclared() []types.Type { - predeclOnce.Do(func() { - // initialize lazily to be sure that all - // elements have been initialized before - predecl = []types.Type{ // basic types - types.Typ[types.Bool], - types.Typ[types.Int], - types.Typ[types.Int8], - types.Typ[types.Int16], - types.Typ[types.Int32], - types.Typ[types.Int64], - types.Typ[types.Uint], - types.Typ[types.Uint8], - types.Typ[types.Uint16], - types.Typ[types.Uint32], - types.Typ[types.Uint64], - types.Typ[types.Uintptr], - types.Typ[types.Float32], - types.Typ[types.Float64], - types.Typ[types.Complex64], - types.Typ[types.Complex128], - types.Typ[types.String], - - // basic type aliases - types.Universe.Lookup("byte").Type(), - types.Universe.Lookup("rune").Type(), - - // error - types.Universe.Lookup("error").Type(), - - // untyped types - types.Typ[types.UntypedBool], - types.Typ[types.UntypedInt], - types.Typ[types.UntypedRune], - types.Typ[types.UntypedFloat], - types.Typ[types.UntypedComplex], - types.Typ[types.UntypedString], - types.Typ[types.UntypedNil], - - // package unsafe - types.Typ[types.UnsafePointer], - - // invalid type - types.Typ[types.Invalid], // only appears in packages with errors - - // used internally by gc; never used by this package or in .a files - anyType{}, - } - }) - return predecl -} - -type anyType struct{} - -func (t anyType) Underlying() types.Type { return t } -func (t 
anyType) String() string { return "any" } diff --git a/internal/go/gcimporter/gcimporter.go b/internal/go/gcimporter/gcimporter.go index 981320890..81a262d36 100644 --- a/internal/go/gcimporter/gcimporter.go +++ b/internal/go/gcimporter/gcimporter.go @@ -204,14 +204,11 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func // Or, define a new standard go/types/gcexportdata package. fset := token.NewFileSet() - // The indexed export format starts with an 'i'; the older - // binary export format starts with a 'c', 'd', or 'v' - // (from "version"). Select appropriate importer. - if len(data) > 0 && data[0] == 'i' { - _, pkg, err = IImportData(fset, packages, data[1:], id) - } else { - _, pkg, err = BImportData(fset, packages, data, id) + // The indexed export format starts with an 'i'. + if len(data) == 0 || data[0] != 'i' { + return nil, fmt.Errorf("unknown export data format") } + _, pkg, err = IImportData(fset, packages, data[1:], id) default: err = fmt.Errorf("unknown export data header: %q", hdr) diff --git a/internal/go/gcimporter/iimport.go b/internal/go/gcimporter/iimport.go index a31a88026..6c7d76c1a 100644 --- a/internal/go/gcimporter/iimport.go +++ b/internal/go/gcimporter/iimport.go @@ -18,6 +18,10 @@ import ( "go/types" "io" "sort" + "sync" + "unicode" + "unicode/utf8" + "unsafe" ) type intReader struct { @@ -25,6 +29,10 @@ type intReader struct { path string } +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + func (r *intReader) int64() int64 { i, err := binary.ReadVarint(r.Reader) if err != nil { @@ -628,3 +636,182 @@ func (r *importReader) byte() byte { } return x } + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +// Synthesize a token.Pos +type fakeFileSet struct { + fset *token.FileSet + files map[string]*token.File +} + +type unsafeFile struct { + _ uintptr + _ string + _ int + _ int + mutex sync.Mutex + lines []int +} + +func (f *unsafeFile) 
SetLines(lines []int) bool { + f.mutex.Lock() + f.lines = lines + f.mutex.Unlock() + return true +} + +func (s *fakeFileSet) pos(file string, line, column int) token.Pos { + // TODO(mdempsky): Make use of column. + + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. + const maxlines = 64 * 1024 + f := s.files[file] + if f == nil { + f = s.fset.AddFile(file, -1, maxlines) + s.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + (*unsafeFile)(unsafe.Pointer(f)).SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. + return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func chanDir(d int) types.ChanDir { + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d { + case 1 /* Crecv */ : + return types.RecvOnly + case 2 /* Csend */ : + return types.SendOnly + case 3 /* Cboth */ : + return types.SendRecv + default: + errorf("unexpected channel dir %d", d) + return 0 + } +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. 
+const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag +) + +var predeclOnce sync.Once +var predecl []types.Type // initialized lazily + +func predeclared() []types.Type { + predeclOnce.Do(func() { + // initialize lazily to be sure that all + // elements have been initialized before + predecl = []types.Type{ // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + anyType{}, + } + }) + return predecl +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t 
anyType) String() string { return "any" } diff --git a/internal/go/gcimporter/testdata/versions/test_go1.11_6b.a b/internal/go/gcimporter/testdata/versions/test_go1.11_6b.a deleted file mode 100644 index c0a211e917435646f4fde78cd2f41aed4599ea0c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2426 zcmd5-&2HO95FT!2g`s7ea9mXg3WE_0!*Cpmq9lq^4a0C$+i4CqYNho}p{NxlR}yK8 zN)fvTbv3ss< z^@i{M(5+HID_KpdmGWgnFKX1tH%&v=Xue{Z`O*i*g@WCpHa6O%rdH4^?R>k{q_o^F zndPG01cq;^-R)tdo7O0B9Xg=C70}ieu#M;h^z>T%r@$6NWz%(gANXtQT3I*B?Pj}T zSX#bpQnOq#ZG#r5W>eh+w&l1g9z~7FZ%WTLT670(_YRYd*kn3^%dz4ze>=G2TLXvQ zL099b;V|e2cNkUCXcGAKtZc@vZ%bPyHi)vu6LB@L>x^L{u7-8p#GItDJsQ~4Z>ku_ za+7j=R;%Ys!?ONp5On)gb?jz07^+$x*mguUz^cQqy1E`-@yTbc0#jlbQd+)my#}@t zqNcv`<=Ada4{R+Wj|z%s1>uzemYwq1cmhlg_waVW#XGT@%E*cRpK2@e{*Sel%zLaX z*b}o@WcUR8pSUf&CrQAbNCJctzKp1E`2Jw33BU0TVcx?bRLyrTXZ&so(JmxrgiG3M zFKUVtu9KeLMmnaSX1KD(2;}M4A&&L2{JofNgD4tTV7E*^Yvf)G@qdtW{BF*1b6EGe zi0giCQulLl-Ot5!AJ5Q;J;wCU|4w~!PZWQR*Z(Es^Tsl^UyCQVNNyiuVj!3Jy8&PV zA!$g1hRaj}KLb%tNGT;DC4^*B6p~U3pJdn|DbC9va}-%b;1WqeY648li6jCu9GJ!j zftgF-!z)1P)5x>9Ox_6aMo1N90;DFBG&vTlu-Fq#P<{|_%>ELN?EXrGm|aCF0h89o z4C~}vfO8?GA;Z$POj1jA&d}iuXMFgX6Kt^i>X<-_2#8KFAi_IgqO4 E0@X}F`Tzg` diff --git a/internal/go/gcimporter/testdata/versions/test_go1.11_999b.a b/internal/go/gcimporter/testdata/versions/test_go1.11_999b.a deleted file mode 100644 index c35d22dce691e67127e04ea74ddd8c97a57e22ff..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2600 zcmd5-QE%c#5MFNy=W|eEYCj zofNe{t4~#>{-OSb&aBx$f#kU>+p|0K%{Q~N-rZLxPV5d&m2OwR`tYv({kfJlV9tK4GN1>%$9hg#UXWp1K*PW)>y)|`=NRW*-&W<%|Dk5#Om*JxNw>r9#U!*r~b zP0ci$Mx$A+X{J%OwbuqT?WWm}oe_(|V4Sd6=}m*7rv;u?yl%MV_s08v?Wf6Q6m@>S zx_JAs)U_|7*t!_KtKHs)x1H;^ei#;&H_#MC+23y|pIJN!!m-w9G!%tU=~==i2_YNG z@A|fKfBo+A$IHvk#w2#X^22Q&a_kIW_--(Y!Wa_RM!e@F&i0IHqM%0i!xycgGydf~ zU_+05O{vr+l#orjNhAW1m98Il^>AK5QK?63X_$73{$JMel|qOgmK#G)BLDpLr;&V# zn63|;$v`g_^@UP^kwyqP;Ej138~ZV%zGZv9{g 
znV&e*B=p&s#ZJP!Q?wnYb7FUEW^{*u9S-P);czK(-!$r`Rqyxu4a+ggb(`7sn(bPw z!VH(0Ho{N+P=|@>X^{1WI|EEG?uSo;jsdh=}V&d^_VEymMv z5{#JcyS*Tp=tdb=^(-YE!-<85gnq;ihxvBU5u^!MAa>%|`GU%Q?zyr%_5^ko2BC*( zkn~83oP=*VG!&Pjci*5^9g-`C$oe6cpk{Da~(`lDJVy;zog4 zbVW=-{{e#MzIi6gvuXw^X8{AKl@uyEI2~bJx6@$%1PEUGwmQYvIB5<$qk2Z$*50 zk75>_FIQD5p>37s>5beZJ44wf8d^aX7v%GO;k|hPJq4YWL!pz878ng++&xoiZnuMs z17y5kpgjiH=els+Gz1+rDI`qsK{i@_81CZGq8iaMXf;*%R8ePV0gI{7COseryqA?D z63LJRsHGFggVXp7#Hd+yYxGLd9RSj+|6dE7!@>Xn diff --git a/internal/go/gcimporter/testdata/versions/test_go1.7_0.a b/internal/go/gcimporter/testdata/versions/test_go1.7_0.a deleted file mode 100644 index edb6c3f25a159d65fd097bcf0b5bc6a2add86eee..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1862 zcmcgsUyIvD5FhP1j?lS5s8`p7a45zc=D3sfXHS+(DIr|bKwk3DLf)K}w9=`OB}bN1 z^3s>&rSuczE%^q0%WFuW&wc6l=qK3gNV1LV2CkISWwo<2znR(DnOWWbk`=+lSITs1 zJbduY`PcXKW|%8lk!IhlE~PbMkKL8VV{4ha}V4EOeHssAj^ZGh&GQ1o9aT0V zE7|B7>L+=&s3k_6(@*cFEPHY<;b{n;kv(Zj0QL#&g9Lc0GL7@8k*`v;$zj8PET0xV z^BVO3%BON$0l*rK(@+9ome1$hFfQ2Q!sz#nt;>lH@;??fh+eI;#5~Q*0)&k6i-ycW z9M$0Qj_tQ`kmdnP7w_MizUT7DnM6*+L(e&jEb6nfh=$|9wIhc9x0JC3LLT7YKKD$= zoW5pom1AG}q=&cQ38gfS>1SbHk^94bS&&D_~y4t#C474yO~F zx{e=?EsI(n1K~A}t9(YwEZB8V*XMo=G#Zel@vd3FSY~B1<3=3#Nx3jg3&fp9JHtm- zb*gb(_3Tyw4G{X;3|+FKV9!AeYO9@U(KxTqPLu43v0KW+wUgD^AcoD!&10FfvhExZ z{hffO7=c>Luya7T4gB{`d;c~9VK?Y*K`QL>muo}l$gTT7pAXdgKb;SHAILu7Z0*CZ z$Vnea8*sMT_y(I%(LMYsy53j(RYQ_M-OZ!CGFsC&v>SmeyI1*Y@NSULW-JR&F}M7b z74dS$vvPr9P1g?BjvoBp`TV*pzgqKG(e2A_>khY>f3O#XkS=t=MBR2vZ?$E$)mEU@ zX}2+{H&g(k335O>(Ans8dupq*(Ye{^Xev=b1)<#_J?J65xuUl!IjJTQ+Tn&d5SmVO z&|4L9OU30-X#F;7q3wVi!4c99%waB@DtRp(-9eT9Ls@~rW84jeb__De1W?iu+DW^* ztk4W%1O8PslOTw62!BX8HS_^`<>I7v@u;$@{1|hUypx8nB@`I%B$SxHRjVyDx``D7 NcQ?$zihvOW!*Feif1)VE2;9JR0_2dO%^`=P@{(NAY$Q^q zq@)3YqSvA?&;mKQK%XFo=67nQwk}c6N1r4`=@I z4QVjY?%la{@Y$hS41G>HDZY!Vw@d!y__I<77%Q$(zN)K#c>n z721~V^(@nOdp7a9*e2NS6U%b@y`XRQecUyDrXek9hYjgpSxW8pP3aNICUKgmh9m(J 
zE>Fp13M=mcJbwDct-GJ!y?ehi$^1@~cJd4pJnTe%JQ}AN%eTYP0iNQ{+{+}FXi;i^ zaC3;0hlep42Jj)eBn$untfEyAfQ6M{5TyexofmUa?SenA@`l@+AI|@+^3rt)AWDti zFc5(1(gGoT`q9~Wc{bR$=8Rc!A^{xJ3N zaPs=4?zs*Lt$t{QByg>6XxJX^hIY{N9W%tt{}R};{DAmu5RbUJW%ffO3_L=5VV5l6*ZhCesyzp>E=A;JYzhJs%EWh zmQ3Rj(IVfArxQ&#*ggwa33~Y@Drfz+^W(fHroT2598#a z_Et?wC-c}qH;c%{Wgg>c-mpMhm4pT?138zmh6U;r>bKA1^~(^bS!k>6z;$OoihIxy zYqx(mXv??1KWMMNVb1}ZwN?0)al#u^2R3VU_C^$g(@X5fDY__b0bN>!l`#m;?xy<~7Z;B9V zLKAd`t=H6AU6gBe32Kdcoi)N0nVpA1(Hd$%V}WU|%eBS=bG5)IGLk_Cs$4ycBR~f-YN8|AfotChY*l~BSE^i{gInmi0M7+UXW+Gc88z0vVt#QqU%$7~6Y}XlzC8MDY*kH5)V;GCf`>CKa_pp6Wlt4RT zUnW&-5J5!1Bn_%KbzU{9xX1+dk}JE2DH%244}ndEXSl^(IJt`l+=}~g$1-{)n5P00 dxUU2z+5L^LdsMlx$dx6&zhKtR&Wb;+KLH~_A*28R diff --git a/internal/go/gcimporter/testdata/versions/test_go1.8_4.a b/internal/go/gcimporter/testdata/versions/test_go1.8_4.a deleted file mode 100644 index 26b8531650ad8f85885fcf62f8306e5f686a1dca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1658 zcmah}L2KMb6n-<6*FxTgsq1b{DMnqE#W=23(#pGQ6G|FP8|cBM^&y0UGb@eOYGp~* zY7+vX^ji803OTsYV^2Ax*U&)Ez4U+d7mWL)-LVtJCc`jq=6m1!<~_|rlhjPd-wVGm z1D-#M5|f_<1J4Y^LlK)dcO2Vs`;O=Cn1?fAexBtfZk^$t>+IPh)3u$UdDF&W-rEav zej>6oN(zx1pW-kKqtm%#JQVpfN)yvF44`DW5Yqyd-UWE{`imOH%WU{ zWWwX##E(w0G)J9Y$qsnIdkd9mg;#~el#(N$04sC_2;jAb@reO2 zNuAv|ARz7HaV9M5m`{(bcH4R-RMHG&w{maQJ;xx|E6)s5%ZeOiq3bOJu&;eg||9}IQ_ZoB=#&>Ou*?)Uvb_&CH^jO@O<8#-Yy7GfCgjE0^+ z28o4*QH+`1;L-z1-V)|$7vdayp7)`O2X8d2V7P1wxZN4 z-cXKntI_>QA9B$jbiawL;MK!F3v!I5|== z3j6Bc=7-C={;N8_&5{K{S;gN}n`>;zf2Z`RKuIZWK?`jB&onfAL7izv)6|Rx4$y8e z0J;j2uF)nmFIg?7HITLbI_v&N^f`g&#PApey3c5fK42sG_$Pwv#_!cC@0;`)foH_9 zk+gQ3(dOEBCFx1IoRyrmWF5)?-KC6V-HXGTWSi6kQ!70khD%qntp>8NW4CD+x@a4E z2APiPXT(Bz*!o&GVEX~?Y{^_~Q9zPMb5$~UJ9Z7ZrKMZ~v%mQBri;N$8`F8p1bGlny&5i&D%B(^TFPz z;3p!_;8<196I3*1d$M-+mg4B!qV3?+X{?VH6}; zz?1Qxf9!a!=Lo0ognYn9KF4h@^mjvUyFI_}4PGPndO;`x9AYR2cF)}%IiqkWM1Qn1 z=zGBsB-S;G6U_7upOjf7Qc>_ygdc<4o>m}E-^l+I`eFz0rHAA0H@Xfz|BPke|1)`+iXcrZt?40oFTZL8Nb(Rod^G~7A$jbiewML-p{&gCc 
zI5|==3j6$@=6j2}{);-l&5{{GS;gN}n`>;vzft*Aprn+xpanMmXBry5pw2X-X=+9T z2WU4L09^%1*Ju-(m#h}k8pv9Io%Qg2`hvgr@ zU9=57hg?VXGh(4UY<-~{u>A;kwq!20C?LtBI_PL^_?O^Zt&-WS*lx{!fFX@4#LpRh hN$fL%fd7&p;{9E%1IcdUhrv&SSzxYkem?yz{|7p9b&dc4 From 9489d27860e8e08881c82b605723b81c0e35cbca Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 4 May 2020 16:22:10 +0200 Subject: [PATCH 023/111] internal/go/gcimporter: delete unused code --- internal/go/gcimporter/iimport.go | 55 ------------------------------- 1 file changed, 55 deletions(-) diff --git a/internal/go/gcimporter/iimport.go b/internal/go/gcimporter/iimport.go index 6c7d76c1a..9a940fc38 100644 --- a/internal/go/gcimporter/iimport.go +++ b/internal/go/gcimporter/iimport.go @@ -19,8 +19,6 @@ import ( "io" "sort" "sync" - "unicode" - "unicode/utf8" "unsafe" ) @@ -33,14 +31,6 @@ func errorf(format string, args ...interface{}) { panic(fmt.Sprintf(format, args...)) } -func (r *intReader) int64() int64 { - i, err := binary.ReadVarint(r.Reader) - if err != nil { - errorf("import %q: read varint error: %v", r.path, err) - } - return i -} - func (r *intReader) uint64() uint64 { i, err := binary.ReadUvarint(r.Reader) if err != nil { @@ -711,51 +701,6 @@ func chanDir(d int) types.ChanDir { } } -func exported(name string) bool { - ch, _ := utf8.DecodeRuneInString(name) - return unicode.IsUpper(ch) -} - -// ---------------------------------------------------------------------------- -// Export format - -// Tags. Must be < 0. 
-const ( - // Objects - packageTag = -(iota + 1) - constTag - typeTag - varTag - funcTag - endTag - - // Types - namedTag - arrayTag - sliceTag - dddTag - structTag - pointerTag - signatureTag - interfaceTag - mapTag - chanTag - - // Values - falseTag - trueTag - int64Tag - floatTag - fractionTag // not used by gc - complexTag - stringTag - nilTag // only used by gc (appears in exported inlined function bodies) - unknownTag // not used by gc (only appears in packages with errors) - - // Type aliases - aliasTag -) - var predeclOnce sync.Once var predecl []types.Type // initialized lazily From f8687be584c9471ae1c555ba4a33a11fafa21c0a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 13:15:15 +0200 Subject: [PATCH 024/111] stylecheck: delete debugging leftover Closes gh-672 --- stylecheck/lint.go | 1 - 1 file changed, 1 deletion(-) diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 75a0112b2..4577c7460 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -50,7 +50,6 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) { if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { report.Report(pass, f.Doc, fmt.Sprintf(`package comment should be of the form "%s..."`, prefix)) } - f.Doc.Text() } } From d4e6d87776f90e935df9af9e776077116e6b552d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 13:23:43 +0200 Subject: [PATCH 025/111] staticcheck: don't flag nil constants as useless assignments Closes gh-733 --- staticcheck/lint.go | 4 ++++ .../CheckUnreadVariableValues.go | 12 ++++++++++++ 2 files changed, 16 insertions(+) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 1e9f5fcb3..282a60556 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -1719,6 +1719,10 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) { continue } + if _, ok := val.(*ir.Const); ok { + // a zero-valued constant, for example in 'foo := []string(nil)' + continue + } if !hasUse(val, 
nil) { report.Report(pass, assign, fmt.Sprintf("this value of %s is never used", lhs)) } diff --git a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go index a9efc2ded..da2d1c5d8 100644 --- a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go +++ b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go @@ -1,5 +1,7 @@ package pkg +import "fmt" + func fn1() { var x int x = gen() // want `this value of x is never used` @@ -130,3 +132,13 @@ func resolveWeakTypes(types []int) { } func findRunLimit(int) int { return 0 } + +func fn10() { + slice := []string(nil) + if true { + slice = []string{"1", "2"} + } else { + slice = []string{"3", "4"} + } + fmt.Println(slice) +} From 4efd30c33b37571e45cc0a57397b61cc35701e0d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 13:48:30 +0200 Subject: [PATCH 026/111] staticcheck: improve handling of byte slices and arrays in printf checker Closes gh-714 --- code/code.go | 1 + staticcheck/lint.go | 22 +++++++++++++++++-- .../testdata/src/CheckPrintf/CheckPrintf.go | 14 ++++++++++++ 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/code/code.go b/code/code.go index 5ef7aef4d..73aebea60 100644 --- a/code/code.go +++ b/code/code.go @@ -101,6 +101,7 @@ func IsCallToAny(call *ir.CallCommon, names ...string) bool { return false } +// OPT(dh): IsType is kind of expensive; should we really use it? 
func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name } func FilterDebug(instr []ir.Instruction) []ir.Instruction { diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 282a60556..f16bf3a40 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -527,8 +527,26 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { return true } - if flags&isString != 0 && (code.IsType(T, "[]byte") || isStringer(T, ms) || isError(T, ms)) { - return true + if flags&isString != 0 { + isStringyElem := func(typ types.Type) bool { + if typ, ok := typ.Underlying().(*types.Basic); ok { + return typ.Kind() == types.Byte + } + return false + } + switch T := T.(type) { + case *types.Slice: + if isStringyElem(T.Elem()) { + return true + } + case *types.Array: + if isStringyElem(T.Elem()) { + return true + } + } + if isStringer(T, ms) || isError(T, ms) { + return true + } } if flags&isPointer != 0 && code.IsPointerLike(T) { diff --git a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go index 18a127251..8747a2930 100644 --- a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go +++ b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go @@ -399,3 +399,17 @@ func dbg(format string, args ...interface{}) { } fmt.Printf(format, args...) 
} + +// https://github.com/dominikh/go-tools/issues/714 +func fn2() { + type String string + type Byte byte + + var a string = "a" + var b []byte = []byte{'b'} + var c [1]byte = [1]byte{'c'} + var d String = "d" + var e []uint8 = []uint8{'e'} + var f []Byte = []Byte{'h'} + fmt.Printf("%s %s %s %s %s %s %s", a, b, c, &c, d, e, f) +} From d9d28f5dea96fe3488f19326d0089b3b2ef40ac1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 14:04:53 +0200 Subject: [PATCH 027/111] stylecheck: avoid flagging error strings that begin with a function call Closes gh-452 --- stylecheck/lint.go | 4 ++++ .../testdata/src/CheckErrorStrings/CheckErrorStrings.go | 3 +++ 2 files changed, 7 insertions(+) diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 4577c7460..7848f93a6 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -409,6 +409,10 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) { } } + if strings.ContainsRune(word, '(') { + // Might be a function call + continue instrLoop + } word = strings.TrimRightFunc(word, func(r rune) bool { return unicode.IsPunct(r) }) if objNames[fn.Package()][word] { // Word is probably the name of a function or type in this package diff --git a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go index 2659fa126..c734149bc 100644 --- a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go +++ b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go @@ -11,6 +11,9 @@ func fn() { errors.New("SomeFunc is okay") errors.New("URL is okay, but the period is not.") // want `error strings should not end with punctuation or a newline` errors.New("T must not be nil") + errors.New("Foo() failed") + errors.New("Foo(bar) failed") + errors.New("Foo(bar, baz) failed") } func Write() { From 4b3a258f456fb7b442cabaf6feaf5713c8bfdb19 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 
14:26:37 +0200 Subject: [PATCH 028/111] staticcheck: Go 1.15+ does not require TestMain to call os.Exit Closes gh-708 --- staticcheck/lint.go | 6 ++++++ staticcheck/lint_test.go | 9 ++++++++- .../src/CheckTestMainExit-1_go115/CheckTestMainExit-1.go | 7 +++++++ 3 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 staticcheck/testdata/src/CheckTestMainExit-1_go115/CheckTestMainExit-1.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index f16bf3a40..54202d9b6 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -1103,6 +1103,12 @@ func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, erro } func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { + if code.IsGoVersion(pass, 15) { + // Beginning with Go 1.15, the test framework will call + // os.Exit for us. + return nil, nil + } + var ( fnmain ast.Node callsExit bool diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index b024ab0f9..c5ea70161 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -40,7 +40,14 @@ func TestAll(t *testing.T) { "SA2001": {{Dir: "CheckEmptyCriticalSection"}}, "SA2002": {{Dir: "CheckConcurrentTesting"}}, "SA2003": {{Dir: "CheckDeferLock"}}, - "SA3000": {{Dir: "CheckTestMainExit-1"}, {Dir: "CheckTestMainExit-2"}, {Dir: "CheckTestMainExit-3"}, {Dir: "CheckTestMainExit-4"}, {Dir: "CheckTestMainExit-5"}}, + "SA3000": { + {Dir: "CheckTestMainExit-1"}, + {Dir: "CheckTestMainExit-2"}, + {Dir: "CheckTestMainExit-3"}, + {Dir: "CheckTestMainExit-4"}, + {Dir: "CheckTestMainExit-5"}, + {Dir: "CheckTestMainExit-1_go115", Version: "1.15"}, + }, "SA3001": {{Dir: "CheckBenchmarkN"}}, "SA4000": {{Dir: "CheckLhsRhsIdentical"}}, "SA4001": {{Dir: "CheckIneffectiveCopy"}}, diff --git a/staticcheck/testdata/src/CheckTestMainExit-1_go115/CheckTestMainExit-1.go b/staticcheck/testdata/src/CheckTestMainExit-1_go115/CheckTestMainExit-1.go new file mode 100644 index 000000000..cbb64cc55 --- /dev/null +++ 
b/staticcheck/testdata/src/CheckTestMainExit-1_go115/CheckTestMainExit-1.go @@ -0,0 +1,7 @@ +package pkg + +import "testing" + +func TestMain(m *testing.M) { + m.Run() +} From 8a50a77eaf0298aa010c2ea8d8e80f5e28a0f3c3 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 18:57:22 +0200 Subject: [PATCH 029/111] lint: warn when patterns matched no packages Updates gh-722 --- lint/lint.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lint/lint.go b/lint/lint.go index 62533cffc..e4574b996 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -4,6 +4,7 @@ package lint // import "honnef.co/go/tools/lint" import ( "fmt" "go/token" + "os" "path/filepath" "regexp" "sort" @@ -306,6 +307,12 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error return nil, err } + if len(results) == 0 && err == nil { + // TODO(dh): emulate Go's behavior more closely once we have + // access to go list's Match field. + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", patterns) + } + analyzerNames := make([]string, len(l.Checkers)) for i, a := range l.Checkers { analyzerNames[i] = a.Name From 08a27f44447472c7758985be6d8e34fea93fbeec Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 19:15:58 +0200 Subject: [PATCH 030/111] lint/lintutil: mention where to find documentation on checks Closes gh-436 --- lint/lintutil/format/format.go | 12 ++++++++++++ lint/lintutil/util.go | 9 ++++++--- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/lint/lintutil/format/format.go b/lint/lintutil/format/format.go index ef75a75d4..60aa6165c 100644 --- a/lint/lintutil/format/format.go +++ b/lint/lintutil/format/format.go @@ -46,6 +46,10 @@ type Formatter interface { Format(p lint.Problem) } +type DocumentationMentioner interface { + MentionCheckDocumentation(cmd string) +} + type Text struct { W io.Writer } @@ -57,6 +61,10 @@ func (o Text) Format(p lint.Problem) { } } +func (o Text) MentionCheckDocumentation(cmd string) { + 
fmt.Fprintf(o.W, "\nRun '%s -explain ' or visit https://staticcheck.io/docs/checks for documentation on checks.\n", cmd) +} + type JSON struct { W io.Writer } @@ -152,6 +160,10 @@ func (o *Stylish) Format(p lint.Problem) { } } +func (o *Stylish) MentionCheckDocumentation(cmd string) { + Text{W: o.W}.MentionCheckDocumentation(cmd) +} + func (o *Stylish) Stats(total, errors, warnings, ignored int) { if o.tw != nil { o.tw.Flush() diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index bcf513b7a..0d998925a 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -261,7 +261,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { } var ( - total int errors int warnings int ignored int @@ -275,7 +274,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { shouldExit := lint.FilterAnalyzerNames(analyzerNames, fail) shouldExit["compile"] = true - total = len(ps) for _, p := range ps { if p.Category == "compile" && debugNoCompile { continue @@ -293,8 +291,13 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { f.Format(p) } if f, ok := f.(format.Statter); ok { - f.Stats(total, errors, warnings, ignored) + f.Stats(len(ps), errors, warnings, ignored) } + + if f, ok := f.(format.DocumentationMentioner); ok && (errors > 0 || warnings > 0) && len(os.Args) > 0 { + f.MentionCheckDocumentation(os.Args[0]) + } + if errors > 0 { exit(1) } From 01541ecfc85eb4c7488ad2fac0bdf94e52782e41 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 19:39:21 +0200 Subject: [PATCH 031/111] lint: skip work for non-initial packages Don't waste cycles trying to collect and filter problems from non-initial packages; they have none. This also fixes a bug where we would complain about unmatched linter directives in dependencies. 
--- lint/lint.go | 4 ++++ runner/runner.go | 2 ++ 2 files changed, 6 insertions(+) diff --git a/lint/lint.go b/lint/lint.go index e4574b996..73f0fce95 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -328,6 +328,10 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error if res.Failed { problems = append(problems, failed(res)...) } else { + if !res.Initial { + continue + } + allowedAnalyzers := FilterAnalyzerNames(analyzerNames, res.Config.Checks) ps, u, err := success(allowedAnalyzers, res) if err != nil { diff --git a/runner/runner.go b/runner/runner.go index b088d3e50..f1299ae93 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -174,6 +174,7 @@ type TextEdit struct { type Result struct { Package *loader.PackageSpec Config config.Config + Initial bool Failed bool Errors []error @@ -1185,6 +1186,7 @@ func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patte out[i] = Result{ Package: item.Package, Config: item.cfg, + Initial: !item.factsOnly, Failed: item.failed, Errors: item.errors, diagnostics: item.diagnostics, From 26e2b9ee63ed072d647854a4534729224d4fc21e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 20:49:22 +0200 Subject: [PATCH 032/111] runner: don't collect directives nor write empty diagnostics/unused for fact-only packages When analyzing a package just for facts, don't cache the (empty) list of diagnostics and (un)used information. When later analyzing the package for diagnostics, we would find a cached result that is incorrect. We also don't extract directives for factsOnly packages, to save on unnecessary work. 
--- runner/runner.go | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/runner/runner.go b/runner/runner.go index f1299ae93..a5896d109 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -598,6 +598,10 @@ func (r *subrunner) do(act action) error { return err } + if a.factsOnly { + return nil + } + dirs := make([]facts.SerializedDirective, len(result.dirs)) for i, dir := range result.dirs { dirs[i] = facts.SerializeDirective(dir, result.lpkg.Fset) @@ -716,7 +720,10 @@ func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { // OPT(dh): instead of parsing directives twice (twice because // U1000 depends on the facts.Directives analyzer), reuse the // existing result - dirs := facts.ParseDirectives(pkg.Syntax, pkg.Fset) + var dirs []facts.Directive + if !a.factsOnly { + dirs = facts.ParseDirectives(pkg.Syntax, pkg.Fset) + } res, err := r.runAnalyzers(a, pkg) return packageActionResult{ From afbb3b1319f96baaf986aaf756b03f0d4792eb17 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 10 May 2020 22:48:53 +0200 Subject: [PATCH 033/111] facts: find deprecration notices that aren't the last paragraph Closes gh-734 --- facts/deprecated.go | 13 +++++++------ facts/testdata/src/Deprecated/Deprecated.go | 8 ++++++++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/facts/deprecated.go b/facts/deprecated.go index 8587b0e0e..dbc5ede5c 100644 --- a/facts/deprecated.go +++ b/facts/deprecated.go @@ -37,13 +37,14 @@ func deprecated(pass *analysis.Pass) (interface{}, error) { continue } parts := strings.Split(doc.Text(), "\n\n") - last := parts[len(parts)-1] - if !strings.HasPrefix(last, "Deprecated: ") { - continue + for _, part := range parts { + if !strings.HasPrefix(part, "Deprecated: ") { + continue + } + alt := part[len("Deprecated: "):] + alt = strings.Replace(alt, "\n", " ", -1) + return alt } - alt := last[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return alt } return "" } diff --git 
a/facts/testdata/src/Deprecated/Deprecated.go b/facts/testdata/src/Deprecated/Deprecated.go index 14f463d85..e6dedcde9 100644 --- a/facts/testdata/src/Deprecated/Deprecated.go +++ b/facts/testdata/src/Deprecated/Deprecated.go @@ -3,3 +3,11 @@ package pkg // Deprecated: Don't use this. func fn2() { // want fn2:`Deprecated: Don't use this\.` } + +// This is a function. +// +// Deprecated: Don't use this. +// +// Here is how you might use it instead. +func fn3() { // want fn3:`Deprecated: Don't use this\.` +} From 5a5bda03a50360c6f988d470e22bc62a983577be Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 11 May 2020 01:22:36 +0200 Subject: [PATCH 034/111] cmd/structlayout: port to go/packages --- cmd/structlayout/main.go | 54 +++++++++++++++++++++------------------- 1 file changed, 28 insertions(+), 26 deletions(-) diff --git a/cmd/structlayout/main.go b/cmd/structlayout/main.go index 0cdf7c2cf..157920767 100644 --- a/cmd/structlayout/main.go +++ b/cmd/structlayout/main.go @@ -14,7 +14,7 @@ import ( st "honnef.co/go/tools/structlayout" "honnef.co/go/tools/version" - "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" ) var ( @@ -41,38 +41,40 @@ func main() { os.Exit(1) } - conf := loader.Config{ - Build: &build.Default, + cfg := &packages.Config{ + Mode: packages.NeedImports | packages.NeedExportsFile | packages.NeedTypes | packages.NeedSyntax, + Tests: true, } - - var pkg string - var typName string - pkg = flag.Args()[0] - typName = flag.Args()[1] - conf.Import(pkg) - - lprog, err := conf.Load() + pkgs, err := packages.Load(cfg, flag.Args()[0]) if err != nil { log.Fatal(err) } - var typ types.Type - obj := lprog.Package(pkg).Pkg.Scope().Lookup(typName) - if obj == nil { - log.Fatal("couldn't find type") - } - typ = obj.Type() - st, ok := typ.Underlying().(*types.Struct) - if !ok { - log.Fatal("identifier is not a struct type") - } + for _, pkg := range pkgs { + typName := flag.Args()[1] + + var typ types.Type + obj := 
pkg.Types.Scope().Lookup(typName) + if obj == nil { + continue + } + typ = obj.Type() - fields := sizes(st, typ.(*types.Named).Obj().Name(), 0, nil) - if fJSON { - emitJSON(fields) - } else { - emitText(fields) + st, ok := typ.Underlying().(*types.Struct) + if !ok { + log.Fatal("identifier is not a struct type") + } + + fields := sizes(st, typ.(*types.Named).Obj().Name(), 0, nil) + if fJSON { + emitJSON(fields) + } else { + emitText(fields) + } + return } + + log.Fatal("couldn't find type") } func emitJSON(fields []st.Field) { From 855ad3406fc82d5a35d43fc005559be14b4f8298 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 11 May 2020 01:24:30 +0200 Subject: [PATCH 035/111] cmd/keyify: ignore deprecation of go/loader --- cmd/keyify/keyify.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cmd/keyify/keyify.go b/cmd/keyify/keyify.go index c4a56e1ea..95c920e6e 100644 --- a/cmd/keyify/keyify.go +++ b/cmd/keyify/keyify.go @@ -20,6 +20,8 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/buildutil" + + //lint:ignore SA1019 this tool is unmaintained, just keep it working "golang.org/x/tools/go/loader" ) From 3771fcc94a2d0672d93f6d679ce4b95d3965e9a9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 12 May 2020 04:13:21 +0200 Subject: [PATCH 036/111] Skip excessively large packages A common (albeit terrible) way of bundling assets with Go packages is to convert them into Go source files, usually some variation of a giant byte slice or string containing binary data. These files can be as large as hundreds of megabytes for some people, and parsing, type-checking and building IR form for them is an extreme burden with little reward. If we assume that assets are confined to their own packages, with no other logic mixed in, then we really don't care about analyzing these packages. No problems will be found, and no useful facts will be contributed to the analysis of dependents. We set the limit at 50 MB for now. 
We initially tried with 1 MB and 10 MB, which quickly ran into generated code for protobufs, and machine converted code respectively. We may need to tweak the limit further in the future. --- lint/lint.go | 18 +++++++++++------- lint/lint_test.go | 2 +- lint/lintutil/util.go | 33 ++++++++++++++++++--------------- loader/loader.go | 33 ++++++++++++++++++++++++++------- runner/runner.go | 13 ++++++++++--- 5 files changed, 66 insertions(+), 33 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 73f0fce95..3f86c91dc 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -301,10 +301,10 @@ func (l *Linter) SetGoVersion(n int) { l.Runner.GoVersion = n } -func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) { +func (l *Linter) Lint(cfg *packages.Config, patterns []string) (problems []Problem, warnings []string, err error) { results, err := l.Runner.Run(cfg, l.Checkers, patterns) if err != nil { - return nil, err + return nil, nil, err } if len(results) == 0 && err == nil { @@ -318,7 +318,6 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error analyzerNames[i] = a.Name } - var problems []Problem used := map[unusedKey]bool{} var unuseds []unusedPair for _, res := range results { @@ -328,6 +327,11 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error if res.Failed { problems = append(problems, failed(res)...) 
} else { + if res.Skipped { + warnings = append(warnings, fmt.Sprintf("skipped package %s because it is too large", res.Package)) + continue + } + if !res.Initial { continue } @@ -335,11 +339,11 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error allowedAnalyzers := FilterAnalyzerNames(analyzerNames, res.Config.Checks) ps, u, err := success(allowedAnalyzers, res) if err != nil { - return nil, err + return nil, nil, err } filtered, err := filterIgnored(ps, res, allowedAnalyzers) if err != nil { - return nil, err + return nil, nil, err } problems = append(problems, filtered...) @@ -388,7 +392,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error } if len(problems) == 0 { - return nil, nil + return nil, warnings, nil } sort.Slice(problems, func(i, j int) bool { @@ -417,7 +421,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error out = append(out, p) } } - return out, nil + return out, warnings, nil } func FilterAnalyzerNames(analyzers []string, checks []string) map[string]bool { diff --git a/lint/lint_test.go b/lint/lint_test.go index e9bf7ad6d..08be73b30 100644 --- a/lint/lint_test.go +++ b/lint/lint_test.go @@ -30,7 +30,7 @@ func lintPackage(t *testing.T, name string) []Problem { cfg := &packages.Config{ Env: append(os.Environ(), "GOPATH="+testdata(), "GO111MODULE=off"), } - ps, err := l.Lint(cfg, []string{name}) + ps, _, err := l.Lint(cfg, []string{name}) if err != nil { t.Fatal(err) } diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 0d998925a..98e3fa43c 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -248,7 +248,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { exit(2) } - ps, err := doLint(cs, fs.Args(), &Options{ + ps, warnings, err := doLint(cs, fs.Args(), &Options{ Tags: tags, LintTests: tests, GoVersion: goVersion, @@ -260,10 +260,14 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { exit(1) } + 
for _, w := range warnings { + fmt.Fprintln(os.Stderr, "warning:", w) + } + var ( - errors int - warnings int - ignored int + numErrors int + numWarnings int + numIgnored int ) fail := *fs.Lookup("fail").Value.(*list) @@ -279,26 +283,26 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { continue } if p.Severity == lint.Ignored && !showIgnored { - ignored++ + numIgnored++ continue } if shouldExit[p.Category] { - errors++ + numErrors++ } else { p.Severity = lint.Warning - warnings++ + numWarnings++ } f.Format(p) } if f, ok := f.(format.Statter); ok { - f.Stats(len(ps), errors, warnings, ignored) + f.Stats(len(ps), numErrors, numWarnings, numIgnored) } - if f, ok := f.(format.DocumentationMentioner); ok && (errors > 0 || warnings > 0) && len(os.Args) > 0 { + if f, ok := f.(format.DocumentationMentioner); ok && (numErrors > 0 || numWarnings > 0) && len(os.Args) > 0 { f.MentionCheckDocumentation(os.Args[0]) } - if errors > 0 { + if numErrors > 0 { exit(1) } exit(0) @@ -333,10 +337,10 @@ func computeSalt() ([]byte, error) { return h.Sum(nil), nil } -func doLint(cs []*analysis.Analyzer, paths []string, opt *Options) ([]lint.Problem, error) { +func doLint(cs []*analysis.Analyzer, paths []string, opt *Options) ([]lint.Problem, []string, error) { salt, err := computeSalt() if err != nil { - return nil, fmt.Errorf("could not compute salt for cache: %s", err) + return nil, nil, fmt.Errorf("could not compute salt for cache: %s", err) } cache.SetSalt(salt) @@ -346,7 +350,7 @@ func doLint(cs []*analysis.Analyzer, paths []string, opt *Options) ([]lint.Probl l, err := lint.NewLinter(opt.Config) if err != nil { - return nil, err + return nil, nil, err } l.Checkers = cs l.SetGoVersion(opt.GoVersion) @@ -394,8 +398,7 @@ func doLint(cs []*analysis.Analyzer, paths []string, opt *Options) ([]lint.Probl } }() } - ps, err := l.Lint(cfg, paths) - return ps, err + return l.Lint(cfg, paths) } var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) diff --git 
a/loader/loader.go b/loader/loader.go index 3ecd51b9a..d9d4bb110 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -18,6 +18,10 @@ import ( "golang.org/x/tools/go/packages" ) +const MaxFileSize = 50 * 1024 * 1024 // 50 MB + +var errMaxFileSize = errors.New("file exceeds max file size") + type PackageSpec struct { ID string Name string @@ -165,9 +169,12 @@ func Load(spec *PackageSpec) (*Package, Stats, error) { } } t := time.Now() - pkg := prog.LoadFromSource(spec) + pkg, err := prog.LoadFromSource(spec) + if err == errMaxFileSize { + pkg, err = prog.LoadFromExport(spec) + } stats.Source = time.Since(t) - return pkg, stats, nil + return pkg, stats, err } // LoadFromExport loads a package from export data. @@ -203,7 +210,7 @@ func (prog *program) LoadFromExport(spec *PackageSpec) (*Package, error) { // LoadFromSource loads a package from source. All of its dependencies // must have been loaded already. -func (prog *program) LoadFromSource(spec *PackageSpec) *Package { +func (prog *program) LoadFromSource(spec *PackageSpec) (*Package, error) { if len(spec.Errors) > 0 { panic("LoadFromSource called on package with errors") } @@ -232,12 +239,24 @@ func (prog *program) LoadFromSource(spec *PackageSpec) *Package { // be faster, and tends to be slower due to extra scheduling, // bookkeeping and potentially false sharing of cache lines. for i, file := range spec.CompiledGoFiles { - f, err := parser.ParseFile(prog.fset, file, nil, parser.ParseComments) + f, err := os.Open(file) + if err != nil { + return nil, err + } + fi, err := f.Stat() + if err != nil { + return nil, err + } + if fi.Size() >= MaxFileSize { + return nil, errMaxFileSize + } + af, err := parser.ParseFile(prog.fset, file, f, parser.ParseComments) + f.Close() if err != nil { pkg.Errors = append(pkg.Errors, convertError(err)...) 
- return pkg + return pkg, nil } - pkg.Syntax[i] = f + pkg.Syntax[i] = af } importer := func(path string) (*types.Package, error) { if path == "unsafe" { @@ -266,7 +285,7 @@ func (prog *program) LoadFromSource(spec *PackageSpec) *Package { }, } types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) - return pkg + return pkg, nil } func convertError(err error) []packages.Error { diff --git a/runner/runner.go b/runner/runner.go index a5896d109..09a6765c4 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -175,6 +175,7 @@ type Result struct { Package *loader.PackageSpec Config config.Config Initial bool + Skipped bool Failed bool Errors []error @@ -311,6 +312,7 @@ type packageAction struct { directives string diagnostics string unused string + skipped bool } func (act *packageAction) String() string { @@ -433,9 +435,6 @@ func newPackageAction(pkg *loader.PackageSpec, cache map[*loader.PackageSpec]*pa // OPT(dh): pre-allocate a.deps for _, dep := range pkg.Imports { - if dep.PkgPath == "unsafe" { - continue - } depa := newPackageAction(dep, cache) depa.triggers = append(depa.triggers, a) a.deps = append(a.deps, depa) @@ -554,6 +553,8 @@ func (r *subrunner) do(act action) error { return nil } + a.skipped = result.skipped + // OPT(dh): doUncached returns facts in one format, only for // us to immediately convert them to another format. 
@@ -695,6 +696,7 @@ type packageActionResult struct { unused unused.SerializedResult dirs []facts.Directive lpkg *loader.Package + skipped bool } func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { @@ -717,6 +719,10 @@ func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { return packageActionResult{}, nil } + if len(pkg.Syntax) == 0 && pkg.PkgPath != "unsafe" { + return packageActionResult{lpkg: pkg, skipped: true}, nil + } + // OPT(dh): instead of parsing directives twice (twice because // U1000 depends on the facts.Directives analyzer), reuse the // existing result @@ -1194,6 +1200,7 @@ func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patte Package: item.Package, Config: item.cfg, Initial: !item.factsOnly, + Skipped: item.skipped, Failed: item.failed, Errors: item.errors, diagnostics: item.diagnostics, From 8d2e66af4dd53e787b8206a48ad6381133c54508 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 12 May 2020 08:25:05 +0200 Subject: [PATCH 037/111] runner: simplify caching Merge diagnostics, directives and unused objects into a single output. Use temporary files instead of in-memory buffers. Avoid unnecessary transformation steps by storing data in its final form. 
--- lint/lint.go | 31 ++--- runner/runner.go | 327 ++++++++++++++++++----------------------------- 2 files changed, 137 insertions(+), 221 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 3f86c91dc..f2a4f0114 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -210,26 +210,19 @@ type unusedPair struct { obj unused.SerializedObject } -func success(allowedChecks map[string]bool, res runner.Result) ([]Problem, unused.SerializedResult, error) { - diags, err := res.Diagnostics() - if err != nil { - return nil, unused.SerializedResult{}, err - } - +func success(allowedChecks map[string]bool, res runner.ResultData) []Problem { + diags := res.Diagnostics var problems []Problem - for _, diag := range diags { if !allowedChecks[diag.Category] { continue } problems = append(problems, Problem{Diagnostic: diag}) } - - u, err := res.Unused() - return problems, u, err + return problems } -func filterIgnored(problems []Problem, res runner.Result, allowedAnalyzers map[string]bool) ([]Problem, error) { +func filterIgnored(problems []Problem, res runner.ResultData, allowedAnalyzers map[string]bool) ([]Problem, error) { couldveMatched := func(ig *lineIgnore) bool { for _, c := range ig.Checks { if c == "U1000" { @@ -256,12 +249,7 @@ func filterIgnored(problems []Problem, res runner.Result, allowedAnalyzers map[s return false } - dirs, err := res.Directives() - if err != nil { - return nil, err - } - - ignores, moreProblems := parseDirectives(dirs) + ignores, moreProblems := parseDirectives(res.Directives) for _, ig := range ignores { for i := range problems { @@ -337,17 +325,18 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) (problems []Probl } allowedAnalyzers := FilterAnalyzerNames(analyzerNames, res.Config.Checks) - ps, u, err := success(allowedAnalyzers, res) + resd, err := res.Load() if err != nil { return nil, nil, err } - filtered, err := filterIgnored(ps, res, allowedAnalyzers) + ps := success(allowedAnalyzers, resd) + filtered, err := filterIgnored(ps, 
resd, allowedAnalyzers) if err != nil { return nil, nil, err } problems = append(problems, filtered...) - for _, obj := range u.Used { + for _, obj := range resd.Unused.Used { // FIXME(dh): pick the object whose filename does not include $GOROOT key := unusedKey{ pkgPath: res.Package.PkgPath, @@ -359,7 +348,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) (problems []Probl } if allowedAnalyzers["U1000"] { - for _, obj := range u.Unused { + for _, obj := range resd.Unused.Unused { key := unusedKey{ pkgPath: res.Package.PkgPath, base: filepath.Base(obj.Position.Filename), diff --git a/runner/runner.go b/runner/runner.go index 09a6765c4..20337f95a 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -112,12 +112,12 @@ package runner // future. import ( - "bytes" "encoding/gob" "fmt" "go/token" "go/types" "io" + "io/ioutil" "os" "reflect" "runtime" @@ -180,82 +180,31 @@ type Result struct { Failed bool Errors []error // Action results, paths to files - diagnostics string - directives string - unused string + results string } -// Diagnostics loads and returns the diagnostics found while analyzing -// the package. -func (r Result) Diagnostics() ([]Diagnostic, error) { - if r.Failed { - panic("Diagnostics called on failed Result") - } - if r.diagnostics == "" { - // this package was only a dependency - return nil, nil - } - var diags []Diagnostic - f, err := os.Open(r.diagnostics) - if err != nil { - return nil, fmt.Errorf("failed loading diagnostics: %w", err) - } - defer f.Close() - dec := gob.NewDecoder(f) - for { - var diag Diagnostic - err := dec.Decode(&diag) - if err != nil { - if err == io.EOF { - break - } - return nil, fmt.Errorf("failed loading diagnostics: %w", err) - } - diags = append(diags, diag) - } - return diags, nil +type ResultData struct { + Directives []facts.SerializedDirective + Diagnostics []Diagnostic + Unused unused.SerializedResult } -// Directives loads and returns the directives found while analyzing -// the package. 
-func (r Result) Directives() ([]facts.SerializedDirective, error) { +func (r Result) Load() (ResultData, error) { if r.Failed { - panic("Directives called on failed Result") + panic("Load called on failed Result") } - if r.directives == "" { + if r.results == "" { // this package was only a dependency - return nil, nil + return ResultData{}, nil } - var dirs []facts.SerializedDirective - f, err := os.Open(r.directives) + f, err := os.Open(r.results) if err != nil { - return nil, fmt.Errorf("failed loading directives: %w", err) + return ResultData{}, fmt.Errorf("failed loading result: %w", err) } defer f.Close() - if err := gob.NewDecoder(f).Decode(&dirs); err != nil { - return nil, fmt.Errorf("failed loading directives: %w", err) - } - return dirs, nil -} - -func (r Result) Unused() (unused.SerializedResult, error) { - if r.Failed { - panic("Unused called on failed Result") - } - if r.unused == "" { - // this package was only a dependency - return unused.SerializedResult{}, nil - } - var res unused.SerializedResult - f, err := os.Open(r.unused) - if err != nil { - return unused.SerializedResult{}, fmt.Errorf("failed loading unused: %w", err) - } - defer f.Close() - if err := gob.NewDecoder(f).Decode(&res); err != nil { - return unused.SerializedResult{}, fmt.Errorf("failed loading unused: %w", err) - } - return res, nil + var out ResultData + err = gob.NewDecoder(f).Decode(&out) + return out, err } type action interface { @@ -307,12 +256,10 @@ type packageAction struct { // Action results - cfg config.Config - vetx string - directives string - diagnostics string - unused string - skipped bool + cfg config.Config + vetx string + results string + skipped bool } func (act *packageAction) String() string { @@ -350,7 +297,7 @@ type analyzerAction struct { // consumption because analyzer actions get garbage collected once // a package has been fully analyzed. 
Result interface{} - Diagnostics []analysis.Diagnostic + Diagnostics []Diagnostic ObjectFacts map[objectFactKey]analysis.Fact PackageFacts map[packageFactKey]analysis.Fact Pass *analysis.Pass @@ -534,17 +481,12 @@ func (r *subrunner) do(act action) error { a.hash = cache.ActionID(h.Sum()) // try to fetch hashed data - ids := make([]cache.ActionID, 0, 4) + ids := make([]cache.ActionID, 0, 2) ids = append(ids, cache.Subkey(a.hash, "vetx")) if !a.factsOnly { - ids = append(ids, - cache.Subkey(a.hash, "directives"), - cache.Subkey(a.hash, "diagnostics"), - // OPT(dh): only load "unused" data if we're running the U1000 analyzer - cache.Subkey(a.hash, "unused"), - ) + ids = append(ids, cache.Subkey(a.hash, "results")) } - if err := getCachedFiles(r.cache, ids, []*string{&a.vetx, &a.directives, &a.diagnostics, &a.unused}); err != nil { + if err := getCachedFiles(r.cache, ids, []*string{&a.vetx, &a.results}); err != nil { result, err := r.doUncached(a) if err != nil { return err @@ -555,102 +497,51 @@ func (r *subrunner) do(act action) error { a.skipped = result.skipped - // OPT(dh): doUncached returns facts in one format, only for - // us to immediately convert them to another format. - // OPT(dh) instead of collecting all object facts and encoding // them after analysis finishes, we could encode them as we // go. however, that would require some locking. 
- gobFacts := &bytes.Buffer{} - enc := gob.NewEncoder(gobFacts) - for _, f := range result.objFacts { - objPath, err := objectpath.For(f.Object) - if err != nil { - continue - } - gf := gobFact{ - PkgPath: f.Object.Pkg().Path(), - ObjPath: string(objPath), - Fact: f.Fact, - } - if err := enc.Encode(gf); err != nil { - return fmt.Errorf("failed gob encoding data: %w", err) - } - } - for _, f := range result.pkgFacts { - gf := gobFact{ - PkgPath: f.Package.Path(), - Fact: f.Fact, - } - if err := enc.Encode(gf); err != nil { - return fmt.Errorf("failed gob encoding data: %w", err) - } - } - + // // OPT(dh): We could sort gobFacts for more consistent output, // but it doesn't matter. The hash of a package includes all // of its files, so whether the vetx hash changes or not, a // change to a package requires re-analyzing all dependents, // even if the vetx data stayed the same. See also the note at // the top of loader/hash.go. - - a.vetx, err = r.writeCache(a, "vetx", gobFacts.Bytes()) + tf, err := ioutil.TempFile("", "staticcheck") if err != nil { return err } + defer tf.Close() + os.Remove(tf.Name()) - if a.factsOnly { - return nil + enc := gob.NewEncoder(tf) + for _, gf := range result.facts { + if err := enc.Encode(gf); err != nil { + return fmt.Errorf("failed gob encoding data: %w", err) + } } - dirs := make([]facts.SerializedDirective, len(result.dirs)) - for i, dir := range result.dirs { - dirs[i] = facts.SerializeDirective(dir, result.lpkg.Fset) + if _, err := tf.Seek(0, io.SeekStart); err != nil { + return err } - a.directives, err = r.writeCacheGob(a, "directives", dirs) + a.vetx, err = r.writeCacheReader(a, "vetx", tf) if err != nil { return err } - gobDiags := &bytes.Buffer{} - enc = gob.NewEncoder(gobDiags) - for _, diag := range result.diags { - d := Diagnostic{ - Position: report.DisplayPosition(result.lpkg.Fset, diag.Pos), - End: report.DisplayPosition(result.lpkg.Fset, diag.End), - Category: diag.Category, - Message: diag.Message, - } - for _, sugg := 
range diag.SuggestedFixes { - s := SuggestedFix{ - Message: sugg.Message, - } - for _, edit := range sugg.TextEdits { - s.TextEdits = append(s.TextEdits, TextEdit{ - Position: report.DisplayPosition(result.lpkg.Fset, edit.Pos), - End: report.DisplayPosition(result.lpkg.Fset, edit.End), - NewText: edit.NewText, - }) - } - d.SuggestedFixed = append(d.SuggestedFixed, s) - } - for _, rel := range diag.Related { - d.Related = append(d.Related, RelatedInformation{ - Position: report.DisplayPosition(result.lpkg.Fset, rel.Pos), - End: report.DisplayPosition(result.lpkg.Fset, rel.End), - Message: rel.Message, - }) - } - if err := enc.Encode(d); err != nil { - return fmt.Errorf("failed gob encoding data: %w", err) - } + if a.factsOnly { + return nil } - a.diagnostics, err = r.writeCache(a, "diagnostics", gobDiags.Bytes()) - if err != nil { - return err + + var out ResultData + out.Directives = make([]facts.SerializedDirective, len(result.dirs)) + for i, dir := range result.dirs { + out.Directives[i] = facts.SerializeDirective(dir, result.lpkg.Fset) } - a.unused, err = r.writeCacheGob(a, "unused", result.unused) + out.Diagnostics = result.diags + out.Unused = result.unused + a.results, err = r.writeCacheGob(a, "results", out) if err != nil { return err } @@ -668,35 +559,38 @@ func (r *Runner) TotalWorkers() int { return r.semaphore.Cap() } -func (r *Runner) writeCache(a *packageAction, kind string, data []byte) (string, error) { +func (r *Runner) writeCacheReader(a *packageAction, kind string, rs io.ReadSeeker) (string, error) { h := cache.Subkey(a.hash, kind) - if err := r.cache.PutBytes(h, data); err != nil { - return "", fmt.Errorf("failed caching data: %w", err) - } - // OPT(dh): change PutBytes signature so we get the file name right away, not requiring a call to GetFile - f, _, err := r.cache.GetFile(h) + out, _, err := r.cache.Put(h, rs) if err != nil { - return "", fmt.Errorf("failed finding cache entry: %w", err) + return "", fmt.Errorf("failed caching data: %w", 
err) } - return f, nil + return r.cache.OutputFile(out), nil } func (r *Runner) writeCacheGob(a *packageAction, kind string, data interface{}) (string, error) { - buf := bytes.NewBuffer(nil) - if err := gob.NewEncoder(buf).Encode(data); err != nil { + f, err := ioutil.TempFile("", "staticcheck") + if err != nil { + return "", err + } + defer f.Close() + os.Remove(f.Name()) + if err := gob.NewEncoder(f).Encode(data); err != nil { return "", fmt.Errorf("failed gob encoding data: %w", err) } - return r.writeCache(a, kind, buf.Bytes()) + if _, err := f.Seek(0, io.SeekStart); err != nil { + return "", err + } + return r.writeCacheReader(a, kind, f) } type packageActionResult struct { - objFacts []analysis.ObjectFact - pkgFacts []analysis.PackageFact - diags []analysis.Diagnostic - unused unused.SerializedResult - dirs []facts.Directive - lpkg *loader.Package - skipped bool + facts []gobFact + diags []Diagnostic + unused unused.SerializedResult + dirs []facts.Directive + lpkg *loader.Package + skipped bool } func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { @@ -733,12 +627,11 @@ func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { res, err := r.runAnalyzers(a, pkg) return packageActionResult{ - objFacts: res.objFacts, - pkgFacts: res.pkgFacts, - diags: res.diagnostics, - unused: res.unused, - dirs: dirs, - lpkg: pkg, + facts: res.facts, + diags: res.diagnostics, + unused: res.unused, + dirs: dirs, + lpkg: pkg, }, err } @@ -880,10 +773,36 @@ func (ar *analyzerRunner) do(act action) error { Pkg: ar.pkg.Types, TypesInfo: ar.pkg.TypesInfo, TypesSizes: ar.pkg.TypesSizes, - Report: func(d analysis.Diagnostic) { + Report: func(diag analysis.Diagnostic) { if !ar.factsOnly { - if d.Category == "" { - d.Category = a.Analyzer.Name + if diag.Category == "" { + diag.Category = a.Analyzer.Name + } + d := Diagnostic{ + Position: report.DisplayPosition(ar.pkg.Fset, diag.Pos), + End: report.DisplayPosition(ar.pkg.Fset, diag.End), + 
Category: diag.Category, + Message: diag.Message, + } + for _, sugg := range diag.SuggestedFixes { + s := SuggestedFix{ + Message: sugg.Message, + } + for _, edit := range sugg.TextEdits { + s.TextEdits = append(s.TextEdits, TextEdit{ + Position: report.DisplayPosition(ar.pkg.Fset, edit.Pos), + End: report.DisplayPosition(ar.pkg.Fset, edit.End), + NewText: edit.NewText, + }) + } + d.SuggestedFixed = append(d.SuggestedFixed, s) + } + for _, rel := range diag.Related { + d.Related = append(d.Related, RelatedInformation{ + Position: report.DisplayPosition(ar.pkg.Fset, rel.Pos), + End: report.DisplayPosition(ar.pkg.Fset, rel.End), + Message: rel.Message, + }) } a.Diagnostics = append(a.Diagnostics, d) } @@ -980,9 +899,8 @@ func (ar *analyzerRunner) do(act action) error { } type analysisResult struct { - objFacts []analysis.ObjectFact - pkgFacts []analysis.PackageFact - diagnostics []analysis.Diagnostic + facts []gobFact + diagnostics []Diagnostic unused unused.SerializedResult } @@ -1056,23 +974,34 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an } // OPT(dh): cull objects not reachable via the exported closure - objFacts := make([]analysis.ObjectFact, 0, len(depObjFacts)) - pkgFacts := make([]analysis.PackageFact, 0, len(depPkgFacts)) + gobFacts := make([]gobFact, 0, len(depObjFacts)+len(depPkgFacts)) for key, fact := range depObjFacts { - objFacts = append(objFacts, analysis.ObjectFact{Object: key.Obj, Fact: fact}) + objPath, err := objectpath.For(key.Obj) + if err != nil { + continue + } + gf := gobFact{ + PkgPath: key.Obj.Pkg().Path(), + ObjPath: string(objPath), + Fact: fact, + } + gobFacts = append(gobFacts, gf) } for key, fact := range depPkgFacts { - pkgFacts = append(pkgFacts, analysis.PackageFact{Package: key.Pkg, Fact: fact}) + gf := gobFact{ + PkgPath: key.Pkg.Path(), + Fact: fact, + } + gobFacts = append(gobFacts, gf) } - var diags []analysis.Diagnostic + var diags []Diagnostic for _, a := range root.deps { a := 
a.(*analyzerAction) diags = append(diags, a.Diagnostics...) } return analysisResult{ - objFacts: objFacts, - pkgFacts: pkgFacts, + facts: gobFacts, diagnostics: diags, unused: unusedResult, }, nil @@ -1197,15 +1126,13 @@ func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patte continue } out[i] = Result{ - Package: item.Package, - Config: item.cfg, - Initial: !item.factsOnly, - Skipped: item.skipped, - Failed: item.failed, - Errors: item.errors, - diagnostics: item.diagnostics, - directives: item.directives, - unused: item.unused, + Package: item.Package, + Config: item.cfg, + Initial: !item.factsOnly, + Skipped: item.skipped, + Failed: item.failed, + Errors: item.errors, + results: item.results, } } return out, nil From 9f98f02efaeea0fd8e186455de075731b45a9d87 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 12 May 2020 08:52:15 +0200 Subject: [PATCH 038/111] runner: cache computation of which analyzers produce facts --- runner/runner.go | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/runner/runner.go b/runner/runner.go index 20337f95a..088982c46 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -321,6 +321,7 @@ type Runner struct { type subrunner struct { *Runner analyzers []*analysis.Analyzer + factAnalyzers []*analysis.Analyzer analyzerNames string } @@ -344,9 +345,17 @@ func newSubrunner(r *Runner, analyzers []*analysis.Analyzer) *subrunner { analyzerNames[i] = a.Name } sort.Strings(analyzerNames) + + var factAnalyzers []*analysis.Analyzer + for _, a := range analyzers { + if len(a.FactTypes) > 0 { + factAnalyzers = append(factAnalyzers, a) + } + } return &subrunner{ Runner: r, analyzers: analyzers, + factAnalyzers: factAnalyzers, analyzerNames: strings.Join(analyzerNames, ","), } } @@ -757,6 +766,11 @@ func (ar *analyzerRunner) do(act action) error { dep := dep.(*analyzerAction) results[dep.Analyzer] = dep.Result } + // OPT(dh): cache factTypes, it is the same for all 
packages for a given analyzer + // + // OPT(dh): do we need the factTypes map? most analyzers have 0-1 + // fact types. iterating over the slice is probably faster than + // indexing a map. factTypes := map[reflect.Type]struct{}{} for _, typ := range a.Analyzer.FactTypes { factTypes[reflect.TypeOf(typ)] = struct{}{} @@ -914,18 +928,21 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an } } - // OPT(dh): this computation is the same for all packages - // (depending on act.factsOnly), we should cache it in the runner. analyzerActionCache := map[*analysis.Analyzer]*analyzerAction{} root := &analyzerAction{} - for _, a := range r.analyzers { + var analyzers []*analysis.Analyzer + if pkgAct.factsOnly { // When analyzing non-initial packages, we only care about // analyzers that produce facts. - if !pkgAct.factsOnly || len(a.FactTypes) > 0 { - a := newAnalyzerAction(a, analyzerActionCache) - root.deps = append(root.deps, a) - a.triggers = append(a.triggers, root) - } + analyzers = r.factAnalyzers + } else { + analyzers = r.analyzers + } + + for _, a := range analyzers { + a := newAnalyzerAction(a, analyzerActionCache) + root.deps = append(root.deps, a) + a.triggers = append(a.triggers, root) } root.pending = uint32(len(root.deps)) all := actionList(root) @@ -1007,6 +1024,7 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an }, nil } +// OPT(dh): why use actionList when we already have a map of all actions? 
func actionList(root action) []action { seen := map[action]struct{}{} all := []action{} From 061ec28cdd9be292124e46e2691f1b1ea2aeeffa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 12 May 2020 09:02:26 +0200 Subject: [PATCH 039/111] runner: remove actionList function --- runner/runner.go | 44 ++++++++++++-------------------------------- 1 file changed, 12 insertions(+), 32 deletions(-) diff --git a/runner/runner.go b/runner/runner.go index 088982c46..1fb35185d 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -928,7 +928,6 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an } } - analyzerActionCache := map[*analysis.Analyzer]*analyzerAction{} root := &analyzerAction{} var analyzers []*analysis.Analyzer if pkgAct.factsOnly { @@ -939,13 +938,13 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an analyzers = r.analyzers } + all := map[*analysis.Analyzer]*analyzerAction{} for _, a := range analyzers { - a := newAnalyzerAction(a, analyzerActionCache) + a := newAnalyzerAction(a, all) root.deps = append(root.deps, a) a.triggers = append(a.triggers, root) } root.pending = uint32(len(root.deps)) - all := actionList(root) ar := &analyzerRunner{ pkg: pkg, @@ -974,8 +973,6 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an var unusedResult unused.SerializedResult for _, a := range all { - a := a.(*analyzerAction) - if a != root && a.Analyzer.Name == "U1000" { // TODO(dh): figure out a clean abstraction, instead of // special-casing U1000. @@ -1024,25 +1021,6 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an }, nil } -// OPT(dh): why use actionList when we already have a map of all actions? 
-func actionList(root action) []action { - seen := map[action]struct{}{} - all := []action{} - var walk func(action) - walk = func(a action) { - if _, ok := seen[a]; ok { - return - } - seen[a] = struct{}{} - for _, a1 := range a.Deps() { - walk(a1) - } - all = append(all, a) - } - walk(root) - return all -} - func registerGobTypes(analyzers []*analysis.Analyzer) { for _, a := range analyzers { for _, typ := range a.FactTypes { @@ -1105,17 +1083,20 @@ func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patte } r.Stats.setInitialPackages(len(lpkgs)) + if len(lpkgs) == 0 { + return nil, nil + } + r.Stats.setState(StateBuildActionGraph) - packageActionCache := map[*loader.PackageSpec]*packageAction{} + all := map[*loader.PackageSpec]*packageAction{} root := &packageAction{} for _, lpkg := range lpkgs { - a := newPackageActionRoot(lpkg, packageActionCache) + a := newPackageActionRoot(lpkg, all) root.deps = append(root.deps, a) a.triggers = append(a.triggers, root) } root.pending = uint32(len(root.deps)) - all := actionList(root) queue := make(chan action) r.Stats.setTotalPackages(len(all) - 1) @@ -1137,13 +1118,12 @@ func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patte } r.Stats.setState(StateFinalizing) - out := make([]Result, len(all)-1) - for i, item := range all { - item := item.(*packageAction) + out := make([]Result, 0, len(all)) + for _, item := range all { if item.Package == nil { continue } - out[i] = Result{ + out = append(out, Result{ Package: item.Package, Config: item.cfg, Initial: !item.factsOnly, @@ -1151,7 +1131,7 @@ func (r *Runner) Run(cfg *packages.Config, analyzers []*analysis.Analyzer, patte Failed: item.failed, Errors: item.errors, results: item.results, - } + }) } return out, nil } From a48ee6de2091c529a58bbde7dca3e745d3460507 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 12 May 2020 20:40:58 +0200 Subject: [PATCH 040/111] Don't read entire archive Only a fraction of the archive 
consists of the type information we need. Don't load the rest into memory. In absurd cases, such as binary assets, this is a ~100% reduction. --- internal/go/gcexportdata/gcexportdata.go | 63 +------------------ internal/go/gcexportdata/gcexportdata_test.go | 6 +- internal/go/gcimporter/exportdata.go | 36 ++++++++--- internal/go/gcimporter/gcimporter.go | 2 +- loader/loader.go | 6 +- 5 files changed, 39 insertions(+), 74 deletions(-) diff --git a/internal/go/gcexportdata/gcexportdata.go b/internal/go/gcexportdata/gcexportdata.go index aa8ebe6ac..a8dbb6cd1 100644 --- a/internal/go/gcexportdata/gcexportdata.go +++ b/internal/go/gcexportdata/gcexportdata.go @@ -21,47 +21,15 @@ package gcexportdata import ( - "bufio" "bytes" "fmt" "go/token" "go/types" - "io" - "io/ioutil" - "os" "honnef.co/go/tools/internal/go/gcimporter" ) -type bufferedReader struct { - r io.Reader - buf *bufio.Reader -} - -func (r *bufferedReader) Read(b []byte) (int, error) { - return r.buf.Read(b) -} - -func (r *bufferedReader) ReadSlice(delim byte) (line []byte, err error) { - return r.buf.ReadSlice(delim) -} - -// NewReader returns a reader for the export data section of an object -// (.o) or archive (.a) file read from r. The new reader may provide -// additional trailing data beyond the end of the export data. -func NewReader(r io.Reader) (io.Reader, error) { - buf := &bufferedReader{ - r: r, - buf: bufio.NewReader(r), - } - _, err := gcimporter.FindExportData(buf) - // If we ever switch to a zip-like archive format with the ToC - // at the end, we can return the correct portion of export data, - // but for now we must return the entire rest of the file. - return buf, err -} - -// Read reads export data from in, decodes it, and returns type +// Read reads export data from data, decodes it, and returns type // information for the package. // The package name is specified by path. // File position information is added to fset. 
@@ -71,34 +39,7 @@ func NewReader(r io.Reader) (io.Reader, error) { // must ensure that imports[path] does not exist, or exists but is // incomplete (see types.Package.Complete), and Read inserts the // resulting package into this map entry. -// -// On return, the state of the reader is undefined. -func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { - var data []byte - if br, ok := in.(*bufferedReader); ok { - if f, ok := br.r.(*os.File); ok { - fi, err := f.Stat() - if err == nil { - // we expect to be close to the start of the file, - // which is why we don't bother checking with - // SEEK_CUR. - data = make([]byte, fi.Size()) - n, err := io.ReadFull(in, data) - data = data[:n] - if err != nil && err != io.ErrUnexpectedEOF { - data = nil - } - } - } - } - if data == nil { - var err error - data, err = ioutil.ReadAll(in) - if err != nil { - return nil, fmt.Errorf("reading export data for %q: %v", path, err) - } - } - +func Read(data []byte, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { if bytes.HasPrefix(data, []byte("!")) { return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) } diff --git a/internal/go/gcexportdata/gcexportdata_test.go b/internal/go/gcexportdata/gcexportdata_test.go index 17b3e6f3d..564f4d267 100644 --- a/internal/go/gcexportdata/gcexportdata_test.go +++ b/internal/go/gcexportdata/gcexportdata_test.go @@ -8,6 +8,7 @@ import ( "testing" "honnef.co/go/tools/internal/go/gcexportdata" + "honnef.co/go/tools/internal/go/gcimporter" ) // Test to ensure that gcexportdata can read files produced by App @@ -19,7 +20,8 @@ func TestAppEngine16(t *testing.T) { t.Fatal(err) } defer f.Close() - r, err := gcexportdata.NewReader(f) + + b, err := gcimporter.GetExportData(f) if err != nil { log.Fatalf("reading export data: %v", err) } @@ -27,7 +29,7 @@ 
func TestAppEngine16(t *testing.T) { // Decode the export data. fset := token.NewFileSet() imports := make(map[string]*types.Package) - pkg, err := gcexportdata.Read(r, fset, imports, "errors") + pkg, err := gcexportdata.Read(b, fset, imports, "errors") if err != nil { log.Fatal(err) } diff --git a/internal/go/gcimporter/exportdata.go b/internal/go/gcimporter/exportdata.go index b0a2e353f..3a48d3a19 100644 --- a/internal/go/gcimporter/exportdata.go +++ b/internal/go/gcimporter/exportdata.go @@ -9,8 +9,10 @@ package gcimporter import ( + "bufio" "fmt" "io" + "io/ioutil" "strconv" "strings" ) @@ -36,18 +38,13 @@ func readGopackHeader(r io.Reader) (name string, size int, err error) { return } -type BufferedReader interface { - Read(b []byte) (int, error) - ReadSlice(delim byte) (line []byte, err error) -} - -// FindExportData positions the reader r at the beginning of the +// findExportData positions the reader r at the beginning of the // export data section of an underlying GC-created object/archive // file by reading from it. The reader must be positioned at the // start of the file before calling this function. The hdr result // is the string before the export data, either "$$" or "$$B". // -func FindExportData(r BufferedReader) (hdr string, err error) { +func findExportData(r *bufio.Reader) (hdr string, length int, err error) { // Read first line to make sure this is an object file. line, err := r.ReadSlice('\n') if err != nil { @@ -58,7 +55,7 @@ func FindExportData(r BufferedReader) (hdr string, err error) { if string(line) == "!\n" { // Archive file. Scan to __.PKGDEF. var name string - if name, _, err = readGopackHeader(r); err != nil { + if name, length, err = readGopackHeader(r); err != nil { return } @@ -74,6 +71,7 @@ func FindExportData(r BufferedReader) (hdr string, err error) { err = fmt.Errorf("can't find export data (%v)", err) return } + length -= len(line) } // Now at __.PKGDEF in archive or still at beginning of file. 
@@ -90,8 +88,30 @@ func FindExportData(r BufferedReader) (hdr string, err error) { err = fmt.Errorf("can't find export data (%v)", err) return } + length -= len(line) } hdr = string(line) return } + +func GetExportData(r io.ReadSeeker) ([]byte, error) { + br := bufio.NewReader(r) + _, length, err := findExportData(br) + if err != nil { + return nil, err + } + if _, err := r.Seek(-int64(br.Buffered()), io.SeekCurrent); err != nil { + return nil, err + } + if length > 0 { + // OPT(dh): in theory, reusing this slice across calls to + // LoadFromExport should help. when we tried, it made no + // difference. investigate. + b := make([]byte, length) + _, err := io.ReadFull(r, b) + return b, err + } else { + return ioutil.ReadAll(r) + } +} diff --git a/internal/go/gcimporter/gcimporter.go b/internal/go/gcimporter/gcimporter.go index 81a262d36..018acf64d 100644 --- a/internal/go/gcimporter/gcimporter.go +++ b/internal/go/gcimporter/gcimporter.go @@ -179,7 +179,7 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func var hdr string buf := bufio.NewReader(rc) - if hdr, err = FindExportData(buf); err != nil { + if hdr, _, err = findExportData(buf); err != nil { return } diff --git a/loader/loader.go b/loader/loader.go index d9d4bb110..575d7e3ba 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -14,6 +14,7 @@ import ( "honnef.co/go/tools/config" "honnef.co/go/tools/internal/cache" "honnef.co/go/tools/internal/go/gcexportdata" + "honnef.co/go/tools/internal/go/gcimporter" "golang.org/x/tools/go/packages" ) @@ -189,11 +190,12 @@ func (prog *program) LoadFromExport(spec *PackageSpec) (*Package, error) { } defer f.Close() - r, err := gcexportdata.NewReader(f) + b, err := gcimporter.GetExportData(f) if err != nil { return nil, err } - tpkg, err := gcexportdata.Read(r, prog.fset, prog.packages, spec.PkgPath) + + tpkg, err := gcexportdata.Read(b, prog.fset, prog.packages, spec.PkgPath) if err != nil { return nil, err } From 
ae2625b84ee03f9a0b840a53b26850613c905682 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 13 May 2020 00:20:53 +0200 Subject: [PATCH 041/111] loader: unexport LoadFromExport and LoadFromSource --- loader/loader.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/loader/loader.go b/loader/loader.go index 575d7e3ba..58d2be353 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -163,23 +163,23 @@ func Load(spec *PackageSpec) (*Package, Stats, error) { continue } t := time.Now() - _, err := prog.LoadFromExport(imp) + _, err := prog.loadFromExport(imp) stats.Export[imp] = time.Since(t) if err != nil { return nil, stats, err } } t := time.Now() - pkg, err := prog.LoadFromSource(spec) + pkg, err := prog.loadFromSource(spec) if err == errMaxFileSize { - pkg, err = prog.LoadFromExport(spec) + pkg, err = prog.loadFromExport(spec) } stats.Source = time.Since(t) return pkg, stats, err } -// LoadFromExport loads a package from export data. -func (prog *program) LoadFromExport(spec *PackageSpec) (*Package, error) { +// loadFromExport loads a package from export data. +func (prog *program) loadFromExport(spec *PackageSpec) (*Package, error) { // log.Printf("Loading package %s from export", spec) if spec.ExportFile == "" { return nil, fmt.Errorf("no export data for %q", spec.ID) @@ -210,9 +210,9 @@ func (prog *program) LoadFromExport(spec *PackageSpec) (*Package, error) { return pkg, nil } -// LoadFromSource loads a package from source. All of its dependencies +// loadFromSource loads a package from source. All of its dependencies // must have been loaded already. 
-func (prog *program) LoadFromSource(spec *PackageSpec) (*Package, error) { +func (prog *program) loadFromSource(spec *PackageSpec) (*Package, error) { if len(spec.Errors) > 0 { panic("LoadFromSource called on package with errors") } From df51f576b414bdb519c187ef381ec46e335511bf Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 13 May 2020 01:20:50 +0200 Subject: [PATCH 042/111] Delete support for textual export data format We don't care about supporting compilers from many years ago. We've already dropped support for the old binary export data, and the textual form is even older than that. --- internal/go/gcexportdata/gcexportdata.go | 59 -- internal/go/gcexportdata/gcexportdata_test.go | 43 - internal/go/gcexportdata/main.go | 100 -- .../go/gcexportdata/testdata/errors-ae16.a | Bin 5494 -> 0 bytes internal/go/gcimporter/gcimporter.go | 902 +----------------- loader/loader.go | 3 +- 6 files changed, 2 insertions(+), 1105 deletions(-) delete mode 100644 internal/go/gcexportdata/gcexportdata.go delete mode 100644 internal/go/gcexportdata/gcexportdata_test.go delete mode 100644 internal/go/gcexportdata/main.go delete mode 100644 internal/go/gcexportdata/testdata/errors-ae16.a diff --git a/internal/go/gcexportdata/gcexportdata.go b/internal/go/gcexportdata/gcexportdata.go deleted file mode 100644 index a8dbb6cd1..000000000 --- a/internal/go/gcexportdata/gcexportdata.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package gcexportdata provides functions for locating, reading, and -// writing export data files containing type information produced by the -// gc compiler. This package supports go1.7 export data format and all -// later versions. 
-// -// Although it might seem convenient for this package to live alongside -// go/types in the standard library, this would cause version skew -// problems for developer tools that use it, since they must be able to -// consume the outputs of the gc compiler both before and after a Go -// update such as from Go 1.7 to Go 1.8. Because this package lives in -// golang.org/x/tools, sites can update their version of this repo some -// time before the Go 1.8 release and rebuild and redeploy their -// developer tools, which will then be able to consume both Go 1.7 and -// Go 1.8 export data files, so they will work before and after the -// Go update. (See discussion at https://golang.org/issue/15651.) -// -package gcexportdata - -import ( - "bytes" - "fmt" - "go/token" - "go/types" - - "honnef.co/go/tools/internal/go/gcimporter" -) - -// Read reads export data from data, decodes it, and returns type -// information for the package. -// The package name is specified by path. -// File position information is added to fset. -// -// Read may inspect and add to the imports map to ensure that references -// within the export data to other packages are consistent. The caller -// must ensure that imports[path] does not exist, or exists but is -// incomplete (see types.Package.Complete), and Read inserts the -// resulting package into this map entry. -func Read(data []byte, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { - if bytes.HasPrefix(data, []byte("!")) { - return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) - } - - // The App Engine Go runtime v1.6 uses the old export data format. - // TODO(adonovan): delete once v1.7 has been around for a while. 
- if bytes.HasPrefix(data, []byte("package ")) { - return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) - } - - // The indexed export format starts with an 'i'. - if len(data) == 0 || data[0] != 'i' { - return nil, fmt.Errorf("unknown export data format") - } - _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) - return pkg, err -} diff --git a/internal/go/gcexportdata/gcexportdata_test.go b/internal/go/gcexportdata/gcexportdata_test.go deleted file mode 100644 index 564f4d267..000000000 --- a/internal/go/gcexportdata/gcexportdata_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package gcexportdata_test - -import ( - "go/token" - "go/types" - "log" - "os" - "testing" - - "honnef.co/go/tools/internal/go/gcexportdata" - "honnef.co/go/tools/internal/go/gcimporter" -) - -// Test to ensure that gcexportdata can read files produced by App -// Engine Go runtime v1.6. -func TestAppEngine16(t *testing.T) { - // Open and read the file. - f, err := os.Open("testdata/errors-ae16.a") - if err != nil { - t.Fatal(err) - } - defer f.Close() - - b, err := gcimporter.GetExportData(f) - if err != nil { - log.Fatalf("reading export data: %v", err) - } - - // Decode the export data. - fset := token.NewFileSet() - imports := make(map[string]*types.Package) - pkg, err := gcexportdata.Read(b, fset, imports, "errors") - if err != nil { - log.Fatal(err) - } - - // Print package information. - got := pkg.Scope().Lookup("New").Type().String() - want := "func(text string) error" - if got != want { - t.Errorf("New.Type = %s, want %s", got, want) - } -} diff --git a/internal/go/gcexportdata/main.go b/internal/go/gcexportdata/main.go deleted file mode 100644 index a84a7ca4c..000000000 --- a/internal/go/gcexportdata/main.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// +build ignore - -// The gcexportdata command is a diagnostic tool that displays the -// contents of gc export data files. -package main - -import ( - "flag" - "fmt" - "go/token" - "go/types" - "log" - "os" - - "honnef.co/go/tools/internal/go/gcexportdata" - - "golang.org/x/tools/go/types/typeutil" -) - -var packageFlag = flag.String("package", "", "alternative package to print") - -func main() { - log.SetPrefix("gcexportdata: ") - log.SetFlags(0) - flag.Usage = func() { - fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a") - } - flag.Parse() - if flag.NArg() != 1 { - flag.Usage() - os.Exit(2) - } - filename := flag.Args()[0] - - f, err := os.Open(filename) - if err != nil { - log.Fatal(err) - } - - r, err := gcexportdata.NewReader(f) - if err != nil { - log.Fatalf("%s: %s", filename, err) - } - - // Decode the package. - const primary = "" - imports := make(map[string]*types.Package) - fset := token.NewFileSet() - pkg, err := gcexportdata.Read(r, fset, imports, primary) - if err != nil { - log.Fatalf("%s: %s", filename, err) - } - - // Optionally select an indirectly mentioned package. - if *packageFlag != "" { - pkg = imports[*packageFlag] - if pkg == nil { - fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n", - filename, *packageFlag) - for p := range imports { - if p != primary { - fmt.Fprintf(os.Stderr, "\t%s\n", p) - } - } - os.Exit(1) - } - } - - // Print all package-level declarations, including non-exported ones. - fmt.Printf("package %s\n", pkg.Name()) - for _, imp := range pkg.Imports() { - fmt.Printf("import %q\n", imp.Path()) - } - qual := func(p *types.Package) string { - if pkg == p { - return "" - } - return p.Name() - } - scope := pkg.Scope() - for _, name := range scope.Names() { - obj := scope.Lookup(name) - fmt.Printf("%s: %s\n", - fset.Position(obj.Pos()), - types.ObjectString(obj, qual)) - - // For types, print each method. 
- if _, ok := obj.(*types.TypeName); ok { - for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { - fmt.Printf("%s: %s\n", - fset.Position(method.Obj().Pos()), - types.SelectionString(method, qual)) - } - } - } -} diff --git a/internal/go/gcexportdata/testdata/errors-ae16.a b/internal/go/gcexportdata/testdata/errors-ae16.a deleted file mode 100644 index 3f1dad54f074503fb14d9ddf702553f41e696a4c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5494 zcmb^#&2QUO{KYL*7D{K0YLsbAUdXDnfM@4BWo*^d4I|K2X*+CEMe8_r8k#1N9(8^@tYXqg z<}w8}V_7*Pl`7=ZYQj*}yro*%v>J^>VzEeFH9%B>&Wc@?;^T-&`COw|w<(Lpl$ZX_gmfoz z#JN>BDbRzBtXIHrm&%od*i;*;g^4Om`ou_4vtdjrYl%d3Lb`-zNO7tl*R`b~B?J~S zZW_QE9YE$wwRySbhM*NA_B|+GseGOVPI{SmMNUTufxsp!#SqyQhMBVoxj%saMQvmF z41q^`Ul)(w`w@3{|BS1@Y0bIVG=6B>S8q{-=E>Moy4jeCg|+5POwu+gv7FXi4C6*w z{{W$6<)+q*yWm&wK>bV0KXpd6jX9v841so9yZbo)$iup|odf1v;#^E@i5z?Olhz!J zY0XM3+&Yw=eM9%Zr}p;teud|FBzzKu67os-1R>*?0CH!x0PNH7Fdi@I%>O&g#;1C)@dQcW&9dO@D-WXw_$(7q55Ely?oZ~*!( zg^p4vO-nf2qGvrBKNe7NiE9Te5+8vgz~FBRkvW9wU0{0G9aR0^0Fo*kVYz0T4F^hc zzFIS!H3u-JX9)?P0+aL=r{*lz=M_a!$x$KL4j-aINEbv*xX*2sNP}LJN8l7ooshxf z)Dx_R$Z_F~S?g+OcIN5`2@5+RB96W-d>;yp5MhMKLPI!92CtWlO3lzKjfW4cR8~o6 zlj)Rcq%#>Mld*D{Tux8rvx;F^X`Og*6(gtXIWwoIc~ws;rfwvXmZ~SzoT|Vk11=Ec zQtfQRHqO$xG~5Lag<>)O)gh>tew)vsOBnO&OR}2e{*Wk6aZ*peMVc zSGivP7bM7S0mwo~3lCj8c=U{Si^rf#zn@1>!%=sU>)^AYmxF~dLdK95x+~ZJ7=#n% z8)Pe?tN}tFfrwyjt09A=RF@p-@$i-fak?lp=>KG>0INl+yxfdO`0ScQCq)j30`kSv8Zcd#c z;c$}23WVOwAc(9$qHT((i;d<491lTn9Cqj4u82dx z+efIy+ybA(*W8q15)*AoR+4Qhlzadf+tSGnK}Zm5R~F|)FxF&!Un%{t{L87ADJ9pA zeG4xkzH)?-7cQY5?se-W1FrPfOUnnX0ts>us|cu`U?3}jeMx==DOR_zC6QrLACzVKWR4?Iwlw)wu3c)C2A`TfGo= z#hG?np{0^SK&H$Ik1~LX z417n}j0YPCc(;w~vD?RfsKeG+o5Z!1$p|_}K_kp2Zbx~!UbCIL?kr-&2)%c^{7HX; zuy1*X2j$OhLq5e{?NA(E$2c8RoYSCcU4b|@#ylbrIk;AKAendA`wPhB7qID>JrSDD zo7~2JR~}Qo_Dk0<9yXut7rLPJQ!8%o>DD)} package object (across importer) - localPkgs map[string]*types.Package // package id -> package object (just this package) -} 
- -func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { - p.scanner.Init(src) - p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } - p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments - p.scanner.Whitespace = 1<<'\t' | 1<<' ' - p.scanner.Filename = filename // for good error messages - p.next() - p.id = id - p.sharedPkgs = packages - if debug { - // check consistency of packages map - for _, pkg := range packages { - if pkg.Name() == "" { - fmt.Printf("no package name for %s\n", pkg.Path()) - } - } - } -} - -func (p *parser) next() { - p.tok = p.scanner.Scan() - switch p.tok { - case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': - p.lit = p.scanner.TokenText() - default: - p.lit = "" - } - if debug { - fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) - } -} - -func declTypeName(pkg *types.Package, name string) *types.TypeName { - scope := pkg.Scope() - if obj := scope.Lookup(name); obj != nil { - return obj.(*types.TypeName) - } - obj := types.NewTypeName(token.NoPos, pkg, name, nil) - // a named type may be referred to before the underlying type - // is known - set it up - types.NewNamed(obj, nil, nil) - scope.Insert(obj) - return obj -} - -// ---------------------------------------------------------------------------- -// Error handling - -// Internal errors are boxed as importErrors. 
-type importError struct { - pos scanner.Position - err error -} - -func (e importError) Error() string { - return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) -} - -func (p *parser) error(err interface{}) { - if s, ok := err.(string); ok { - err = errors.New(s) - } - // panic with a runtime.Error if err is not an error - panic(importError{p.scanner.Pos(), err.(error)}) -} - -func (p *parser) errorf(format string, args ...interface{}) { - p.error(fmt.Sprintf(format, args...)) -} - -func (p *parser) expect(tok rune) string { - lit := p.lit - if p.tok != tok { - p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) - } - p.next() - return lit -} - -func (p *parser) expectSpecial(tok string) { - sep := 'x' // not white space - i := 0 - for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { - sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token - p.next() - i++ - } - if i < len(tok) { - p.errorf("expected %q, got %q", tok, tok[0:i]) - } -} - -func (p *parser) expectKeyword(keyword string) { - lit := p.expect(scanner.Ident) - if lit != keyword { - p.errorf("expected keyword %s, got %q", keyword, lit) - } -} - -// ---------------------------------------------------------------------------- -// Qualified and unqualified names - -// PackageId = string_lit . -// -func (p *parser) parsePackageID() string { - id, err := strconv.Unquote(p.expect(scanner.String)) - if err != nil { - p.error(err) - } - // id == "" stands for the imported package id - // (only known at time of package installation) - if id == "" { - id = p.id - } - return id -} - -// PackageName = ident . -// -func (p *parser) parsePackageName() string { - return p.expect(scanner.Ident) -} - -// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . 
-func (p *parser) parseDotIdent() string { - ident := "" - if p.tok != scanner.Int { - sep := 'x' // not white space - for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { - ident += p.lit - sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token - p.next() - } - } - if ident == "" { - p.expect(scanner.Ident) // use expect() for error handling - } - return ident -} - -// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . -// -func (p *parser) parseQualifiedName() (id, name string) { - p.expect('@') - id = p.parsePackageID() - p.expect('.') - // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. - if p.tok == '?' { - p.next() - } else { - name = p.parseDotIdent() - } - return -} - -// getPkg returns the package for a given id. If the package is -// not found, create the package and add it to the p.localPkgs -// and p.sharedPkgs maps. name is the (expected) name of the -// package. If name == "", the package name is expected to be -// set later via an import clause in the export data. -// -// id identifies a package, usually by a canonical package path like -// "encoding/json" but possibly by a non-canonical import path like -// "./json". 
-// -func (p *parser) getPkg(id, name string) *types.Package { - // package unsafe is not in the packages maps - handle explicitly - if id == "unsafe" { - return types.Unsafe - } - - pkg := p.localPkgs[id] - if pkg == nil { - // first import of id from this package - pkg = p.sharedPkgs[id] - if pkg == nil { - // first import of id by this importer; - // add (possibly unnamed) pkg to shared packages - pkg = types.NewPackage(id, name) - p.sharedPkgs[id] = pkg - } - // add (possibly unnamed) pkg to local packages - if p.localPkgs == nil { - p.localPkgs = make(map[string]*types.Package) - } - p.localPkgs[id] = pkg - } else if name != "" { - // package exists already and we have an expected package name; - // make sure names match or set package name if necessary - if pname := pkg.Name(); pname == "" { - pkg.SetName(name) - } else if pname != name { - p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) - } - } - return pkg -} - -// parseExportedName is like parseQualifiedName, but -// the package id is resolved to an imported *types.Package. -// -func (p *parser) parseExportedName() (pkg *types.Package, name string) { - id, name := p.parseQualifiedName() - pkg = p.getPkg(id, "") - return -} - -// ---------------------------------------------------------------------------- -// Types - -// BasicType = identifier . -// -func (p *parser) parseBasicType() types.Type { - id := p.expect(scanner.Ident) - obj := types.Universe.Lookup(id) - if obj, ok := obj.(*types.TypeName); ok { - return obj.Type() - } - p.errorf("not a basic type: %s", id) - return nil -} - -// ArrayType = "[" int_lit "]" Type . 
-// -func (p *parser) parseArrayType(parent *types.Package) types.Type { - // "[" already consumed and lookahead known not to be "]" - lit := p.expect(scanner.Int) - p.expect(']') - elem := p.parseType(parent) - n, err := strconv.ParseInt(lit, 10, 64) - if err != nil { - p.error(err) - } - return types.NewArray(elem, n) -} - -// MapType = "map" "[" Type "]" Type . -// -func (p *parser) parseMapType(parent *types.Package) types.Type { - p.expectKeyword("map") - p.expect('[') - key := p.parseType(parent) - p.expect(']') - elem := p.parseType(parent) - return types.NewMap(key, elem) -} - -// Name = identifier | "?" | QualifiedName . -// -// For unqualified and anonymous names, the returned package is the parent -// package unless parent == nil, in which case the returned package is the -// package being imported. (The parent package is not nil if the the name -// is an unqualified struct field or interface method name belonging to a -// type declared in another package.) -// -// For qualified names, the returned package is nil (and not created if -// it doesn't exist yet) unless materializePkg is set (which creates an -// unnamed package with valid package path). In the latter case, a -// subsequent import clause is expected to provide a name for the package. -// -func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) { - pkg = parent - if pkg == nil { - pkg = p.sharedPkgs[p.id] - } - switch p.tok { - case scanner.Ident: - name = p.lit - p.next() - case '?': - // anonymous - p.next() - case '@': - // exported name prefixed with package path - pkg = nil - var id string - id, name = p.parseQualifiedName() - if materializePkg { - pkg = p.getPkg(id, "") - } - default: - p.error("name expected") - } - return -} - -func deref(typ types.Type) types.Type { - if p, _ := typ.(*types.Pointer); p != nil { - return p.Elem() - } - return typ -} - -// Field = Name Type [ string_lit ] . 
-// -func (p *parser) parseField(parent *types.Package) (*types.Var, string) { - pkg, name := p.parseName(parent, true) - - if name == "_" { - // Blank fields should be package-qualified because they - // are unexported identifiers, but gc does not qualify them. - // Assuming that the ident belongs to the current package - // causes types to change during re-exporting, leading - // to spurious "can't assign A to B" errors from go/types. - // As a workaround, pretend all blank fields belong - // to the same unique dummy package. - const blankpkg = "<_>" - pkg = p.getPkg(blankpkg, blankpkg) - } - - typ := p.parseType(parent) - anonymous := false - if name == "" { - // anonymous field - typ must be T or *T and T must be a type name - switch typ := deref(typ).(type) { - case *types.Basic: // basic types are named types - pkg = nil // objects defined in Universe scope have no package - name = typ.Name() - case *types.Named: - name = typ.Obj().Name() - default: - p.errorf("anonymous field expected") - } - anonymous = true - } - tag := "" - if p.tok == scanner.String { - s := p.expect(scanner.String) - var err error - tag, err = strconv.Unquote(s) - if err != nil { - p.errorf("invalid struct tag %s: %s", s, err) - } - } - return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag -} - -// StructType = "struct" "{" [ FieldList ] "}" . -// FieldList = Field { ";" Field } . -// -func (p *parser) parseStructType(parent *types.Package) types.Type { - var fields []*types.Var - var tags []string - - p.expectKeyword("struct") - p.expect('{') - for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { - if i > 0 { - p.expect(';') - } - fld, tag := p.parseField(parent) - if tag != "" && tags == nil { - tags = make([]string, i) - } - if tags != nil { - tags = append(tags, tag) - } - fields = append(fields, fld) - } - p.expect('}') - - return types.NewStruct(fields, tags) -} - -// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . 
-// -func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { - _, name := p.parseName(nil, false) - // remove gc-specific parameter numbering - if i := strings.Index(name, "·"); i >= 0 { - name = name[:i] - } - if p.tok == '.' { - p.expectSpecial("...") - isVariadic = true - } - typ := p.parseType(nil) - if isVariadic { - typ = types.NewSlice(typ) - } - // ignore argument tag (e.g. "noescape") - if p.tok == scanner.String { - p.next() - } - // TODO(gri) should we provide a package? - par = types.NewVar(token.NoPos, nil, name, typ) - return -} - -// Parameters = "(" [ ParameterList ] ")" . -// ParameterList = { Parameter "," } Parameter . -// -func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { - p.expect('(') - for p.tok != ')' && p.tok != scanner.EOF { - if len(list) > 0 { - p.expect(',') - } - par, variadic := p.parseParameter() - list = append(list, par) - if variadic { - if isVariadic { - p.error("... not on final argument") - } - isVariadic = true - } - } - p.expect(')') - - return -} - -// Signature = Parameters [ Result ] . -// Result = Type | Parameters . -// -func (p *parser) parseSignature(recv *types.Var) *types.Signature { - params, isVariadic := p.parseParameters() - - // optional result type - var results []*types.Var - if p.tok == '(' { - var variadic bool - results, variadic = p.parseParameters() - if variadic { - p.error("... not permitted on result type") - } - } - - return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) -} - -// InterfaceType = "interface" "{" [ MethodList ] "}" . -// MethodList = Method { ";" Method } . -// Method = Name Signature . -// -// The methods of embedded interfaces are always "inlined" -// by the compiler and thus embedded interfaces are never -// visible in the export data. 
-// -func (p *parser) parseInterfaceType(parent *types.Package) types.Type { - var methods []*types.Func - - p.expectKeyword("interface") - p.expect('{') - for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { - if i > 0 { - p.expect(';') - } - pkg, name := p.parseName(parent, true) - sig := p.parseSignature(nil) - methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) - } - p.expect('}') - - // Complete requires the type's embedded interfaces to be fully defined, - // but we do not define any - return newInterface(methods, nil).Complete() -} - -// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . -// -func (p *parser) parseChanType(parent *types.Package) types.Type { - dir := types.SendRecv - if p.tok == scanner.Ident { - p.expectKeyword("chan") - if p.tok == '<' { - p.expectSpecial("<-") - dir = types.SendOnly - } - } else { - p.expectSpecial("<-") - p.expectKeyword("chan") - dir = types.RecvOnly - } - elem := p.parseType(parent) - return types.NewChan(dir, elem) -} - -// Type = -// BasicType | TypeName | ArrayType | SliceType | StructType | -// PointerType | FuncType | InterfaceType | MapType | ChanType | -// "(" Type ")" . -// -// BasicType = ident . -// TypeName = ExportedName . -// SliceType = "[" "]" Type . -// PointerType = "*" Type . -// FuncType = "func" Signature . 
-// -func (p *parser) parseType(parent *types.Package) types.Type { - switch p.tok { - case scanner.Ident: - switch p.lit { - default: - return p.parseBasicType() - case "struct": - return p.parseStructType(parent) - case "func": - // FuncType - p.next() - return p.parseSignature(nil) - case "interface": - return p.parseInterfaceType(parent) - case "map": - return p.parseMapType(parent) - case "chan": - return p.parseChanType(parent) - } - case '@': - // TypeName - pkg, name := p.parseExportedName() - return declTypeName(pkg, name).Type() - case '[': - p.next() // look ahead - if p.tok == ']' { - // SliceType - p.next() - return types.NewSlice(p.parseType(parent)) - } - return p.parseArrayType(parent) - case '*': - // PointerType - p.next() - return types.NewPointer(p.parseType(parent)) - case '<': - return p.parseChanType(parent) - case '(': - // "(" Type ")" - p.next() - typ := p.parseType(parent) - p.expect(')') - return typ - } - p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) - return nil -} - -// ---------------------------------------------------------------------------- -// Declarations - -// ImportDecl = "import" PackageName PackageId . -// -func (p *parser) parseImportDecl() { - p.expectKeyword("import") - name := p.parsePackageName() - p.getPkg(p.parsePackageID(), name) -} - -// int_lit = [ "+" | "-" ] { "0" ... "9" } . -// -func (p *parser) parseInt() string { - s := "" - switch p.tok { - case '-': - s = "-" - p.next() - case '+': - p.next() - } - return s + p.expect(scanner.Int) -} - -// number = int_lit [ "p" int_lit ] . 
-// -func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) { - // mantissa - mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0) - if mant == nil { - panic("invalid mantissa") - } - - if p.lit == "p" { - // exponent (base 2) - p.next() - exp, err := strconv.ParseInt(p.parseInt(), 10, 0) - if err != nil { - p.error(err) - } - if exp < 0 { - denom := constant.MakeInt64(1) - denom = constant.Shift(denom, token.SHL, uint(-exp)) - typ = types.Typ[types.UntypedFloat] - val = constant.BinaryOp(mant, token.QUO, denom) - return - } - if exp > 0 { - mant = constant.Shift(mant, token.SHL, uint(exp)) - } - typ = types.Typ[types.UntypedFloat] - val = mant - return - } - - typ = types.Typ[types.UntypedInt] - val = mant - return -} - -// ConstDecl = "const" ExportedName [ Type ] "=" Literal . -// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . -// bool_lit = "true" | "false" . -// complex_lit = "(" float_lit "+" float_lit "i" ")" . -// rune_lit = "(" int_lit "+" int_lit ")" . -// string_lit = `"` { unicode_char } `"` . 
-// -func (p *parser) parseConstDecl() { - p.expectKeyword("const") - pkg, name := p.parseExportedName() - - var typ0 types.Type - if p.tok != '=' { - // constant types are never structured - no need for parent type - typ0 = p.parseType(nil) - } - - p.expect('=') - var typ types.Type - var val constant.Value - switch p.tok { - case scanner.Ident: - // bool_lit - if p.lit != "true" && p.lit != "false" { - p.error("expected true or false") - } - typ = types.Typ[types.UntypedBool] - val = constant.MakeBool(p.lit == "true") - p.next() - - case '-', scanner.Int: - // int_lit - typ, val = p.parseNumber() - - case '(': - // complex_lit or rune_lit - p.next() - if p.tok == scanner.Char { - p.next() - p.expect('+') - typ = types.Typ[types.UntypedRune] - _, val = p.parseNumber() - p.expect(')') - break - } - _, re := p.parseNumber() - p.expect('+') - _, im := p.parseNumber() - p.expectKeyword("i") - p.expect(')') - typ = types.Typ[types.UntypedComplex] - val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) - - case scanner.Char: - // rune_lit - typ = types.Typ[types.UntypedRune] - val = constant.MakeFromLiteral(p.lit, token.CHAR, 0) - p.next() - - case scanner.String: - // string_lit - typ = types.Typ[types.UntypedString] - val = constant.MakeFromLiteral(p.lit, token.STRING, 0) - p.next() - - default: - p.errorf("expected literal got %s", scanner.TokenString(p.tok)) - } - - if typ0 == nil { - typ0 = typ - } - - pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) -} - -// TypeDecl = "type" ExportedName Type . -// -func (p *parser) parseTypeDecl() { - p.expectKeyword("type") - pkg, name := p.parseExportedName() - obj := declTypeName(pkg, name) - - // The type object may have been imported before and thus already - // have a type associated with it. We still need to parse the type - // structure, but throw it away if the object already has a type. - // This ensures that all imports refer to the same type object for - // a given type declaration. 
- typ := p.parseType(pkg) - - if name := obj.Type().(*types.Named); name.Underlying() == nil { - name.SetUnderlying(typ) - } -} - -// VarDecl = "var" ExportedName Type . -// -func (p *parser) parseVarDecl() { - p.expectKeyword("var") - pkg, name := p.parseExportedName() - typ := p.parseType(pkg) - pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) -} - -// Func = Signature [ Body ] . -// Body = "{" ... "}" . -// -func (p *parser) parseFunc(recv *types.Var) *types.Signature { - sig := p.parseSignature(recv) - if p.tok == '{' { - p.next() - for i := 1; i > 0; p.next() { - switch p.tok { - case '{': - i++ - case '}': - i-- - } - } - } - return sig -} - -// MethodDecl = "func" Receiver Name Func . -// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . -// -func (p *parser) parseMethodDecl() { - // "func" already consumed - p.expect('(') - recv, _ := p.parseParameter() // receiver - p.expect(')') - - // determine receiver base type object - base := deref(recv.Type()).(*types.Named) - - // parse method name, signature, and possibly inlined body - _, name := p.parseName(nil, false) - sig := p.parseFunc(recv) - - // methods always belong to the same package as the base type object - pkg := base.Obj().Pkg() - - // add method to type unless type was imported before - // and method exists already - // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. - base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) -} - -// FuncDecl = "func" ExportedName Func . -// -func (p *parser) parseFuncDecl() { - // "func" already consumed - pkg, name := p.parseExportedName() - typ := p.parseFunc(nil) - pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) -} - -// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . 
-// -func (p *parser) parseDecl() { - if p.tok == scanner.Ident { - switch p.lit { - case "import": - p.parseImportDecl() - case "const": - p.parseConstDecl() - case "type": - p.parseTypeDecl() - case "var": - p.parseVarDecl() - case "func": - p.next() // look ahead - if p.tok == '(' { - p.parseMethodDecl() - } else { - p.parseFuncDecl() - } - } - } - p.expect('\n') -} - -// ---------------------------------------------------------------------------- -// Export - -// Export = "PackageClause { Decl } "$$" . -// PackageClause = "package" PackageName [ "safe" ] "\n" . -// -func (p *parser) parseExport() *types.Package { - p.expectKeyword("package") - name := p.parsePackageName() - if p.tok == scanner.Ident && p.lit == "safe" { - // package was compiled with -u option - ignore - p.next() - } - p.expect('\n') - - pkg := p.getPkg(p.id, name) - - for p.tok != '$' && p.tok != scanner.EOF { - p.parseDecl() - } - - if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' { - // don't call next()/expect() since reading past the - // export data may cause scanner errors (e.g. NUL chars) - p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch) - } - - if n := p.scanner.ErrorCount; n != 0 { - p.errorf("expected no scanner errors, got %d", n) - } - - // Record all locally referenced packages as imports. 
- var imports []*types.Package - for id, pkg2 := range p.localPkgs { - if pkg2.Name() == "" { - p.errorf("%s package has no name", id) - } - if id == p.id { - continue // avoid self-edge - } - imports = append(imports, pkg2) - } - sort.Sort(byPath(imports)) - pkg.SetImports(imports) - - // package was imported completely and without errors - pkg.MarkComplete() - - return pkg -} - type byPath []*types.Package func (a byPath) Len() int { return len(a) } diff --git a/loader/loader.go b/loader/loader.go index 58d2be353..3bf94ce81 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -13,7 +13,6 @@ import ( "honnef.co/go/tools/config" "honnef.co/go/tools/internal/cache" - "honnef.co/go/tools/internal/go/gcexportdata" "honnef.co/go/tools/internal/go/gcimporter" "golang.org/x/tools/go/packages" @@ -195,7 +194,7 @@ func (prog *program) loadFromExport(spec *PackageSpec) (*Package, error) { return nil, err } - tpkg, err := gcexportdata.Read(b, prog.fset, prog.packages, spec.PkgPath) + _, tpkg, err := gcimporter.IImportData(prog.fset, prog.packages, b[1:], spec.PkgPath) if err != nil { return nil, err } From da1a9ab088ada96ecf82c8877e74115616c67000 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 13 May 2020 07:58:14 +0200 Subject: [PATCH 043/111] Apply some trivial optimizations --- internal/go/gcimporter/iimport.go | 2 ++ runner/runner.go | 8 ++++---- unused/unused.go | 3 +++ 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/internal/go/gcimporter/iimport.go b/internal/go/gcimporter/iimport.go index 9a940fc38..0db50ca0d 100644 --- a/internal/go/gcimporter/iimport.go +++ b/internal/go/gcimporter/iimport.go @@ -73,6 +73,8 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data [] } }() + // OPT(dh): use a cheaper reader that does less state tracking. we + // don't need to be able to unread. 
r := &intReader{bytes.NewReader(data), path} version = int64(r.uint64()) diff --git a/runner/runner.go b/runner/runner.go index 1fb35185d..add837dfb 100644 --- a/runner/runner.go +++ b/runner/runner.go @@ -377,10 +377,10 @@ func newPackageAction(pkg *loader.PackageSpec, cache map[*loader.PackageSpec]*pa } cache[pkg] = a - // OPT(dh): pre-allocate a.errors if len(pkg.Errors) > 0 { - for _, err := range pkg.Errors { - a.errors = append(a.errors, err) + a.errors = make([]error, len(pkg.Errors)) + for i, err := range pkg.Errors { + a.errors[i] = err } a.failed = true @@ -389,7 +389,7 @@ func newPackageAction(pkg *loader.PackageSpec, cache map[*loader.PackageSpec]*pa return a } - // OPT(dh): pre-allocate a.deps + a.deps = make([]action, 0, len(pkg.Imports)) for _, dep := range pkg.Imports { depa := newPackageAction(dep, cache) depa.triggers = append(depa.triggers, a) diff --git a/unused/unused.go b/unused/unused.go index 1033c581c..8b7911cbe 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -580,6 +580,8 @@ func (c *checker) results() (used, unused []types.Object) { } }) + // OPT(dh): can we find meaningful initial capacities for the used and unused slices? + for _, n := range c.graph.Nodes { if obj, ok := n.obj.(types.Object); ok { switch obj := obj.(type) { @@ -1119,6 +1121,7 @@ func (g *graph) entry(pkg *pkg) { } } + // OPT(dh): can we find meaningful initial capacities for these slices? 
var ifaces []*types.Interface var notIfaces []types.Type From cc5cf7f13a5bd2a5adaf741f8233beea329517ee Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 13 May 2020 09:15:43 +0200 Subject: [PATCH 044/111] internal/go/gcimporter: reuse byte slices --- internal/go/gcimporter/exportdata.go | 11 ++++++----- loader/loader.go | 20 +++++++++++--------- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/internal/go/gcimporter/exportdata.go b/internal/go/gcimporter/exportdata.go index 3a48d3a19..512dc1051 100644 --- a/internal/go/gcimporter/exportdata.go +++ b/internal/go/gcimporter/exportdata.go @@ -95,7 +95,7 @@ func findExportData(r *bufio.Reader) (hdr string, length int, err error) { return } -func GetExportData(r io.ReadSeeker) ([]byte, error) { +func GetExportData(r io.ReadSeeker, b []byte) ([]byte, error) { br := bufio.NewReader(r) _, length, err := findExportData(br) if err != nil { @@ -105,10 +105,11 @@ func GetExportData(r io.ReadSeeker) ([]byte, error) { return nil, err } if length > 0 { - // OPT(dh): in theory, reusing this slice across calls to - // LoadFromExport should help. when we tried, it made no - // difference. investigate. 
- b := make([]byte, length) + if cap(b) >= length { + b = b[:length] + } else { + b = make([]byte, length) + } _, err := io.ReadFull(r, b) return b, err } else { diff --git a/loader/loader.go b/loader/loader.go index 3bf94ce81..678f3d469 100644 --- a/loader/loader.go +++ b/loader/loader.go @@ -157,12 +157,14 @@ func Load(spec *PackageSpec) (*Package, Stats, error) { stats := Stats{ Export: map[*PackageSpec]time.Duration{}, } + var b []byte for _, imp := range spec.Imports { if imp.PkgPath == "unsafe" { continue } t := time.Now() - _, err := prog.loadFromExport(imp) + var err error + _, b, err = prog.loadFromExport(imp, b) stats.Export[imp] = time.Since(t) if err != nil { return nil, stats, err @@ -171,32 +173,32 @@ func Load(spec *PackageSpec) (*Package, Stats, error) { t := time.Now() pkg, err := prog.loadFromSource(spec) if err == errMaxFileSize { - pkg, err = prog.loadFromExport(spec) + pkg, _, err = prog.loadFromExport(spec, b) } stats.Source = time.Since(t) return pkg, stats, err } // loadFromExport loads a package from export data. 
-func (prog *program) loadFromExport(spec *PackageSpec) (*Package, error) { +func (prog *program) loadFromExport(spec *PackageSpec, b []byte) (*Package, []byte, error) { // log.Printf("Loading package %s from export", spec) if spec.ExportFile == "" { - return nil, fmt.Errorf("no export data for %q", spec.ID) + return nil, b, fmt.Errorf("no export data for %q", spec.ID) } f, err := os.Open(spec.ExportFile) if err != nil { - return nil, err + return nil, b, err } defer f.Close() - b, err := gcimporter.GetExportData(f) + b, err = gcimporter.GetExportData(f, b) if err != nil { - return nil, err + return nil, b, err } _, tpkg, err := gcimporter.IImportData(prog.fset, prog.packages, b[1:], spec.PkgPath) if err != nil { - return nil, err + return nil, b, err } pkg := &Package{ PackageSpec: spec, @@ -206,7 +208,7 @@ func (prog *program) loadFromExport(spec *PackageSpec) (*Package, error) { // runtime.SetFinalizer(pkg, func(pkg *Package) { // log.Println("Unloading package", pkg.PkgPath) // }) - return pkg, nil + return pkg, b, nil } // loadFromSource loads a package from source. 
All of its dependencies From 6cac86f0dfb5a6e8a133437d2bd84720426dfd30 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 13 May 2020 09:16:25 +0200 Subject: [PATCH 045/111] ir: turn Synthetic into an enum --- callgraph/cha/cha.go | 2 +- callgraph/util.go | 2 +- ir/builder_test.go | 43 ++++++++++++++++++++-------------------- ir/create.go | 4 ++-- ir/func.go | 4 ++-- ir/irutil/switch_test.go | 2 +- ir/sanity.go | 15 +++++++------- ir/ssa.go | 29 ++++++++++++++++++++++++++- ir/stdlib_test.go | 2 +- ir/wrappers.go | 12 +++++------ stylecheck/lint.go | 4 ++-- 11 files changed, 71 insertions(+), 48 deletions(-) diff --git a/callgraph/cha/cha.go b/callgraph/cha/cha.go index 912d28cbd..985d4dd9c 100644 --- a/callgraph/cha/cha.go +++ b/callgraph/cha/cha.go @@ -71,7 +71,7 @@ func CallGraph(prog *ir.Program) *callgraph.Graph { for f := range allFuncs { if f.Signature.Recv() == nil { // Package initializers can never be address-taken. - if f.Name() == "init" && f.Synthetic == "package initializer" { + if f.Name() == "init" && f.Synthetic == ir.SyntheticPackageInitializer { continue } funcs, _ := funcsBySig.At(f.Signature).([]*ir.Function) diff --git a/callgraph/util.go b/callgraph/util.go index 0d89ecbb5..7f81964f7 100644 --- a/callgraph/util.go +++ b/callgraph/util.go @@ -103,7 +103,7 @@ func (g *Graph) DeleteSyntheticNodes() { } } for fn, cgn := range g.Nodes { - if cgn == g.Root || fn.Synthetic == "" || isInit(cgn.Func) { + if cgn == g.Root || fn.Synthetic == 0 || isInit(cgn.Func) { continue // keep } for _, eIn := range cgn.In { diff --git a/ir/builder_test.go b/ir/builder_test.go index bc39c5ad2..0648bacc1 100644 --- a/ir/builder_test.go +++ b/ir/builder_test.go @@ -16,7 +16,6 @@ import ( "os" "reflect" "sort" - "strings" "testing" "golang.org/x/tools/go/loader" @@ -117,7 +116,7 @@ func main() { for i, n := 0, mset.Len(); i < n; i++ { m := prog.MethodValue(mset.At(i)) // For external types, only synthetic wrappers have code. 
- expExt := !strings.Contains(m.Synthetic, "wrapper") + expExt := m.Synthetic != ir.SyntheticWrapper if expExt && !isEmpty(m) { t.Errorf("external method %s is non-empty: %s", m, m.Synthetic) @@ -367,28 +366,28 @@ var ( prog.Build() // Enumerate reachable synthetic functions - want := map[string]string{ - "(*P.T).g$bound": "bound method wrapper for func (*P.T).g() int", - "(P.T).f$bound": "bound method wrapper for func (P.T).f() int", - - "(*P.T).g$thunk": "thunk for func (*P.T).g() int", - "(P.T).f$thunk": "thunk for func (P.T).f() int", - "(struct{*P.T}).g$thunk": "thunk for func (*P.T).g() int", - "(struct{P.T}).f$thunk": "thunk for func (P.T).f() int", - - "(*P.T).f": "wrapper for func (P.T).f() int", - "(*struct{*P.T}).f": "wrapper for func (P.T).f() int", - "(*struct{*P.T}).g": "wrapper for func (*P.T).g() int", - "(*struct{P.T}).f": "wrapper for func (P.T).f() int", - "(*struct{P.T}).g": "wrapper for func (*P.T).g() int", - "(struct{*P.T}).f": "wrapper for func (P.T).f() int", - "(struct{*P.T}).g": "wrapper for func (*P.T).g() int", - "(struct{P.T}).f": "wrapper for func (P.T).f() int", - - "P.init": "package initializer", + want := map[string]ir.Synthetic{ + "(*P.T).g$bound": ir.SyntheticBound, + "(P.T).f$bound": ir.SyntheticBound, + + "(*P.T).g$thunk": ir.SyntheticThunk, + "(P.T).f$thunk": ir.SyntheticThunk, + "(struct{*P.T}).g$thunk": ir.SyntheticThunk, + "(struct{P.T}).f$thunk": ir.SyntheticThunk, + + "(*P.T).f": ir.SyntheticWrapper, + "(*struct{*P.T}).f": ir.SyntheticWrapper, + "(*struct{*P.T}).g": ir.SyntheticWrapper, + "(*struct{P.T}).f": ir.SyntheticWrapper, + "(*struct{P.T}).g": ir.SyntheticWrapper, + "(struct{*P.T}).f": ir.SyntheticWrapper, + "(struct{*P.T}).g": ir.SyntheticWrapper, + "(struct{P.T}).f": ir.SyntheticWrapper, + + "P.init": ir.SyntheticPackageInitializer, } for fn := range irutil.AllFunctions(prog) { - if fn.Synthetic == "" { + if fn.Synthetic == 0 { continue } name := fn.String() diff --git a/ir/create.go b/ir/create.go index 
ff81a244b..f3bb4e52e 100644 --- a/ir/create.go +++ b/ir/create.go @@ -96,7 +96,7 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { fn.source = syntax fn.initHTML(pkg.printFunc) if syntax == nil { - fn.Synthetic = "loaded from gc object file" + fn.Synthetic = SyntheticLoadedFromExportData } else { fn.functionBody = new(functionBody) } @@ -180,7 +180,7 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info * p.init = &Function{ name: "init", Signature: new(types.Signature), - Synthetic: "package initializer", + Synthetic: SyntheticPackageInitializer, Pkg: p, Prog: prog, functionBody: new(functionBody), diff --git a/ir/func.go b/ir/func.go index 386d82b67..b99dc6ae1 100644 --- a/ir/func.go +++ b/ir/func.go @@ -813,7 +813,7 @@ func WriteFunction(buf *bytes.Buffer, f *Function) { if f.Pkg != nil { fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) } - if syn := f.Synthetic; syn != "" { + if syn := f.Synthetic; syn != 0 { fmt.Fprintln(buf, "# Synthetic:", syn) } if pos := f.Pos(); pos.IsValid() { @@ -941,7 +941,7 @@ func (f *Function) newBasicBlock(comment string) *BasicBlock { // // TODO(adonovan): think harder about the API here. // -func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { +func (prog *Program) NewFunction(name string, sig *types.Signature, provenance Synthetic) *Function { return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} } diff --git a/ir/irutil/switch_test.go b/ir/irutil/switch_test.go index 5d689eed5..ef4527078 100644 --- a/ir/irutil/switch_test.go +++ b/ir/irutil/switch_test.go @@ -40,7 +40,7 @@ func TestSwitches(t *testing.T) { for _, mem := range mainPkg.Members { if fn, ok := mem.(*ir.Function); ok { - if fn.Synthetic != "" { + if fn.Synthetic != 0 { continue // e.g. 
init() } // Each (multi-line) "switch" comment within diff --git a/ir/sanity.go b/ir/sanity.go index ff9edbc64..c94f2bf83 100644 --- a/ir/sanity.go +++ b/ir/sanity.go @@ -443,16 +443,15 @@ func (s *sanity) checkFunction(fn *Function) bool { // shared across packages, or duplicated as weak symbols in a // separate-compilation model), and error.Error. if fn.Pkg == nil { - if strings.HasPrefix(fn.Synthetic, "wrapper ") || - strings.HasPrefix(fn.Synthetic, "bound ") || - strings.HasPrefix(fn.Synthetic, "thunk ") || - strings.HasSuffix(fn.name, "Error") { - // ok - } else { - s.errorf("nil Pkg") + switch fn.Synthetic { + case SyntheticWrapper, SyntheticBound, SyntheticThunk: + default: + if !strings.HasSuffix(fn.name, "Error") { + s.errorf("nil Pkg") + } } } - if src, syn := fn.Synthetic == "", fn.source != nil; src != syn { + if src, syn := fn.Synthetic == 0, fn.source != nil; src != syn { s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) } for i, l := range fn.Locals { diff --git a/ir/ssa.go b/ir/ssa.go index 49693045f..fc8e84114 100644 --- a/ir/ssa.go +++ b/ir/ssa.go @@ -279,6 +279,33 @@ type Node interface { Referrers() *[]Instruction // nil for non-Values } +type Synthetic int + +const ( + SyntheticLoadedFromExportData Synthetic = iota + 1 + SyntheticPackageInitializer + SyntheticThunk + SyntheticWrapper + SyntheticBound +) + +func (syn Synthetic) String() string { + switch syn { + case SyntheticLoadedFromExportData: + return "loaded from export data" + case SyntheticPackageInitializer: + return "package initializer" + case SyntheticThunk: + return "thunk" + case SyntheticWrapper: + return "wrapper" + case SyntheticBound: + return "bound" + default: + return fmt.Sprintf("Synthetic(%d)", syn) + } +} + // Function represents the parameters, results, and code of a function // or method. 
// @@ -322,7 +349,7 @@ type Function struct { method *types.Selection // info about provenance of synthetic methods Signature *types.Signature - Synthetic string // provenance of synthetic function; "" for true source functions + Synthetic Synthetic parent *Function // enclosing function if anon; nil if global Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) Prog *Program // enclosing program diff --git a/ir/stdlib_test.go b/ir/stdlib_test.go index 642c884ae..f6495e8f4 100644 --- a/ir/stdlib_test.go +++ b/ir/stdlib_test.go @@ -99,7 +99,7 @@ func TestStdlib(t *testing.T) { // except for unexported ones (explained at (*Function).RelString). byName := make(map[string]*ir.Function) for fn := range allFuncs { - if fn.Synthetic == "" || ast.IsExported(fn.Name()) { + if fn.Synthetic == 0 || ast.IsExported(fn.Name()) { str := fn.String() prev := byName[str] byName[str] = fn diff --git a/ir/wrappers.go b/ir/wrappers.go index 7dd334748..1d51b5dc9 100644 --- a/ir/wrappers.go +++ b/ir/wrappers.go @@ -48,19 +48,18 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function { var recv *types.Var // wrapper's receiver or thunk's params[0] name := obj.Name() - var description string + var description Synthetic var start int // first regular param if sel.Kind() == types.MethodExpr { name += "$thunk" - description = "thunk" + description = SyntheticThunk recv = sig.Params().At(0) start = 1 } else { - description = "wrapper" + description = SyntheticWrapper recv = sig.Recv() } - description = fmt.Sprintf("%s for %s", description, sel.Obj()) if prog.mode&LogSource != 0 { defer logStack("make %s to (%s)", description, recv.Type())() } @@ -180,15 +179,14 @@ func makeBound(prog *Program, obj *types.Func) *Function { defer prog.methodsMu.Unlock() fn, ok := prog.bounds[obj] if !ok { - description := fmt.Sprintf("bound method wrapper for %s", obj) if prog.mode&LogSource != 0 { - defer logStack("%s", description)() + defer logStack("%s", 
SyntheticBound)() } fn = &Function{ name: obj.Name() + "$bound", object: obj, Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver - Synthetic: description, + Synthetic: SyntheticBound, Prog: prog, functionBody: new(functionBody), } diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 7848f93a6..2055a3968 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -232,7 +232,7 @@ fnLoop: // types do not return unexported types. func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) { for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - if fn.Synthetic != "" || fn.Parent() != nil { + if fn.Synthetic != 0 || fn.Parent() != nil { continue } if !ast.IsExported(fn.Name()) || code.IsMain(pass) || code.IsInTest(pass, fn) { @@ -324,7 +324,7 @@ func CheckContextFirstArg(pass *analysis.Pass) (interface{}, error) { // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) { fnLoop: for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - if fn.Synthetic != "" || fn.Parent() != nil { + if fn.Synthetic != 0 || fn.Parent() != nil { continue } params := fn.Signature.Params() From ec989ef14853a2bf3470d6061ab63e5981219d62 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 14 May 2020 23:34:50 +0200 Subject: [PATCH 046/111] unused: support file-ignore directives --- unused/testdata/src/ignored/ignored2.go | 3 +++ unused/testdata/src/ignored/ignored3.go | 7 ++++++ unused/testdata/src/ignored/ignored4.go | 3 +++ unused/unused.go | 30 ++++++++++++++++++++----- 4 files changed, 37 insertions(+), 6 deletions(-) create mode 100644 unused/testdata/src/ignored/ignored2.go create mode 100644 unused/testdata/src/ignored/ignored3.go create mode 100644 unused/testdata/src/ignored/ignored4.go diff --git a/unused/testdata/src/ignored/ignored2.go b/unused/testdata/src/ignored/ignored2.go new file mode 100644 index 000000000..162b3833a --- /dev/null +++ 
b/unused/testdata/src/ignored/ignored2.go @@ -0,0 +1,3 @@ +package pkg + +func (t1) fn4() {} // used diff --git a/unused/testdata/src/ignored/ignored3.go b/unused/testdata/src/ignored/ignored3.go new file mode 100644 index 000000000..49d6b1902 --- /dev/null +++ b/unused/testdata/src/ignored/ignored3.go @@ -0,0 +1,7 @@ +//lint:file-ignore U1000 consider everything in here used + +package pkg + +type t9 struct{} // used + +func (t9) fn1() {} // used diff --git a/unused/testdata/src/ignored/ignored4.go b/unused/testdata/src/ignored/ignored4.go new file mode 100644 index 000000000..0b0a36eba --- /dev/null +++ b/unused/testdata/src/ignored/ignored4.go @@ -0,0 +1,3 @@ +package pkg + +func (t9) fn2() {} // used diff --git a/unused/unused.go b/unused/unused.go index 8b7911cbe..fc4130d73 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1155,7 +1155,7 @@ func (g *graph) entry(pkg *pkg) { } ignores := map[ignoredKey]struct{}{} for _, dir := range g.pkg.Directives { - if dir.Command != "ignore" { + if dir.Command != "ignore" && dir.Command != "file-ignore" { continue } if len(dir.Arguments) == 0 { @@ -1164,10 +1164,20 @@ func (g *graph) entry(pkg *pkg) { for _, check := range strings.Split(dir.Arguments[0], ",") { if check == "U1000" { pos := g.pkg.Fset.PositionFor(dir.Node.Pos(), false) - key := ignoredKey{ - pos.Filename, - pos.Line, + var key ignoredKey + switch dir.Command { + case "ignore": + key = ignoredKey{ + pos.Filename, + pos.Line, + } + case "file-ignore": + key = ignoredKey{ + pos.Filename, + -1, + } } + ignores[key] = struct{}{} break } @@ -1179,11 +1189,19 @@ func (g *graph) entry(pkg *pkg) { for obj := range g.Nodes { if obj, ok := obj.(types.Object); ok { pos := g.pkg.Fset.PositionFor(obj.Pos(), false) - key := ignoredKey{ + key1 := ignoredKey{ pos.Filename, pos.Line, } - if _, ok := ignores[key]; ok { + key2 := ignoredKey{ + pos.Filename, + -1, + } + _, ok := ignores[key1] + if !ok { + _, ok = ignores[key2] + } + if ok { g.use(obj, nil, edgeIgnored) 
// use methods and fields of ignored types From d8cf7f14797bfb9242597e176f0189adc9c98b7c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 May 2020 01:24:33 +0200 Subject: [PATCH 047/111] Add new benchmarking code --- _benchmarks/bench | 224 ------------------------------ _benchmarks/bench.sh | 31 +++++ _benchmarks/benchmark.go | 76 ---------- _benchmarks/benchmark.r | 18 --- _benchmarks/silent-staticcheck.sh | 3 + 5 files changed, 34 insertions(+), 318 deletions(-) delete mode 100644 _benchmarks/bench create mode 100755 _benchmarks/bench.sh delete mode 100644 _benchmarks/benchmark.go delete mode 100644 _benchmarks/benchmark.r create mode 100755 _benchmarks/silent-staticcheck.sh diff --git a/_benchmarks/bench b/_benchmarks/bench deleted file mode 100644 index acd2d41d0..000000000 --- a/_benchmarks/bench +++ /dev/null @@ -1,224 +0,0 @@ -Version Go Target Time Memory - -2017.1 1.11 std 23.12 2911980 -2017.1 1.11 std 23.41 2879472 -2017.1 1.11 std 24.02 2930628 -2017.1 1.11 std 23.72 2915616 -2017.1 1.11 std 22.93 2923856 -2017.1 1.11 std 24.22 2935140 -2017.1 1.11 std 23.55 2877584 -2017.1 1.11 std 23.56 2962548 -2017.1 1.11 std 23.63 3089648 -2017.1 1.11 std 23.26 2942748 - -2017.1 1.11 upspin.io/... 07.39 1150328 -2017.1 1.11 upspin.io/... 07.56 1183596 -2017.1 1.11 upspin.io/... 07.14 1292392 -2017.1 1.11 upspin.io/... 07.38 1102012 -2017.1 1.11 upspin.io/... 07.56 1311560 -2017.1 1.11 upspin.io/... 07.46 1296864 -2017.1 1.11 upspin.io/... 07.31 1137304 -2017.1 1.11 upspin.io/... 06.97 1113864 -2017.1 1.11 upspin.io/... 07.59 1246148 -2017.1 1.11 upspin.io/... 07.32 1312208 - -2017.1 1.11 perkeep.org/... 21.60 3282696 -2017.1 1.11 perkeep.org/... 22.04 3290012 -2017.1 1.11 perkeep.org/... 21.07 3353664 -2017.1 1.11 perkeep.org/... 21.07 3310224 -2017.1 1.11 perkeep.org/... 20.28 2946956 -2017.1 1.11 perkeep.org/... 20.45 2946652 -2017.1 1.11 perkeep.org/... 21.07 3080740 -2017.1 1.11 perkeep.org/... 21.75 3329972 -2017.1 1.11 perkeep.org/... 
20.29 3064512 -2017.1 1.11 perkeep.org/... 21.20 2934708 - -2017.1 1.11 bazil.org/fuse/... 4.60 485504 -2017.1 1.11 bazil.org/fuse/... 4.66 448312 -2017.1 1.11 bazil.org/fuse/... 4.13 488968 -2017.1 1.11 bazil.org/fuse/... 4.41 489336 -2017.1 1.11 bazil.org/fuse/... 4.11 488660 -2017.1 1.11 bazil.org/fuse/... 4.18 489144 -2017.1 1.11 bazil.org/fuse/... 4.11 487096 -2017.1 1.11 bazil.org/fuse/... 4.15 488868 -2017.1 1.11 bazil.org/fuse/... 4.53 490336 -2017.1 1.11 bazil.org/fuse/... 4.32 441084 - -2017.2.2 1.11 std 33.44 3091424 -2017.2.2 1.11 std 33.87 3089884 -2017.2.2 1.11 std 32.23 3151584 -2017.2.2 1.11 std 33.49 3097356 -2017.2.2 1.11 std 35.71 3160340 -2017.2.2 1.11 std 35.07 3068796 -2017.2.2 1.11 std 33.19 3155960 -2017.2.2 1.11 std 32.18 3308160 -2017.2.2 1.11 std 31.02 3197124 -2017.2.2 1.11 std 31.36 3166552 - -2017.2.2 1.11 upspin.io/... 11.86 1631452 -2017.2.2 1.11 upspin.io/... 11.73 1587260 -2017.2.2 1.11 upspin.io/... 11.60 1522088 -2017.2.2 1.11 upspin.io/... 11.66 1697168 -2017.2.2 1.11 upspin.io/... 11.41 1556716 -2017.2.2 1.11 upspin.io/... 10.77 1582556 -2017.2.2 1.11 upspin.io/... 11.32 1654276 -2017.2.2 1.11 upspin.io/... 11.95 1579132 -2017.2.2 1.11 upspin.io/... 12.42 1660664 -2017.2.2 1.11 upspin.io/... 11.43 1696480 - -2017.2.2 1.11 perkeep.org/... 35.73 3490408 -2017.2.2 1.11 perkeep.org/... 35.78 3814476 -2017.2.2 1.11 perkeep.org/... 34.49 3483144 -2017.2.2 1.11 perkeep.org/... 35.68 3848560 -2017.2.2 1.11 perkeep.org/... 35.19 3922828 -2017.2.2 1.11 perkeep.org/... 35.75 3911088 -2017.2.2 1.11 perkeep.org/... 35.34 3988724 -2017.2.2 1.11 perkeep.org/... 35.18 3989232 -2017.2.2 1.11 perkeep.org/... 34.48 3694220 -2017.2.2 1.11 perkeep.org/... 35.92 3975064 - -2017.2.2 1.11 bazil.org/fuse/... 6.10 505632 -2017.2.2 1.11 bazil.org/fuse/... 5.39 493540 -2017.2.2 1.11 bazil.org/fuse/... 5.46 497192 -2017.2.2 1.11 bazil.org/fuse/... 6.10 493396 -2017.2.2 1.11 bazil.org/fuse/... 5.53 507156 -2017.2.2 1.11 bazil.org/fuse/... 
6.17 493656 -2017.2.2 1.11 bazil.org/fuse/... 5.84 493256 -2017.2.2 1.11 bazil.org/fuse/... 5.44 514216 -2017.2.2 1.11 bazil.org/fuse/... 5.62 493516 -2017.2.2 1.11 bazil.org/fuse/... 5.98 493840 - - -068d161 1.11 std 32.87 3524588 -068d161 1.11 std 30.74 3282428 -068d161 1.11 std 32.15 3306216 -068d161 1.11 std 33.41 3486552 -068d161 1.11 std 30.44 3184928 -068d161 1.11 std 31.74 3242392 -068d161 1.11 std 32.62 3248008 -068d161 1.11 std 31.86 3460508 -068d161 1.11 std 32.62 3385512 -068d161 1.11 std 32.21 3454948 - -068d161 1.11 upspin.io/... 11.59 1989988 -068d161 1.11 upspin.io/... 11.39 1784532 -068d161 1.11 upspin.io/... 12.10 1928752 -068d161 1.11 upspin.io/... 11.84 1857596 -068d161 1.11 upspin.io/... 11.48 1841728 -068d161 1.11 upspin.io/... 11.19 1923852 -068d161 1.11 upspin.io/... 11.57 1934748 -068d161 1.11 upspin.io/... 11.35 1732500 -068d161 1.11 upspin.io/... 11.37 1931232 -068d161 1.11 upspin.io/... 11.40 1849344 - -068d161 1.11 perkeep.org/... 29.66 4618896 -068d161 1.11 perkeep.org/... 26.48 4382124 -068d161 1.11 perkeep.org/... 25.98 4113952 -068d161 1.11 perkeep.org/... 27.32 4588184 -068d161 1.11 perkeep.org/... 27.45 4608852 -068d161 1.11 perkeep.org/... 27.11 4580752 -068d161 1.11 perkeep.org/... 26.74 4617136 -068d161 1.11 perkeep.org/... 26.34 4267472 -068d161 1.11 perkeep.org/... 28.21 4493456 -068d161 1.11 perkeep.org/... 26.57 4253336 - -068d161 1.11 bazil.org/fuse/... 3.45 567048 -068d161 1.11 bazil.org/fuse/... 3.48 577632 -068d161 1.11 bazil.org/fuse/... 3.94 582388 -068d161 1.11 bazil.org/fuse/... 4.00 542004 -068d161 1.11 bazil.org/fuse/... 4.02 541608 -068d161 1.11 bazil.org/fuse/... 3.95 565080 -068d161 1.11 bazil.org/fuse/... 3.87 555460 -068d161 1.11 bazil.org/fuse/... 3.81 540440 -068d161 1.11 bazil.org/fuse/... 3.65 550076 -068d161 1.11 bazil.org/fuse/... 
4.27 558204 - - -24f0899 1.11 std 34.54 5174064 -24f0899 1.11 std 34.52 5234892 -24f0899 1.11 std 33.90 4818920 -24f0899 1.11 std 35.14 4790684 -24f0899 1.11 std 33.49 5249772 -24f0899 1.11 std 34.65 5248956 -24f0899 1.11 std 34.41 4626268 -24f0899 1.11 std 34.35 4881636 -24f0899 1.11 std 34.55 4763492 -24f0899 1.11 std 35.13 4788356 - -24f0899 1.11 upspin.io/... 08.12 1142104 -24f0899 1.11 upspin.io/... 07.90 1098844 -24f0899 1.11 upspin.io/... 07.80 1096440 -24f0899 1.11 upspin.io/... 07.84 1065252 -24f0899 1.11 upspin.io/... 07.89 1145580 -24f0899 1.11 upspin.io/... 07.91 1090000 -24f0899 1.11 upspin.io/... 08.21 1064752 -24f0899 1.11 upspin.io/... 07.98 1085524 -24f0899 1.11 upspin.io/... 08.18 1042764 -24f0899 1.11 upspin.io/... 07.99 1082360 - -24f0899 1.11 perkeep.org/... 22.08 2483684 -24f0899 1.11 perkeep.org/... 18.02 2491092 -24f0899 1.11 perkeep.org/... 17.87 2482036 -24f0899 1.11 perkeep.org/... 17.56 2444012 -24f0899 1.11 perkeep.org/... 18.10 2444124 -24f0899 1.11 perkeep.org/... 18.33 2453940 -24f0899 1.11 perkeep.org/... 17.71 2510748 -24f0899 1.11 perkeep.org/... 17.75 2515572 -24f0899 1.11 perkeep.org/... 17.88 2475480 -24f0899 1.11 perkeep.org/... 17.70 2523404 - -24f0899 1.11 bazil.org/fuse/... 2.91 338340 -24f0899 1.11 bazil.org/fuse/... 2.51 334200 -24f0899 1.11 bazil.org/fuse/... 2.26 337872 -24f0899 1.11 bazil.org/fuse/... 2.33 346636 -24f0899 1.11 bazil.org/fuse/... 3.05 335228 -24f0899 1.11 bazil.org/fuse/... 2.66 334136 -24f0899 1.11 bazil.org/fuse/... 2.80 338552 -24f0899 1.11 bazil.org/fuse/... 2.30 349576 -24f0899 1.11 bazil.org/fuse/... 2.62 351488 -24f0899 1.11 bazil.org/fuse/... 
2.48 330780 - - -b269b6e 1.11 std 40.11 4265088 -b269b6e 1.11 std 39.80 4539972 -b269b6e 1.11 std 39.52 4602568 -b269b6e 1.11 std 39.78 4518456 -b269b6e 1.11 std 39.71 4512576 -b269b6e 1.11 std 39.72 4654632 -b269b6e 1.11 std 40.52 4577604 -b269b6e 1.11 std 42.14 4190716 -b269b6e 1.11 std 39.25 4660632 -b269b6e 1.11 std 39.27 4502208 - -b269b6e 1.11 upspin.io/... 08.79 1054764 -b269b6e 1.11 upspin.io/... 08.66 975468 -b269b6e 1.11 upspin.io/... 08.68 990360 -b269b6e 1.11 upspin.io/... 08.41 976188 -b269b6e 1.11 upspin.io/... 08.59 1110452 -b269b6e 1.11 upspin.io/... 08.85 1028520 -b269b6e 1.11 upspin.io/... 08.66 1079864 -b269b6e 1.11 upspin.io/... 09.07 1025932 -b269b6e 1.11 upspin.io/... 08.53 1046612 -b269b6e 1.11 upspin.io/... 08.35 1076192 - -b269b6e 1.11 perkeep.org/... 20.00 2271160 -b269b6e 1.11 perkeep.org/... 19.57 2235624 -b269b6e 1.11 perkeep.org/... 19.31 2268772 -b269b6e 1.11 perkeep.org/... 19.25 2247516 -b269b6e 1.11 perkeep.org/... 19.33 2343708 -b269b6e 1.11 perkeep.org/... 19.30 2445352 -b269b6e 1.11 perkeep.org/... 19.32 2250472 -b269b6e 1.11 perkeep.org/... 19.31 2331324 -b269b6e 1.11 perkeep.org/... 19.96 2348392 -b269b6e 1.11 perkeep.org/... 19.44 2411592 - -b269b6e 1.11 bazil.org/fuse/... 2.47 336080 -b269b6e 1.11 bazil.org/fuse/... 2.54 330488 -b269b6e 1.11 bazil.org/fuse/... 2.72 328648 -b269b6e 1.11 bazil.org/fuse/... 2.53 329348 -b269b6e 1.11 bazil.org/fuse/... 2.86 334376 -b269b6e 1.11 bazil.org/fuse/... 2.58 335844 -b269b6e 1.11 bazil.org/fuse/... 2.68 325652 -b269b6e 1.11 bazil.org/fuse/... 2.64 334520 -b269b6e 1.11 bazil.org/fuse/... 2.27 327360 -b269b6e 1.11 bazil.org/fuse/... 2.72 348820 diff --git a/_benchmarks/bench.sh b/_benchmarks/bench.sh new file mode 100755 index 000000000..4471a7dd6 --- /dev/null +++ b/_benchmarks/bench.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env sh +set -e +# PKG="k8s.io/kubernetes/pkg/..." 
+# LABEL="k8s" +PKG="std" +LABEL=$PKG +MIN_CORES=16 +MAX_CORES=16 +SAMPLES=5 +WIPE_CACHE=1 +BIN=$(realpath ./silent-staticcheck.sh) + +go build ../cmd/staticcheck + +export GO111MODULE=off + +for cores in $(seq $MIN_CORES $MAX_CORES); do + for i in $(seq 1 $SAMPLES); do + procs=$((cores*2)) + if [ $WIPE_CACHE -ne 0 ]; then + rm -rf ~/.cache/staticcheck + fi + + out=$(env time -f "%e %M" taskset -c 0-$((procs-1)) $BIN $PKG 2>&1) + t=$(echo "$out" | cut -f1 -d" ") + m=$(echo "$out" | cut -f2 -d" ") + ns=$(printf "%s 1000000000 * p" $t | dc) + b=$((m * 1024)) + printf "BenchmarkStaticcheck-%s-%d 1 %.0f ns/op %.0f B/op\n" "$LABEL" "$procs" "$ns" "$b" + done +done diff --git a/_benchmarks/benchmark.go b/_benchmarks/benchmark.go deleted file mode 100644 index 0a6b20608..000000000 --- a/_benchmarks/benchmark.go +++ /dev/null @@ -1,76 +0,0 @@ -// +build ignore - -package main - -import ( - "bytes" - "flag" - "fmt" - "os/exec" - "strings" -) - -const theBigMerge = "0a9027c2bab9ca0d25a5db0f906fd1793774fd67" -const N = 10 - -func isAfterMerge(sha string) bool { - cmd := exec.Command("git", "merge-base", "--is-ancestor", theBigMerge, sha) - err := cmd.Run() - if err == nil { - return true - } - _ = err.(*exec.ExitError) - return false -} - -func checkout(sha string) { - cmd := exec.Command("git", "checkout", "-q", sha) - err := cmd.Run() - if err != nil { - panic(err) - } -} - -func build(tool string) { - err := exec.Command("go", "build", "-o", "/tmp/"+tool, "honnef.co/go/tools/cmd/"+tool).Run() - if err != nil { - panic(err) - } -} - -func run(tool, target string) (time, mem string) { - cmd := exec.Command("/usr/bin/time", "-f", "%e %M", "/tmp/"+tool, target) - out, _ := cmd.CombinedOutput() - lines := bytes.Split(out, []byte("\n")) - res := string(lines[len(lines)-2]) - fields := strings.Split(res, " ") - return fields[0], fields[1] -} - -func main() { - var ( - shas string - targets string - version string - ) - flag.StringVar(&shas, "shas", "HEAD", "") - 
flag.StringVar(&targets, "targets", "std", "") - flag.StringVar(&version, "version", "unknown", "") - flag.Parse() - - for _, sha := range strings.Split(shas, ",") { - tool := "megacheck" - if isAfterMerge(sha) { - tool = "staticcheck" - } - checkout(sha) - build(tool) - - for _, target := range strings.Split(targets, ",") { - for i := 0; i < N; i++ { - time, mem := run(tool, target) - fmt.Printf("%s %s %s %s %s\n", sha, version, target, time, mem) - } - } - } -} diff --git a/_benchmarks/benchmark.r b/_benchmarks/benchmark.r deleted file mode 100644 index e94310d91..000000000 --- a/_benchmarks/benchmark.r +++ /dev/null @@ -1,18 +0,0 @@ -library("ggplot2") -library("sitools") - -kb <- function(x) { f2si(x * 1000) } - -data <- read.table("bench", header=TRUE, sep=" ", colClasses=c("character", "character", "character", "numeric", "numeric")) - -ggplot(data = data, aes(x = Version, y = Time)) + - geom_point() + - scale_x_discrete(limits=data$Version) + - facet_wrap(~ Target) - -ggplot(data = data, aes(x = Version, y = Memory)) + - geom_point() + - scale_x_discrete(limits=data$Version) + - scale_y_continuous(labels = kb) + - facet_wrap(~ Target) - diff --git a/_benchmarks/silent-staticcheck.sh b/_benchmarks/silent-staticcheck.sh new file mode 100755 index 000000000..d2818e124 --- /dev/null +++ b/_benchmarks/silent-staticcheck.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env sh +/home/dominikh/prj/src/honnef.co/go/tools/cmd/staticcheck/staticcheck -checks "all" -fail "" $1 &>/dev/null +exit 0 From ef734efc8743217e4c0590e2edb48b45d4fcb603 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 May 2020 03:45:16 +0200 Subject: [PATCH 048/111] Add 2020.1.4 release notes --- doc/2020.1.html | 28 ++++++++++++++++++++++++++++ doc/staticcheck.html | 4 ++-- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/doc/2020.1.html b/doc/2020.1.html index 14f8df550..b908002ad 100644 --- a/doc/2020.1.html +++ b/doc/2020.1.html @@ -10,6 +10,7 @@

  • Staticcheck 2020.1.1 release notes
  • Staticcheck 2020.1.2 release notes
  • Staticcheck 2020.1.3 release notes
  • +
  • Staticcheck 2020.1.4 release notes
  • Introduction to Staticcheck 2020.1

    @@ -193,3 +194,30 @@

    Staticcheck 2020.1.3 release notes

    + +

    Staticcheck 2020.1.4 release notes

    + +

    + This release adds special handling for imports of the + deprecated github.com/golang/protobuf/proto package. +

    + +

    + github.com/golang/protobuf + has deprecated the proto package, but + their protoc-gen-go still imports the package and uses + one of its constants, to enforce a weak dependency on a + sufficiently new version of the legacy package. +

    + +

    + Staticcheck would flag the import of this deprecated package in all + code generated by protoc-gen-go. Instead of forcing the project to + change their project structure, we choose to ignore such imports in + code generated by protoc-gen-go. The import still gets flagged in code + not generated by protoc-gen-go. +

    + +

    + You can find more information about this in the upstream issue. +

    diff --git a/doc/staticcheck.html b/doc/staticcheck.html index c0ed953f9..22cfdc7e3 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -22,12 +22,12 @@

    Installation

    If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. - It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1.3. + It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1.4. One way of doing so would be as follows:

    cd $GOPATH/src/honnef.co/go/tools/cmd/staticcheck
    -git checkout 2020.1.3
    +git checkout 2020.1.4
     go get
     go install
     
    From 73c30d736020abd7fc1f69ff9b08d2fdbee86550 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 May 2020 07:07:09 +0200 Subject: [PATCH 049/111] S1029, SA6003: also check types with underlying type string Closes gh-743 --- internal/sharedcheck/lint.go | 2 +- .../src/CheckRangeStringRunes/LintRangeStringRunes.go | 8 +++++++- .../src/CheckRangeStringRunes/CheckRangeStringRunes.go | 8 +++++++- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/internal/sharedcheck/lint.go b/internal/sharedcheck/lint.go index e9abf0d89..d1433c4f1 100644 --- a/internal/sharedcheck/lint.go +++ b/internal/sharedcheck/lint.go @@ -26,7 +26,7 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { if val == nil { return true } - Tsrc, ok := val.X.Type().(*types.Basic) + Tsrc, ok := val.X.Type().Underlying().(*types.Basic) if !ok || Tsrc.Kind() != types.String { return true } diff --git a/simple/testdata/src/CheckRangeStringRunes/LintRangeStringRunes.go b/simple/testdata/src/CheckRangeStringRunes/LintRangeStringRunes.go index b6761a187..7facfc5c9 100644 --- a/simple/testdata/src/CheckRangeStringRunes/LintRangeStringRunes.go +++ b/simple/testdata/src/CheckRangeStringRunes/LintRangeStringRunes.go @@ -1,6 +1,8 @@ package pkg -func fn(s string) { +type String string + +func fn(s string, s2 String) { for _, r := range s { println(r) } @@ -24,4 +26,8 @@ func fn(s string) { println(r) } println(y[0]) + + for _, r := range []rune(s2) { // want `should range over string` + println(r) + } } diff --git a/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go b/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go index b6761a187..7facfc5c9 100644 --- a/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go +++ b/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go @@ -1,6 +1,8 @@ package pkg -func fn(s string) { +type String string + +func fn(s string, s2 String) { for _, r := range s { 
println(r) } @@ -24,4 +26,8 @@ func fn(s string) { println(r) } println(y[0]) + + for _, r := range []rune(s2) { // want `should range over string` + println(r) + } } From 87cefbaa37b829d7c336e580e22527d97604c941 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 16 May 2020 00:16:17 +0200 Subject: [PATCH 050/111] S1030: don't flag m[string(buf.Bytes()] The compiler optimizes []byte to string conversions in map lookups, making them much cheaper than the actual conversion. This optimization does not, however, apply to buf.String(). --- code/code.go | 9 +++++++++ simple/doc.go | 8 ++++++-- simple/lint.go | 11 +++++++++-- .../LintBytesBufferConversions.go | 3 +++ .../LintBytesBufferConversions.go.golden | 3 +++ 5 files changed, 30 insertions(+), 4 deletions(-) diff --git a/code/code.go b/code/code.go index 73aebea60..711b6d097 100644 --- a/code/code.go +++ b/code/code.go @@ -532,3 +532,12 @@ func IsGoVersion(pass *analysis.Pass, minor int) bool { func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) { pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn) } + +func PreorderStack(pass *analysis.Pass, fn func(ast.Node, []ast.Node), types ...ast.Node) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).WithStack(types, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) { + if push { + fn(n, stack) + } + return true + }) +} diff --git a/simple/doc.go b/simple/doc.go index 27297bf61..de0edc5bf 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -368,9 +368,13 @@ After: "S1030": { Title: `Use bytes.Buffer.String or bytes.Buffer.Bytes`, - Text: `bytes.Buffer has both a String and a Bytes method. It is never + Text: `bytes.Buffer has both a String and a Bytes method. It is almost never necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply -use the other method.`, +use the other method. + +The only exception to this are map lookups. 
Due to a compiler optimization, +m[string(buf.Bytes())] is more efficient than m[buf.String()]. +`, Since: "2017.1", }, diff --git a/simple/lint.go b/simple/lint.go index 5a9571deb..585c25c6e 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -160,7 +160,7 @@ func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { // The bytes package can use itself however it wants return nil, nil } - fn := func(node ast.Node) { + fn := func(node ast.Node, stack []ast.Node) { m, ok := Match(pass, checkBytesBufferConversionsQ, node) if !ok { return @@ -170,6 +170,13 @@ func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { typ := pass.TypesInfo.TypeOf(call.Fun) if typ == types.Universe.Lookup("string").Type() && code.IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") { + if _, ok := stack[len(stack)-2].(*ast.IndexExpr); ok { + // Don't flag m[string(buf.Bytes())] – thanks to a + // compiler optimization, this is actually faster than + // m[buf.String()] + return + } + report.Report(pass, call, fmt.Sprintf("should use %v.String() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)), report.FilterGenerated(), report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRs, m.State, node)))) @@ -180,7 +187,7 @@ func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { } } - code.Preorder(pass, fn, (*ast.CallExpr)(nil)) + code.PreorderStack(pass, fn, (*ast.CallExpr)(nil)) return nil, nil } diff --git a/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go b/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go index de5d1b7da..c79e40b93 100644 --- a/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go +++ b/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go @@ -13,6 +13,9 @@ func fn() { _ = string(m["key"].Bytes()) // want `should use m\["key"\]\.String\(\) 
instead of string\(m\["key"\]\.Bytes\(\)\)` _ = []byte(m["key"].String()) // want `should use m\["key"\]\.Bytes\(\) instead of \[\]byte\(m\["key"\]\.String\(\)\)` + var m2 map[string]int + _ = m2[string(buf.Bytes())] // no warning, this is more efficient than buf.String() + string := func(_ interface{}) interface{} { return nil } diff --git a/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden b/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden index 7c2be7db5..6c29a4d12 100644 --- a/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden +++ b/simple/testdata/src/CheckBytesBufferConversions/LintBytesBufferConversions.go.golden @@ -13,6 +13,9 @@ func fn() { _ = m["key"].String() // want `should use m\["key"\]\.String\(\) instead of string\(m\["key"\]\.Bytes\(\)\)` _ = m["key"].Bytes() // want `should use m\["key"\]\.Bytes\(\) instead of \[\]byte\(m\["key"\]\.String\(\)\)` + var m2 map[string]int + _ = m2[string(buf.Bytes())] // no warning, this is more efficient than buf.String() + string := func(_ interface{}) interface{} { return nil } From aa8d49aa598f91880415ab96667ee61beca81f25 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 16 May 2020 01:41:29 +0200 Subject: [PATCH 051/111] simple: use PreorderStack where appropriate --- simple/lint.go | 22 ++++++---------------- staticcheck/lint.go | 1 + 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/simple/lint.go b/simple/lint.go index 585c25c6e..9d1455c87 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -799,24 +799,13 @@ func CheckUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { } func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { - var skip ast.Node - fn := func(node ast.Node) { - // Do not suggest type conversion between pointers - if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND { - if lit, ok := unary.X.(*ast.CompositeLit); ok { - skip = 
lit - } - return - } - - if node == skip { + fn := func(node ast.Node, stack []ast.Node) { + if unary, ok := stack[len(stack)-2].(*ast.UnaryExpr); ok && unary.Op == token.AND { + // Do not suggest type conversion between pointers return } - lit, ok := node.(*ast.CompositeLit) - if !ok { - return - } + lit := node.(*ast.CompositeLit) typ1, _ := pass.TypesInfo.TypeOf(lit.Type).(*types.Named) if typ1 == nil { return @@ -924,7 +913,7 @@ func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { report.FilterGenerated(), report.Fixes(edit.Fix("use type conversion", edit.ReplaceWithNode(pass.Fset, node, r)))) } - code.Preorder(pass, fn, (*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)) + code.PreorderStack(pass, fn, (*ast.CompositeLit)(nil)) return nil, nil } @@ -1257,6 +1246,7 @@ func CheckAssertNotNil(pass *analysis.Pass) (interface{}, error) { report.ShortRange(), report.FilterGenerated()) } + // OPT(dh): merge fn1 and fn2 code.Preorder(pass, fn1, (*ast.IfStmt)(nil)) code.Preorder(pass, fn2, (*ast.IfStmt)(nil)) return nil, nil diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 54202d9b6..fc9863708 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -900,6 +900,7 @@ func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { func CheckTemplate(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) + // OPT(dh): use integer for kind var kind string switch code.CallNameAST(pass, call) { case "(*text/template.Template).Parse": From 9a1fe455724d08fc49829ac1e655fb2a0299d206 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 16 May 2020 02:01:28 +0200 Subject: [PATCH 052/111] Clean up project structure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - merge the deprecated and arg packages into the knowledge package - merge the functions package into analysis/code - merge the lint/lintdsl package into analysis/lint, analysis/code and 
analysis/edit - move the callgraph package to go/callgraph - move the code package to analysis/code - move the edit package to analysis/edit - move the facts package to analysis/facts - move the gcsizes package to go/gcsizes - move the ir package to go/ir - move the loader package to go/loader - move the report package to analysis/report - move the runner package to lintcmd/runner - move the version package to lintcmd/version - remove "// import ..." comments – modules solve this in a cleaner manner - reorganize the lint and lint/lintutil packages into the analysis/lint and lintcmd packages - move code between analysis/code, analysis/facts and analysis/lint --- {code => analysis/code}/code.go | 22 +- {functions => analysis/code}/loops.go | 4 +- analysis/code/stub.go | 10 + {functions => analysis/code}/terminates.go | 4 +- .../lintdsl.go => analysis/code/visit.go | 39 +- {edit => analysis/edit}/edit.go | 7 + {facts => analysis/facts}/deprecated.go | 0 analysis/facts/directives.go | 20 + {facts => analysis/facts}/facts_test.go | 0 {facts => analysis/facts}/generated.go | 0 {facts => analysis/facts}/purity.go | 35 +- .../testdata/src/Deprecated/Deprecated.go | 0 .../testdata/src/Purity/CheckPureFunctions.go | 0 {facts => analysis/facts}/token.go | 0 analysis/lint/lint.go | 158 ++++ {report => analysis/report}/report.go | 3 +- cmd/keyify/keyify.go | 2 +- cmd/rdeps/rdeps.go | 2 +- cmd/staticcheck/staticcheck.go | 8 +- cmd/structlayout-optimize/main.go | 2 +- cmd/structlayout-pretty/main.go | 2 +- cmd/structlayout/main.go | 4 +- facts/directives.go | 107 --- functions/stub.go | 32 - {callgraph => go/callgraph}/callgraph.go | 4 +- {callgraph => go/callgraph}/cha/cha.go | 9 +- {callgraph => go/callgraph}/cha/cha_test.go | 7 +- .../callgraph}/cha/testdata/func.go | 0 .../callgraph}/cha/testdata/iface.go | 0 .../callgraph}/cha/testdata/recv.go | 0 {callgraph => go/callgraph}/rta/rta.go | 7 +- {callgraph => go/callgraph}/rta/rta_test.go | 9 +- 
.../callgraph}/rta/testdata/func.go | 0 .../callgraph}/rta/testdata/iface.go | 0 .../callgraph}/rta/testdata/rtype.go | 0 {callgraph => go/callgraph}/static/static.go | 8 +- .../callgraph}/static/static_test.go | 7 +- {callgraph => go/callgraph}/util.go | 2 +- {gcsizes => go/gcsizes}/LICENSE | 0 {gcsizes => go/gcsizes}/sizes.go | 2 +- {ir => go/ir}/LICENSE | 0 {ir => go/ir}/blockopt.go | 0 {ir => go/ir}/builder.go | 0 {ir => go/ir}/builder_test.go | 5 +- {ir => go/ir}/const.go | 0 {ir => go/ir}/create.go | 0 {ir => go/ir}/doc.go | 2 +- {ir => go/ir}/dom.go | 0 {ir => go/ir}/emit.go | 0 {ir => go/ir}/example_test.go | 5 +- {ir => go/ir}/exits.go | 0 {ir => go/ir}/func.go | 0 {ir => go/ir}/html.go | 0 {ir => go/ir}/identical.go | 0 {ir => go/ir}/identical_17.go | 0 {ir => go/ir}/identical_test.go | 0 {ir => go/ir}/irutil/load.go | 3 +- {ir => go/ir}/irutil/load_test.go | 3 +- {ir => go/ir}/irutil/switch.go | 2 +- {ir => go/ir}/irutil/switch_test.go | 3 +- {ir => go/ir}/irutil/testdata/switches.go | 0 {ir => go/ir}/irutil/util.go | 2 +- {ir => go/ir}/irutil/visit.go | 4 +- {ir => go/ir}/lift.go | 0 {ir => go/ir}/lvalue.go | 0 {ir => go/ir}/methods.go | 0 {ir => go/ir}/mode.go | 0 {ir => go/ir}/print.go | 0 {ir => go/ir}/sanity.go | 0 {ir => go/ir}/source.go | 0 {ir => go/ir}/source_test.go | 5 +- {ir => go/ir}/ssa.go | 0 {ir => go/ir}/staticcheck.conf | 0 {ir => go/ir}/stdlib_test.go | 5 +- {ir => go/ir}/testdata/objlookup.go | 0 {ir => go/ir}/testdata/structconv.go | 0 {ir => go/ir}/testdata/valueforexpr.go | 0 {ir => go/ir}/util.go | 0 {ir => go/ir}/wrappers.go | 0 {ir => go/ir}/write.go | 0 {loader => go/loader}/buildid.go | 0 {loader => go/loader}/hash.go | 0 {loader => go/loader}/loader.go | 0 {loader => go/loader}/note.go | 0 internal/cmd/irdump/main.go | 5 +- internal/passes/buildir/buildir.go | 3 +- internal/sharedcheck/lint.go | 12 +- {arg => knowledge}/arg.go | 2 +- .../stdlib.go => knowledge/deprecated.go | 4 +- lint/LICENSE | 28 - lint/lint.go | 476 
----------- lint/lintutil/util.go | 443 ---------- lintcmd/cmd.go | 796 ++++++++++++++++++ .../util_test.go => lintcmd/cmd_test.go | 4 +- {lint => lintcmd}/directives.go | 13 +- {lint/lintutil/format => lintcmd}/format.go | 47 +- {lint => lintcmd}/lint_test.go | 15 +- {runner => lintcmd/runner}/runner.go | 36 +- {runner => lintcmd/runner}/stats.go | 3 +- {lint/lintutil => lintcmd}/stats.go | 2 +- {lint/lintutil => lintcmd}/stats_bsd.go | 2 +- {lint/lintutil => lintcmd}/stats_posix.go | 2 +- .../testdata/src/Test/file-ignores.go | 0 .../testdata/src/Test/line-ignores.go | 0 .../testdata/src/broken_dep/pkg.go | 0 .../testdata/src/broken_parse/pkg.go | 0 .../testdata/src/broken_pkgerror/broken.go | 0 .../testdata/src/broken_typeerror/pkg.go | 0 {version => lintcmd/version}/buildinfo.go | 0 {version => lintcmd/version}/buildinfo111.go | 0 {version => lintcmd/version}/version.go | 0 pattern/match.go | 8 +- simple/analysis.go | 6 +- simple/doc.go | 2 +- simple/lint.go | 80 +- staticcheck.conf | 1 - staticcheck/analysis.go | 6 +- staticcheck/buildtag.go | 2 +- staticcheck/doc.go | 2 +- staticcheck/lint.go | 126 +-- staticcheck/rules.go | 5 +- stylecheck/analysis.go | 6 +- stylecheck/doc.go | 2 +- stylecheck/lint.go | 20 +- stylecheck/names.go | 7 +- unused/unused.go | 16 +- 126 files changed, 1338 insertions(+), 1399 deletions(-) rename {code => analysis/code}/code.go (95%) rename {functions => analysis/code}/loops.go (95%) create mode 100644 analysis/code/stub.go rename {functions => analysis/code}/terminates.go (97%) rename lint/lintdsl/lintdsl.go => analysis/code/visit.go (59%) rename {edit => analysis/edit}/edit.go (91%) rename {facts => analysis/facts}/deprecated.go (100%) create mode 100644 analysis/facts/directives.go rename {facts => analysis/facts}/facts_test.go (100%) rename {facts => analysis/facts}/generated.go (100%) rename {facts => analysis/facts}/purity.go (83%) rename {facts => analysis/facts}/testdata/src/Deprecated/Deprecated.go (100%) rename {facts => 
analysis/facts}/testdata/src/Purity/CheckPureFunctions.go (100%) rename {facts => analysis/facts}/token.go (100%) create mode 100644 analysis/lint/lint.go rename {report => analysis/report}/report.go (99%) delete mode 100644 facts/directives.go delete mode 100644 functions/stub.go rename {callgraph => go/callgraph}/callgraph.go (97%) rename {callgraph => go/callgraph}/cha/cha.go (96%) rename {callgraph => go/callgraph}/cha/cha_test.go (95%) rename {callgraph => go/callgraph}/cha/testdata/func.go (100%) rename {callgraph => go/callgraph}/cha/testdata/iface.go (100%) rename {callgraph => go/callgraph}/cha/testdata/recv.go (100%) rename {callgraph => go/callgraph}/rta/rta.go (99%) rename {callgraph => go/callgraph}/rta/rta_test.go (95%) rename {callgraph => go/callgraph}/rta/testdata/func.go (100%) rename {callgraph => go/callgraph}/rta/testdata/iface.go (100%) rename {callgraph => go/callgraph}/rta/testdata/rtype.go (100%) rename {callgraph => go/callgraph}/static/static.go (85%) rename {callgraph => go/callgraph}/static/static_test.go (92%) rename {callgraph => go/callgraph}/util.go (99%) rename {gcsizes => go/gcsizes}/LICENSE (100%) rename {gcsizes => go/gcsizes}/sizes.go (98%) rename {ir => go/ir}/LICENSE (100%) rename {ir => go/ir}/blockopt.go (100%) rename {ir => go/ir}/builder.go (100%) rename {ir => go/ir}/builder_test.go (99%) rename {ir => go/ir}/const.go (100%) rename {ir => go/ir}/create.go (100%) rename {ir => go/ir}/doc.go (99%) rename {ir => go/ir}/dom.go (100%) rename {ir => go/ir}/emit.go (100%) rename {ir => go/ir}/example_test.go (98%) rename {ir => go/ir}/exits.go (100%) rename {ir => go/ir}/func.go (100%) rename {ir => go/ir}/html.go (100%) rename {ir => go/ir}/identical.go (100%) rename {ir => go/ir}/identical_17.go (100%) rename {ir => go/ir}/identical_test.go (100%) rename {ir => go/ir}/irutil/load.go (99%) rename {ir => go/ir}/irutil/load_test.go (98%) rename {ir => go/ir}/irutil/switch.go (99%) rename {ir => go/ir}/irutil/switch_test.go (98%) 
rename {ir => go/ir}/irutil/testdata/switches.go (100%) rename {ir => go/ir}/irutil/util.go (97%) rename {ir => go/ir}/irutil/visit.go (95%) rename {ir => go/ir}/lift.go (100%) rename {ir => go/ir}/lvalue.go (100%) rename {ir => go/ir}/methods.go (100%) rename {ir => go/ir}/mode.go (100%) rename {ir => go/ir}/print.go (100%) rename {ir => go/ir}/sanity.go (100%) rename {ir => go/ir}/source.go (100%) rename {ir => go/ir}/source_test.go (99%) rename {ir => go/ir}/ssa.go (100%) rename {ir => go/ir}/staticcheck.conf (100%) rename {ir => go/ir}/stdlib_test.go (98%) rename {ir => go/ir}/testdata/objlookup.go (100%) rename {ir => go/ir}/testdata/structconv.go (100%) rename {ir => go/ir}/testdata/valueforexpr.go (100%) rename {ir => go/ir}/util.go (100%) rename {ir => go/ir}/wrappers.go (100%) rename {ir => go/ir}/write.go (100%) rename {loader => go/loader}/buildid.go (100%) rename {loader => go/loader}/hash.go (100%) rename {loader => go/loader}/loader.go (100%) rename {loader => go/loader}/note.go (100%) rename {arg => knowledge}/arg.go (98%) rename deprecated/stdlib.go => knowledge/deprecated.go (98%) delete mode 100644 lint/LICENSE delete mode 100644 lint/lint.go delete mode 100644 lint/lintutil/util.go create mode 100644 lintcmd/cmd.go rename lint/lintutil/util_test.go => lintcmd/cmd_test.go (94%) rename {lint => lintcmd}/directives.go (80%) rename {lint/lintutil/format => lintcmd}/format.go (77%) rename {lint => lintcmd}/lint_test.go (93%) rename {runner => lintcmd/runner}/runner.go (97%) rename {runner => lintcmd/runner}/stats.go (98%) rename {lint/lintutil => lintcmd}/stats.go (88%) rename {lint/lintutil => lintcmd}/stats_bsd.go (88%) rename {lint/lintutil => lintcmd}/stats_posix.go (87%) rename {lint => lintcmd}/testdata/src/Test/file-ignores.go (100%) rename {lint => lintcmd}/testdata/src/Test/line-ignores.go (100%) rename {lint => lintcmd}/testdata/src/broken_dep/pkg.go (100%) rename {lint => lintcmd}/testdata/src/broken_parse/pkg.go (100%) rename {lint => 
lintcmd}/testdata/src/broken_pkgerror/broken.go (100%) rename {lint => lintcmd}/testdata/src/broken_typeerror/pkg.go (100%) rename {version => lintcmd/version}/buildinfo.go (100%) rename {version => lintcmd/version}/buildinfo111.go (100%) rename {version => lintcmd/version}/version.go (100%) delete mode 100644 staticcheck.conf diff --git a/code/code.go b/analysis/code/code.go similarity index 95% rename from code/code.go rename to analysis/code/code.go index 711b6d097..515310b2a 100644 --- a/code/code.go +++ b/analysis/code/code.go @@ -12,13 +12,12 @@ import ( "strings" "sync" + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/types/typeutil" + "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/ast/inspector" - "honnef.co/go/tools/facts" - "honnef.co/go/tools/go/types/typeutil" - "honnef.co/go/tools/ir" ) var bufferPool = &sync.Pool{ @@ -528,16 +527,3 @@ func IsGoVersion(pass *analysis.Pass, minor int) bool { version := f.Get().(int) return version >= minor } - -func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) { - pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn) -} - -func PreorderStack(pass *analysis.Pass, fn func(ast.Node, []ast.Node), types ...ast.Node) { - pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).WithStack(types, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) { - if push { - fn(n, stack) - } - return true - }) -} diff --git a/functions/loops.go b/analysis/code/loops.go similarity index 95% rename from functions/loops.go rename to analysis/code/loops.go index a8af70100..e2f263a84 100644 --- a/functions/loops.go +++ b/analysis/code/loops.go @@ -1,6 +1,6 @@ -package functions +package code -import "honnef.co/go/tools/ir" +import "honnef.co/go/tools/go/ir" type Loop struct{ *ir.BlockSet } diff --git a/analysis/code/stub.go b/analysis/code/stub.go new file 
mode 100644 index 000000000..284827409 --- /dev/null +++ b/analysis/code/stub.go @@ -0,0 +1,10 @@ +package code + +import ( + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/go/ir" +) + +func IsStub(fn *ir.Function) bool { + return facts.IsStub(fn) +} diff --git a/functions/terminates.go b/analysis/code/terminates.go similarity index 97% rename from functions/terminates.go rename to analysis/code/terminates.go index c4984673f..39d93129e 100644 --- a/functions/terminates.go +++ b/analysis/code/terminates.go @@ -1,9 +1,9 @@ -package functions +package code import ( "go/types" - "honnef.co/go/tools/ir" + "honnef.co/go/tools/go/ir" ) // Terminates reports whether fn is supposed to return, that is if it diff --git a/lint/lintdsl/lintdsl.go b/analysis/code/visit.go similarity index 59% rename from lint/lintdsl/lintdsl.go rename to analysis/code/visit.go index 4408aff25..f8bf2d169 100644 --- a/lint/lintdsl/lintdsl.go +++ b/analysis/code/visit.go @@ -1,22 +1,28 @@ -// Package lintdsl provides helpers for implementing static analysis -// checks. Dot-importing this package is encouraged. 
-package lintdsl +package code import ( "bytes" - "fmt" "go/ast" "go/format" - "golang.org/x/tools/go/analysis" "honnef.co/go/tools/pattern" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" ) -func Inspect(node ast.Node, fn func(node ast.Node) bool) { - if node == nil { - return - } - ast.Inspect(node, fn) +func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn) +} + +func PreorderStack(pass *analysis.Pass, fn func(ast.Node, []ast.Node), types ...ast.Node) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).WithStack(types, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) { + if push { + fn(n, stack) + } + return true + }) } func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matcher, bool) { @@ -43,16 +49,3 @@ func MatchAndEdit(pass *analysis.Pass, before, after pattern.Pattern, node ast.N }} return m, edit, true } - -func Selector(x, sel string) *ast.SelectorExpr { - return &ast.SelectorExpr{ - X: &ast.Ident{Name: x}, - Sel: &ast.Ident{Name: sel}, - } -} - -// ExhaustiveTypeSwitch panics when called. It can be used to ensure -// that type switches are exhaustive. 
-func ExhaustiveTypeSwitch(v interface{}) { - panic(fmt.Sprintf("internal error: unhandled case %T", v)) -} diff --git a/edit/edit.go b/analysis/edit/edit.go similarity index 91% rename from edit/edit.go rename to analysis/edit/edit.go index f4cfba234..90bc5f8cc 100644 --- a/edit/edit.go +++ b/analysis/edit/edit.go @@ -65,3 +65,10 @@ func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix { TextEdits: edits, } } + +func Selector(x, sel string) *ast.SelectorExpr { + return &ast.SelectorExpr{ + X: &ast.Ident{Name: x}, + Sel: &ast.Ident{Name: sel}, + } +} diff --git a/facts/deprecated.go b/analysis/facts/deprecated.go similarity index 100% rename from facts/deprecated.go rename to analysis/facts/deprecated.go diff --git a/analysis/facts/directives.go b/analysis/facts/directives.go new file mode 100644 index 000000000..800fce2e0 --- /dev/null +++ b/analysis/facts/directives.go @@ -0,0 +1,20 @@ +package facts + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/analysis/lint" +) + +func directives(pass *analysis.Pass) (interface{}, error) { + return lint.ParseDirectives(pass.Files, pass.Fset), nil +} + +var Directives = &analysis.Analyzer{ + Name: "directives", + Doc: "extracts linter directives", + Run: directives, + RunDespiteErrors: true, + ResultType: reflect.TypeOf([]lint.Directive{}), +} diff --git a/facts/facts_test.go b/analysis/facts/facts_test.go similarity index 100% rename from facts/facts_test.go rename to analysis/facts/facts_test.go diff --git a/facts/generated.go b/analysis/facts/generated.go similarity index 100% rename from facts/generated.go rename to analysis/facts/generated.go diff --git a/facts/purity.go b/analysis/facts/purity.go similarity index 83% rename from facts/purity.go rename to analysis/facts/purity.go index 099ee23e3..d708c841c 100644 --- a/facts/purity.go +++ b/analysis/facts/purity.go @@ -4,10 +4,10 @@ import ( "go/types" "reflect" - "golang.org/x/tools/go/analysis" - 
"honnef.co/go/tools/functions" + "honnef.co/go/tools/go/ir" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/ir" + + "golang.org/x/tools/go/analysis" ) type IsPure struct{} @@ -54,6 +54,33 @@ var pureStdlib = map[string]struct{}{ "(*net/http.Request).WithContext": {}, } +// IsStub reports whether a function is a stub. A function is +// considered a stub if it has no instructions or if all it does is +// return a constant value. +func IsStub(fn *ir.Function) bool { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case *ir.Const: + // const naturally has no side-effects + case *ir.Panic: + // panic is a stub if it only uses constants + case *ir.Return: + // return is a stub if it only uses constants + case *ir.DebugRef: + case *ir.Jump: + // if there are no disallowed instructions, then we're + // only jumping to the exit block (or possibly + // somewhere else that's stubby?) + default: + // all other instructions are assumed to do actual work + return false + } + } + } + return true +} + func purity(pass *analysis.Pass) (interface{}, error) { seen := map[*ir.Function]struct{}{} irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg @@ -83,7 +110,7 @@ func purity(pass *analysis.Pass) (interface{}, error) { } }() - if functions.IsStub(fn) { + if IsStub(fn) { return false } diff --git a/facts/testdata/src/Deprecated/Deprecated.go b/analysis/facts/testdata/src/Deprecated/Deprecated.go similarity index 100% rename from facts/testdata/src/Deprecated/Deprecated.go rename to analysis/facts/testdata/src/Deprecated/Deprecated.go diff --git a/facts/testdata/src/Purity/CheckPureFunctions.go b/analysis/facts/testdata/src/Purity/CheckPureFunctions.go similarity index 100% rename from facts/testdata/src/Purity/CheckPureFunctions.go rename to analysis/facts/testdata/src/Purity/CheckPureFunctions.go diff --git a/facts/token.go b/analysis/facts/token.go similarity index 100% rename from facts/token.go rename to 
analysis/facts/token.go diff --git a/analysis/lint/lint.go b/analysis/lint/lint.go new file mode 100644 index 000000000..fc256d747 --- /dev/null +++ b/analysis/lint/lint.go @@ -0,0 +1,158 @@ +// Package lint provides abstractions on top of go/analysis. +package lint + +import ( + "errors" + "flag" + "fmt" + "go/ast" + "go/build" + "go/token" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" +) + +type Documentation struct { + Title string + Text string + Since string + NonDefault bool + Options []string +} + +func (doc *Documentation) String() string { + b := &strings.Builder{} + fmt.Fprintf(b, "%s\n\n", doc.Title) + if doc.Text != "" { + fmt.Fprintf(b, "%s\n\n", doc.Text) + } + fmt.Fprint(b, "Available since\n ") + if doc.Since == "" { + fmt.Fprint(b, "unreleased") + } else { + fmt.Fprintf(b, "%s", doc.Since) + } + if doc.NonDefault { + fmt.Fprint(b, ", non-default") + } + fmt.Fprint(b, "\n") + if len(doc.Options) > 0 { + fmt.Fprintf(b, "\nOptions\n") + for _, opt := range doc.Options { + fmt.Fprintf(b, " %s", opt) + } + fmt.Fprint(b, "\n") + } + return b.String() +} + +func newVersionFlag() flag.Getter { + tags := build.Default.ReleaseTags + v := tags[len(tags)-1][2:] + version := new(VersionFlag) + if err := version.Set(v); err != nil { + panic(fmt.Sprintf("internal error: %s", err)) + } + return version +} + +type VersionFlag int + +func (v *VersionFlag) String() string { + return fmt.Sprintf("1.%d", *v) +} + +func (v *VersionFlag) Set(s string) error { + if len(s) < 3 { + return errors.New("invalid Go version") + } + if s[0] != '1' { + return errors.New("invalid Go version") + } + if s[1] != '.' 
{ + return errors.New("invalid Go version") + } + i, err := strconv.Atoi(s[2:]) + *v = VersionFlag(i) + return err +} + +func (v *VersionFlag) Get() interface{} { + return int(*v) +} + +func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*analysis.Analyzer) map[string]*analysis.Analyzer { + out := make(map[string]*analysis.Analyzer, len(analyzers)) + for k, v := range analyzers { + vc := *v + out[k] = &vc + + vc.Name = k + doc, ok := docs[k] + if !ok { + panic(fmt.Sprintf("missing documentation for check %s", k)) + } + vc.Doc = doc.String() + if vc.Flags.Usage == nil { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(newVersionFlag(), "go", "Target Go version") + vc.Flags = *fs + } + } + return out +} + +// ExhaustiveTypeSwitch panics when called. It can be used to ensure +// that type switches are exhaustive. +func ExhaustiveTypeSwitch(v interface{}) { + panic(fmt.Sprintf("internal error: unhandled case %T", v)) +} + +// A directive is a comment of the form '//lint: +// [arguments...]'. It represents instructions to the static analysis +// tool. 
+type Directive struct { + Command string + Arguments []string + Directive *ast.Comment + Node ast.Node +} + +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive { + var dirs []Directive + for _, f := range files { + // OPT(dh): in our old code, we skip all the commentmap work if we + // couldn't find any directives, benchmark if that's actually + // worth doing + cm := ast.NewCommentMap(fset, f, f.Comments) + for node, cgs := range cm { + for _, cg := range cgs { + for _, c := range cg.List { + if !strings.HasPrefix(c.Text, "//lint:") { + continue + } + cmd, args := parseDirective(c.Text) + d := Directive{ + Command: cmd, + Arguments: args, + Directive: c, + Node: node, + } + dirs = append(dirs, d) + } + } + } + } + return dirs +} diff --git a/report/report.go b/analysis/report/report.go similarity index 99% rename from report/report.go rename to analysis/report/report.go index 5b5343617..2985334d3 100644 --- a/report/report.go +++ b/analysis/report/report.go @@ -8,9 +8,10 @@ import ( "path/filepath" "strings" + "honnef.co/go/tools/analysis/facts" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" - "honnef.co/go/tools/facts" ) type Options struct { diff --git a/cmd/keyify/keyify.go b/cmd/keyify/keyify.go index 95c920e6e..45f8ed8a0 100644 --- a/cmd/keyify/keyify.go +++ b/cmd/keyify/keyify.go @@ -16,7 +16,7 @@ import ( "os" "path/filepath" - "honnef.co/go/tools/version" + "honnef.co/go/tools/lintcmd/version" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/buildutil" diff --git a/cmd/rdeps/rdeps.go b/cmd/rdeps/rdeps.go index f4a3d0c74..711e35ee2 100644 --- a/cmd/rdeps/rdeps.go +++ 
b/cmd/rdeps/rdeps.go @@ -12,7 +12,7 @@ import ( "go/build" "os" - "honnef.co/go/tools/version" + "honnef.co/go/tools/lintcmd/version" "github.com/kisielk/gotool" "golang.org/x/tools/go/buildutil" diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 87bed55c0..d3871a432 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -1,12 +1,12 @@ // staticcheck analyses Go code and makes it better. -package main // import "honnef.co/go/tools/cmd/staticcheck" +package main import ( "log" "os" "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/lint/lintutil" + "honnef.co/go/tools/lintcmd" "honnef.co/go/tools/simple" "honnef.co/go/tools/staticcheck" "honnef.co/go/tools/stylecheck" @@ -14,7 +14,7 @@ import ( ) func main() { - fs := lintutil.FlagSet("staticcheck") + fs := lintcmd.FlagSet("staticcheck") debug := fs.String("debug.unused-graph", "", "Write unused's object graph to `file`") fs.Parse(os.Args[1:]) @@ -38,5 +38,5 @@ func main() { unused.Debug = f } - lintutil.ProcessFlagSet(cs, fs) + lintcmd.ProcessFlagSet(cs, fs) } diff --git a/cmd/structlayout-optimize/main.go b/cmd/structlayout-optimize/main.go index dd9c1f3b5..0c9571cf0 100644 --- a/cmd/structlayout-optimize/main.go +++ b/cmd/structlayout-optimize/main.go @@ -11,8 +11,8 @@ import ( "sort" "strings" + "honnef.co/go/tools/lintcmd/version" st "honnef.co/go/tools/structlayout" - "honnef.co/go/tools/version" ) var ( diff --git a/cmd/structlayout-pretty/main.go b/cmd/structlayout-pretty/main.go index a75192c9d..3c2123e5b 100644 --- a/cmd/structlayout-pretty/main.go +++ b/cmd/structlayout-pretty/main.go @@ -10,8 +10,8 @@ import ( "os" "strings" + "honnef.co/go/tools/lintcmd/version" st "honnef.co/go/tools/structlayout" - "honnef.co/go/tools/version" ) var ( diff --git a/cmd/structlayout/main.go b/cmd/structlayout/main.go index 157920767..6818422d7 100644 --- a/cmd/structlayout/main.go +++ b/cmd/structlayout/main.go @@ -10,9 +10,9 @@ import ( "log" "os" - 
"honnef.co/go/tools/gcsizes" + "honnef.co/go/tools/go/gcsizes" + "honnef.co/go/tools/lintcmd/version" st "honnef.co/go/tools/structlayout" - "honnef.co/go/tools/version" "golang.org/x/tools/go/packages" ) diff --git a/facts/directives.go b/facts/directives.go deleted file mode 100644 index 04cee52aa..000000000 --- a/facts/directives.go +++ /dev/null @@ -1,107 +0,0 @@ -package facts - -import ( - "go/ast" - "go/token" - "path/filepath" - "reflect" - "strings" - - "golang.org/x/tools/go/analysis" -) - -// A directive is a comment of the form '//lint: -// [arguments...]'. It represents instructions to the static analysis -// tool. -type Directive struct { - Command string - Arguments []string - Directive *ast.Comment - Node ast.Node -} - -type SerializedDirective struct { - Command string - Arguments []string - // The position of the comment - DirectivePosition token.Position - // The position of the node that the comment is attached to - NodePosition token.Position -} - -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil - } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} - -func directives(pass *analysis.Pass) (interface{}, error) { - return ParseDirectives(pass.Files, pass.Fset), nil -} - -func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive { - var dirs []Directive - for _, f := range files { - // OPT(dh): in our old code, we skip all the commentmap work if we - // couldn't find any directives, benchmark if that's actually - // worth doing - cm := ast.NewCommentMap(fset, f, f.Comments) - for node, cgs := range cm { - for _, cg := range cgs { - for _, c := range cg.List { - if !strings.HasPrefix(c.Text, "//lint:") { - continue - } - cmd, args := parseDirective(c.Text) - d := Directive{ - Command: cmd, 
- Arguments: args, - Directive: c, - Node: node, - } - dirs = append(dirs, d) - } - } - } - } - return dirs -} - -// duplicated from report.DisplayPosition to break import cycle -func displayPosition(fset *token.FileSet, p token.Pos) token.Position { - if p == token.NoPos { - return token.Position{} - } - - // Only use the adjusted position if it points to another Go file. - // This means we'll point to the original file for cgo files, but - // we won't point to a YACC grammar file. - pos := fset.PositionFor(p, false) - adjPos := fset.PositionFor(p, true) - - if filepath.Ext(adjPos.Filename) == ".go" { - return adjPos - } - - return pos -} - -var Directives = &analysis.Analyzer{ - Name: "directives", - Doc: "extracts linter directives", - Run: directives, - RunDespiteErrors: true, - ResultType: reflect.TypeOf([]Directive{}), -} - -func SerializeDirective(dir Directive, fset *token.FileSet) SerializedDirective { - return SerializedDirective{ - Command: dir.Command, - Arguments: dir.Arguments, - DirectivePosition: displayPosition(fset, dir.Directive.Pos()), - NodePosition: displayPosition(fset, dir.Node.Pos()), - } -} diff --git a/functions/stub.go b/functions/stub.go deleted file mode 100644 index 4d5de10b8..000000000 --- a/functions/stub.go +++ /dev/null @@ -1,32 +0,0 @@ -package functions - -import ( - "honnef.co/go/tools/ir" -) - -// IsStub reports whether a function is a stub. A function is -// considered a stub if it has no instructions or if all it does is -// return a constant value. 
-func IsStub(fn *ir.Function) bool { - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - switch instr.(type) { - case *ir.Const: - // const naturally has no side-effects - case *ir.Panic: - // panic is a stub if it only uses constants - case *ir.Return: - // return is a stub if it only uses constants - case *ir.DebugRef: - case *ir.Jump: - // if there are no disallowed instructions, then we're - // only jumping to the exit block (or possibly - // somewhere else that's stubby?) - default: - // all other instructions are assumed to do actual work - return false - } - } - } - return true -} diff --git a/callgraph/callgraph.go b/go/callgraph/callgraph.go similarity index 97% rename from callgraph/callgraph.go rename to go/callgraph/callgraph.go index 9539df9a4..ef1a0d01f 100644 --- a/callgraph/callgraph.go +++ b/go/callgraph/callgraph.go @@ -32,7 +32,7 @@ in the call graph; they are treated like built-in operators of the language. */ -package callgraph // import "honnef.co/go/tools/callgraph" +package callgraph // TODO(adonovan): add a function to eliminate wrappers from the // callgraph, preserving topology. @@ -43,7 +43,7 @@ import ( "fmt" "go/token" - "honnef.co/go/tools/ir" + "honnef.co/go/tools/go/ir" ) // A Graph represents a call graph. diff --git a/callgraph/cha/cha.go b/go/callgraph/cha/cha.go similarity index 96% rename from callgraph/cha/cha.go rename to go/callgraph/cha/cha.go index 985d4dd9c..449cec4e7 100644 --- a/callgraph/cha/cha.go +++ b/go/callgraph/cha/cha.go @@ -21,15 +21,16 @@ // and all concrete types are put into interfaces, it is sound to run on // partial programs, such as libraries without a main or test function. 
// -package cha // import "honnef.co/go/tools/callgraph/cha" +package cha import ( "go/types" + "honnef.co/go/tools/go/callgraph" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/types/typeutil" - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) // CallGraph computes the call graph of the specified program using the diff --git a/callgraph/cha/cha_test.go b/go/callgraph/cha/cha_test.go similarity index 95% rename from callgraph/cha/cha_test.go rename to go/callgraph/cha/cha_test.go index f2b770ee3..7efd096af 100644 --- a/callgraph/cha/cha_test.go +++ b/go/callgraph/cha/cha_test.go @@ -22,10 +22,11 @@ import ( "strings" "testing" + "honnef.co/go/tools/go/callgraph" + "honnef.co/go/tools/go/callgraph/cha" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/loader" - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/callgraph/cha" - "honnef.co/go/tools/ir/irutil" ) var inputs = []string{ diff --git a/callgraph/cha/testdata/func.go b/go/callgraph/cha/testdata/func.go similarity index 100% rename from callgraph/cha/testdata/func.go rename to go/callgraph/cha/testdata/func.go diff --git a/callgraph/cha/testdata/iface.go b/go/callgraph/cha/testdata/iface.go similarity index 100% rename from callgraph/cha/testdata/iface.go rename to go/callgraph/cha/testdata/iface.go diff --git a/callgraph/cha/testdata/recv.go b/go/callgraph/cha/testdata/recv.go similarity index 100% rename from callgraph/cha/testdata/recv.go rename to go/callgraph/cha/testdata/recv.go diff --git a/callgraph/rta/rta.go b/go/callgraph/rta/rta.go similarity index 99% rename from callgraph/rta/rta.go rename to go/callgraph/rta/rta.go index bf2995eb8..fe71a15fd 100644 --- a/callgraph/rta/rta.go +++ b/go/callgraph/rta/rta.go @@ -40,7 +40,7 @@ // cmd/callgraph tool on its own source takes ~2.1s for RTA and ~5.4s // for points-to analysis. 
// -package rta // import "honnef.co/go/tools/callgraph/rta" +package rta // TODO(adonovan): test it by connecting it to the interpreter and // replacing all "unreachable" functions by a special intrinsic, and @@ -50,9 +50,10 @@ import ( "fmt" "go/types" + "honnef.co/go/tools/go/callgraph" + "honnef.co/go/tools/go/ir" + "golang.org/x/tools/go/types/typeutil" - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/ir" ) // A Result holds the results of Rapid Type Analysis, which includes the diff --git a/callgraph/rta/rta_test.go b/go/callgraph/rta/rta_test.go similarity index 95% rename from callgraph/rta/rta_test.go rename to go/callgraph/rta/rta_test.go index e586db72b..c37356b7c 100644 --- a/callgraph/rta/rta_test.go +++ b/go/callgraph/rta/rta_test.go @@ -22,11 +22,12 @@ import ( "strings" "testing" + "honnef.co/go/tools/go/callgraph" + "honnef.co/go/tools/go/callgraph/rta" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/loader" - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/callgraph/rta" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) var inputs = []string{ diff --git a/callgraph/rta/testdata/func.go b/go/callgraph/rta/testdata/func.go similarity index 100% rename from callgraph/rta/testdata/func.go rename to go/callgraph/rta/testdata/func.go diff --git a/callgraph/rta/testdata/iface.go b/go/callgraph/rta/testdata/iface.go similarity index 100% rename from callgraph/rta/testdata/iface.go rename to go/callgraph/rta/testdata/iface.go diff --git a/callgraph/rta/testdata/rtype.go b/go/callgraph/rta/testdata/rtype.go similarity index 100% rename from callgraph/rta/testdata/rtype.go rename to go/callgraph/rta/testdata/rtype.go diff --git a/callgraph/static/static.go b/go/callgraph/static/static.go similarity index 85% rename from callgraph/static/static.go rename to go/callgraph/static/static.go index a4c73b76b..415f5c712 100644 --- a/callgraph/static/static.go +++ b/go/callgraph/static/static.go @@ -1,11 
+1,11 @@ // Package static computes the call graph of a Go program containing // only static call edges. -package static // import "honnef.co/go/tools/callgraph/static" +package static import ( - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" + "honnef.co/go/tools/go/callgraph" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" ) // CallGraph computes the call graph of the specified program diff --git a/callgraph/static/static_test.go b/go/callgraph/static/static_test.go similarity index 92% rename from callgraph/static/static_test.go rename to go/callgraph/static/static_test.go index a50c60324..e0a0bf415 100644 --- a/callgraph/static/static_test.go +++ b/go/callgraph/static/static_test.go @@ -13,10 +13,11 @@ import ( "sort" "testing" + "honnef.co/go/tools/go/callgraph" + "honnef.co/go/tools/go/callgraph/static" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/loader" - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/callgraph/static" - "honnef.co/go/tools/ir/irutil" ) const input = `package P diff --git a/callgraph/util.go b/go/callgraph/util.go similarity index 99% rename from callgraph/util.go rename to go/callgraph/util.go index 7f81964f7..d86b53ec8 100644 --- a/callgraph/util.go +++ b/go/callgraph/util.go @@ -4,7 +4,7 @@ package callgraph -import "honnef.co/go/tools/ir" +import "honnef.co/go/tools/go/ir" // This file provides various utilities over call graphs, such as // visitation and path search. diff --git a/gcsizes/LICENSE b/go/gcsizes/LICENSE similarity index 100% rename from gcsizes/LICENSE rename to go/gcsizes/LICENSE diff --git a/gcsizes/sizes.go b/go/gcsizes/sizes.go similarity index 98% rename from gcsizes/sizes.go rename to go/gcsizes/sizes.go index 4857418c9..d6bbf4734 100644 --- a/gcsizes/sizes.go +++ b/go/gcsizes/sizes.go @@ -4,7 +4,7 @@ // Package gcsizes provides a types.Sizes implementation that adheres // to the rules used by the gc compiler. 
-package gcsizes // import "honnef.co/go/tools/gcsizes" +package gcsizes import ( "go/build" diff --git a/ir/LICENSE b/go/ir/LICENSE similarity index 100% rename from ir/LICENSE rename to go/ir/LICENSE diff --git a/ir/blockopt.go b/go/ir/blockopt.go similarity index 100% rename from ir/blockopt.go rename to go/ir/blockopt.go diff --git a/ir/builder.go b/go/ir/builder.go similarity index 100% rename from ir/builder.go rename to go/ir/builder.go diff --git a/ir/builder_test.go b/go/ir/builder_test.go similarity index 99% rename from ir/builder_test.go rename to go/ir/builder_test.go index 0648bacc1..17e204bbb 100644 --- a/ir/builder_test.go +++ b/go/ir/builder_test.go @@ -18,9 +18,10 @@ import ( "sort" "testing" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/loader" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) func isEmpty(f *ir.Function) bool { return f.Blocks == nil } diff --git a/ir/const.go b/go/ir/const.go similarity index 100% rename from ir/const.go rename to go/ir/const.go diff --git a/ir/create.go b/go/ir/create.go similarity index 100% rename from ir/create.go rename to go/ir/create.go diff --git a/ir/doc.go b/go/ir/doc.go similarity index 99% rename from ir/doc.go rename to go/ir/doc.go index a5f42e4f4..87c54c55e 100644 --- a/ir/doc.go +++ b/go/ir/doc.go @@ -126,4 +126,4 @@ // domains of source locations, ast.Nodes, types.Objects, // ir.Values/Instructions. 
// -package ir // import "honnef.co/go/tools/ir" +package ir diff --git a/ir/dom.go b/go/ir/dom.go similarity index 100% rename from ir/dom.go rename to go/ir/dom.go diff --git a/ir/emit.go b/go/ir/emit.go similarity index 100% rename from ir/emit.go rename to go/ir/emit.go diff --git a/ir/example_test.go b/go/ir/example_test.go similarity index 98% rename from ir/example_test.go rename to go/ir/example_test.go index 0e016aaf9..216e02b0e 100644 --- a/ir/example_test.go +++ b/go/ir/example_test.go @@ -14,9 +14,10 @@ import ( "log" "os" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/packages" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) const hello = ` diff --git a/ir/exits.go b/go/ir/exits.go similarity index 100% rename from ir/exits.go rename to go/ir/exits.go diff --git a/ir/func.go b/go/ir/func.go similarity index 100% rename from ir/func.go rename to go/ir/func.go diff --git a/ir/html.go b/go/ir/html.go similarity index 100% rename from ir/html.go rename to go/ir/html.go diff --git a/ir/identical.go b/go/ir/identical.go similarity index 100% rename from ir/identical.go rename to go/ir/identical.go diff --git a/ir/identical_17.go b/go/ir/identical_17.go similarity index 100% rename from ir/identical_17.go rename to go/ir/identical_17.go diff --git a/ir/identical_test.go b/go/ir/identical_test.go similarity index 100% rename from ir/identical_test.go rename to go/ir/identical_test.go diff --git a/ir/irutil/load.go b/go/ir/irutil/load.go similarity index 99% rename from ir/irutil/load.go rename to go/ir/irutil/load.go index a62df49ea..1e83effa1 100644 --- a/ir/irutil/load.go +++ b/go/ir/irutil/load.go @@ -11,9 +11,10 @@ import ( "go/token" "go/types" + "honnef.co/go/tools/go/ir" + "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" - "honnef.co/go/tools/ir" ) type Options struct { diff --git a/ir/irutil/load_test.go b/go/ir/irutil/load_test.go similarity index 98% rename from 
ir/irutil/load_test.go rename to go/ir/irutil/load_test.go index fce0add3f..970411cb2 100644 --- a/ir/irutil/load_test.go +++ b/go/ir/irutil/load_test.go @@ -15,8 +15,9 @@ import ( "strings" "testing" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/packages" - "honnef.co/go/tools/ir/irutil" ) const hello = `package main diff --git a/ir/irutil/switch.go b/go/ir/irutil/switch.go similarity index 99% rename from ir/irutil/switch.go rename to go/ir/irutil/switch.go index f44cbca9e..e7654e008 100644 --- a/ir/irutil/switch.go +++ b/go/ir/irutil/switch.go @@ -24,7 +24,7 @@ import ( "go/token" "go/types" - "honnef.co/go/tools/ir" + "honnef.co/go/tools/go/ir" ) // A ConstCase represents a single constant comparison. diff --git a/ir/irutil/switch_test.go b/go/ir/irutil/switch_test.go similarity index 98% rename from ir/irutil/switch_test.go rename to go/ir/irutil/switch_test.go index ef4527078..beeb9823e 100644 --- a/ir/irutil/switch_test.go +++ b/go/ir/irutil/switch_test.go @@ -15,8 +15,9 @@ import ( "strings" "testing" + "honnef.co/go/tools/go/ir" + "golang.org/x/tools/go/loader" - "honnef.co/go/tools/ir" ) func TestSwitches(t *testing.T) { diff --git a/ir/irutil/testdata/switches.go b/go/ir/irutil/testdata/switches.go similarity index 100% rename from ir/irutil/testdata/switches.go rename to go/ir/irutil/testdata/switches.go diff --git a/ir/irutil/util.go b/go/ir/irutil/util.go similarity index 97% rename from ir/irutil/util.go rename to go/ir/irutil/util.go index 04b25f5f9..badff17f2 100644 --- a/ir/irutil/util.go +++ b/go/ir/irutil/util.go @@ -1,7 +1,7 @@ package irutil import ( - "honnef.co/go/tools/ir" + "honnef.co/go/tools/go/ir" ) func Reachable(from, to *ir.BasicBlock) bool { diff --git a/ir/irutil/visit.go b/go/ir/irutil/visit.go similarity index 95% rename from ir/irutil/visit.go rename to go/ir/irutil/visit.go index 657c9cde7..f6d0503dd 100644 --- a/ir/irutil/visit.go +++ b/go/ir/irutil/visit.go @@ -2,9 +2,9 @@ // Use of this source code is governed 
by a BSD-style // license that can be found in the LICENSE file. -package irutil // import "honnef.co/go/tools/ir/irutil" +package irutil -import "honnef.co/go/tools/ir" +import "honnef.co/go/tools/go/ir" // This file defines utilities for visiting the IR of // a Program. diff --git a/ir/lift.go b/go/ir/lift.go similarity index 100% rename from ir/lift.go rename to go/ir/lift.go diff --git a/ir/lvalue.go b/go/ir/lvalue.go similarity index 100% rename from ir/lvalue.go rename to go/ir/lvalue.go diff --git a/ir/methods.go b/go/ir/methods.go similarity index 100% rename from ir/methods.go rename to go/ir/methods.go diff --git a/ir/mode.go b/go/ir/mode.go similarity index 100% rename from ir/mode.go rename to go/ir/mode.go diff --git a/ir/print.go b/go/ir/print.go similarity index 100% rename from ir/print.go rename to go/ir/print.go diff --git a/ir/sanity.go b/go/ir/sanity.go similarity index 100% rename from ir/sanity.go rename to go/ir/sanity.go diff --git a/ir/source.go b/go/ir/source.go similarity index 100% rename from ir/source.go rename to go/ir/source.go diff --git a/ir/source_test.go b/go/ir/source_test.go similarity index 99% rename from ir/source_test.go rename to go/ir/source_test.go index c3f010f4b..16aa0561b 100644 --- a/ir/source_test.go +++ b/go/ir/source_test.go @@ -21,11 +21,12 @@ import ( "strings" "testing" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/expect" "golang.org/x/tools/go/loader" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) func TestObjValueLookup(t *testing.T) { diff --git a/ir/ssa.go b/go/ir/ssa.go similarity index 100% rename from ir/ssa.go rename to go/ir/ssa.go diff --git a/ir/staticcheck.conf b/go/ir/staticcheck.conf similarity index 100% rename from ir/staticcheck.conf rename to go/ir/staticcheck.conf diff --git a/ir/stdlib_test.go b/go/ir/stdlib_test.go similarity index 98% rename from ir/stdlib_test.go rename to go/ir/stdlib_test.go 
index f6495e8f4..232f52ccf 100644 --- a/ir/stdlib_test.go +++ b/go/ir/stdlib_test.go @@ -22,9 +22,10 @@ import ( "testing" "time" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "golang.org/x/tools/go/packages" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) func bytesAllocated() uint64 { diff --git a/ir/testdata/objlookup.go b/go/ir/testdata/objlookup.go similarity index 100% rename from ir/testdata/objlookup.go rename to go/ir/testdata/objlookup.go diff --git a/ir/testdata/structconv.go b/go/ir/testdata/structconv.go similarity index 100% rename from ir/testdata/structconv.go rename to go/ir/testdata/structconv.go diff --git a/ir/testdata/valueforexpr.go b/go/ir/testdata/valueforexpr.go similarity index 100% rename from ir/testdata/valueforexpr.go rename to go/ir/testdata/valueforexpr.go diff --git a/ir/util.go b/go/ir/util.go similarity index 100% rename from ir/util.go rename to go/ir/util.go diff --git a/ir/wrappers.go b/go/ir/wrappers.go similarity index 100% rename from ir/wrappers.go rename to go/ir/wrappers.go diff --git a/ir/write.go b/go/ir/write.go similarity index 100% rename from ir/write.go rename to go/ir/write.go diff --git a/loader/buildid.go b/go/loader/buildid.go similarity index 100% rename from loader/buildid.go rename to go/loader/buildid.go diff --git a/loader/hash.go b/go/loader/hash.go similarity index 100% rename from loader/hash.go rename to go/loader/hash.go diff --git a/loader/loader.go b/go/loader/loader.go similarity index 100% rename from loader/loader.go rename to go/loader/loader.go diff --git a/loader/note.go b/go/loader/note.go similarity index 100% rename from loader/note.go rename to go/loader/note.go diff --git a/internal/cmd/irdump/main.go b/internal/cmd/irdump/main.go index 8727715c3..b39ddc3c9 100644 --- a/internal/cmd/irdump/main.go +++ b/internal/cmd/irdump/main.go @@ -12,10 +12,11 @@ import ( "os" "runtime/pprof" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + 
"golang.org/x/tools/go/buildutil" "golang.org/x/tools/go/packages" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" ) // flags diff --git a/internal/passes/buildir/buildir.go b/internal/passes/buildir/buildir.go index 884884f55..645e216a9 100644 --- a/internal/passes/buildir/buildir.go +++ b/internal/passes/buildir/buildir.go @@ -15,8 +15,9 @@ import ( "go/types" "reflect" + "honnef.co/go/tools/go/ir" + "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/ir" ) type willExit struct{} diff --git a/internal/sharedcheck/lint.go b/internal/sharedcheck/lint.go index d1433c4f1..6b0d31ba8 100644 --- a/internal/sharedcheck/lint.go +++ b/internal/sharedcheck/lint.go @@ -4,11 +4,11 @@ import ( "go/ast" "go/types" - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/code" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/go/ir" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/ir" - . "honnef.co/go/tools/lint/lintdsl" + + "golang.org/x/tools/go/analysis" ) func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { @@ -65,7 +65,9 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { return true } - Inspect(fn.Source(), cb) + if source := fn.Source(); source != nil { + ast.Inspect(source, cb) + } } return nil, nil } diff --git a/arg/arg.go b/knowledge/arg.go similarity index 98% rename from arg/arg.go rename to knowledge/arg.go index 1e7f30db4..c14ed73e4 100644 --- a/arg/arg.go +++ b/knowledge/arg.go @@ -1,4 +1,4 @@ -package arg +package knowledge var args = map[string]int{ "(*encoding/json.Decoder).Decode.v": 0, diff --git a/deprecated/stdlib.go b/knowledge/deprecated.go similarity index 98% rename from deprecated/stdlib.go rename to knowledge/deprecated.go index cabb8500a..ffed387c9 100644 --- a/deprecated/stdlib.go +++ b/knowledge/deprecated.go @@ -1,11 +1,11 @@ -package deprecated +package knowledge type Deprecation struct { DeprecatedSince int AlternativeAvailableSince int } -var Stdlib = 
map[string]Deprecation{ +var StdlibDeprecations = map[string]Deprecation{ // FIXME(dh): AllowBinary isn't being detected as deprecated // because the comment has a newline right after "Deprecated:" "go/build.AllowBinary": {7, 7}, diff --git a/lint/LICENSE b/lint/LICENSE deleted file mode 100644 index 796130a12..000000000 --- a/lint/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -Copyright (c) 2013 The Go Authors. All rights reserved. -Copyright (c) 2016 Dominik Honnef. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/lint/lint.go b/lint/lint.go deleted file mode 100644 index f2a4f0114..000000000 --- a/lint/lint.go +++ /dev/null @@ -1,476 +0,0 @@ -// Package lint provides the foundation for tools like staticcheck -package lint // import "honnef.co/go/tools/lint" - -import ( - "fmt" - "go/token" - "os" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - "unicode" - - "honnef.co/go/tools/config" - "honnef.co/go/tools/runner" - "honnef.co/go/tools/unused" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" -) - -type Documentation struct { - Title string - Text string - Since string - NonDefault bool - Options []string -} - -func (doc *Documentation) String() string { - b := &strings.Builder{} - fmt.Fprintf(b, "%s\n\n", doc.Title) - if doc.Text != "" { - fmt.Fprintf(b, "%s\n\n", doc.Text) - } - fmt.Fprint(b, "Available since\n ") - if doc.Since == "" { - fmt.Fprint(b, "unreleased") - } else { - fmt.Fprintf(b, "%s", doc.Since) - } - if doc.NonDefault { - fmt.Fprint(b, ", non-default") - } - fmt.Fprint(b, "\n") - if len(doc.Options) > 0 { - fmt.Fprintf(b, "\nOptions\n") - for _, opt := range doc.Options { - fmt.Fprintf(b, " %s", opt) - } - fmt.Fprint(b, "\n") - } - return b.String() -} - -type ignore interface { - Match(p Problem) bool -} - -type lineIgnore struct { - File string - Line int - Checks []string - Matched bool - Pos token.Position -} - -func (li *lineIgnore) Match(p Problem) bool { - pos := p.Position - if pos.Filename != li.File || pos.Line != li.Line { - return false - } - for _, c := range li.Checks { - if m, _ := filepath.Match(c, p.Category); m { - li.Matched = true - return true - } - } - return false -} - -func (li *lineIgnore) String() string { - matched := "not matched" - if li.Matched { - matched = "matched" - } - return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) -} - -type fileIgnore struct { - File string - Checks []string -} - -func (fi *fileIgnore) Match(p Problem) bool 
{ - if p.Position.Filename != fi.File { - return false - } - for _, c := range fi.Checks { - if m, _ := filepath.Match(c, p.Category); m { - return true - } - } - return false -} - -type Severity uint8 - -const ( - Error Severity = iota - Warning - Ignored -) - -// Problem represents a problem in some source code. -type Problem struct { - runner.Diagnostic - Severity Severity -} - -func (p Problem) equal(o Problem) bool { - return p.Position == o.Position && - p.End == o.End && - p.Message == o.Message && - p.Category == o.Category && - p.Severity == o.Severity -} - -func (p *Problem) String() string { - return fmt.Sprintf("%s (%s)", p.Message, p.Category) -} - -// A Linter lints Go source code. -type Linter struct { - Checkers []*analysis.Analyzer - Config config.Config - Runner *runner.Runner -} - -func failed(res runner.Result) []Problem { - var problems []Problem - - for _, e := range res.Errors { - switch e := e.(type) { - case packages.Error: - msg := e.Msg - if len(msg) != 0 && msg[0] == '\n' { - // TODO(dh): See https://github.com/golang/go/issues/32363 - msg = msg[1:] - } - - var posn token.Position - if e.Pos == "" { - // Under certain conditions (malformed package - // declarations, multiple packages in the same - // directory), go list emits an error on stderr - // instead of JSON. Those errors do not have - // associated position information in - // go/packages.Error, even though the output on - // stderr may contain it. 
- if p, n, err := parsePos(msg); err == nil { - if abs, err := filepath.Abs(p.Filename); err == nil { - p.Filename = abs - } - posn = p - msg = msg[n+2:] - } - } else { - var err error - posn, _, err = parsePos(e.Pos) - if err != nil { - panic(fmt.Sprintf("internal error: %s", e)) - } - } - p := Problem{ - Diagnostic: runner.Diagnostic{ - Position: posn, - Message: msg, - Category: "compile", - }, - Severity: Error, - } - problems = append(problems, p) - case error: - p := Problem{ - Diagnostic: runner.Diagnostic{ - Position: token.Position{}, - Message: e.Error(), - Category: "compile", - }, - Severity: Error, - } - problems = append(problems, p) - } - } - - return problems -} - -type unusedKey struct { - pkgPath string - base string - line int - name string -} - -type unusedPair struct { - key unusedKey - obj unused.SerializedObject -} - -func success(allowedChecks map[string]bool, res runner.ResultData) []Problem { - diags := res.Diagnostics - var problems []Problem - for _, diag := range diags { - if !allowedChecks[diag.Category] { - continue - } - problems = append(problems, Problem{Diagnostic: diag}) - } - return problems -} - -func filterIgnored(problems []Problem, res runner.ResultData, allowedAnalyzers map[string]bool) ([]Problem, error) { - couldveMatched := func(ig *lineIgnore) bool { - for _, c := range ig.Checks { - if c == "U1000" { - // We never want to flag ignores for U1000, - // because U1000 isn't local to a single - // package. For example, an identifier may - // only be used by tests, in which case an - // ignore would only fire when not analyzing - // tests. To avoid spurious "useless ignore" - // warnings, just never flag U1000. - return false - } - - // Even though the runner always runs all analyzers, we - // still only flag unmatched ignores for the set of - // analyzers the user has expressed interest in. That way, - // `staticcheck -checks=SA1000` won't complain about an - // unmatched ignore for an unrelated check. 
- if allowedAnalyzers[c] { - return true - } - } - - return false - } - - ignores, moreProblems := parseDirectives(res.Directives) - - for _, ig := range ignores { - for i := range problems { - p := &problems[i] - if ig.Match(*p) { - p.Severity = Ignored - } - } - - if ig, ok := ig.(*lineIgnore); ok && !ig.Matched && couldveMatched(ig) { - p := Problem{ - Diagnostic: runner.Diagnostic{ - Position: ig.Pos, - Message: "this linter directive didn't match anything; should it be removed?", - Category: "", - }, - } - moreProblems = append(moreProblems, p) - } - } - - return append(problems, moreProblems...), nil -} - -func NewLinter(cfg config.Config) (*Linter, error) { - r, err := runner.New(cfg) - if err != nil { - return nil, err - } - return &Linter{ - Config: cfg, - Runner: r, - }, nil -} - -func (l *Linter) SetGoVersion(n int) { - l.Runner.GoVersion = n -} - -func (l *Linter) Lint(cfg *packages.Config, patterns []string) (problems []Problem, warnings []string, err error) { - results, err := l.Runner.Run(cfg, l.Checkers, patterns) - if err != nil { - return nil, nil, err - } - - if len(results) == 0 && err == nil { - // TODO(dh): emulate Go's behavior more closely once we have - // access to go list's Match field. - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", patterns) - } - - analyzerNames := make([]string, len(l.Checkers)) - for i, a := range l.Checkers { - analyzerNames[i] = a.Name - } - - used := map[unusedKey]bool{} - var unuseds []unusedPair - for _, res := range results { - if len(res.Errors) > 0 && !res.Failed { - panic("package has errors but isn't marked as failed") - } - if res.Failed { - problems = append(problems, failed(res)...) 
- } else { - if res.Skipped { - warnings = append(warnings, fmt.Sprintf("skipped package %s because it is too large", res.Package)) - continue - } - - if !res.Initial { - continue - } - - allowedAnalyzers := FilterAnalyzerNames(analyzerNames, res.Config.Checks) - resd, err := res.Load() - if err != nil { - return nil, nil, err - } - ps := success(allowedAnalyzers, resd) - filtered, err := filterIgnored(ps, resd, allowedAnalyzers) - if err != nil { - return nil, nil, err - } - problems = append(problems, filtered...) - - for _, obj := range resd.Unused.Used { - // FIXME(dh): pick the object whose filename does not include $GOROOT - key := unusedKey{ - pkgPath: res.Package.PkgPath, - base: filepath.Base(obj.Position.Filename), - line: obj.Position.Line, - name: obj.Name, - } - used[key] = true - } - - if allowedAnalyzers["U1000"] { - for _, obj := range resd.Unused.Unused { - key := unusedKey{ - pkgPath: res.Package.PkgPath, - base: filepath.Base(obj.Position.Filename), - line: obj.Position.Line, - name: obj.Name, - } - unuseds = append(unuseds, unusedPair{key, obj}) - if _, ok := used[key]; !ok { - used[key] = false - } - } - } - } - } - - for _, uo := range unuseds { - if used[uo.key] { - continue - } - if uo.obj.InGenerated { - continue - } - problems = append(problems, Problem{ - Diagnostic: runner.Diagnostic{ - Position: uo.obj.DisplayPosition, - Message: fmt.Sprintf("%s %s is unused", uo.obj.Kind, uo.obj.Name), - Category: "U1000", - }, - }) - } - - if len(problems) == 0 { - return nil, warnings, nil - } - - sort.Slice(problems, func(i, j int) bool { - pi := problems[i].Position - pj := problems[j].Position - - if pi.Filename != pj.Filename { - return pi.Filename < pj.Filename - } - if pi.Line != pj.Line { - return pi.Line < pj.Line - } - if pi.Column != pj.Column { - return pi.Column < pj.Column - } - - return problems[i].Message < problems[j].Message - }) - - var out []Problem - out = append(out, problems[0]) - for i, p := range problems[1:] { - // We may 
encounter duplicate problems because one file - // can be part of many packages. - if !problems[i].equal(p) { - out = append(out, p) - } - } - return out, warnings, nil -} - -func FilterAnalyzerNames(analyzers []string, checks []string) map[string]bool { - allowedChecks := map[string]bool{} - - for _, check := range checks { - b := true - if len(check) > 1 && check[0] == '-' { - b = false - check = check[1:] - } - if check == "*" || check == "all" { - // Match all - for _, c := range analyzers { - allowedChecks[c] = b - } - } else if strings.HasSuffix(check, "*") { - // Glob - prefix := check[:len(check)-1] - isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 - - for _, a := range analyzers { - idx := strings.IndexFunc(a, func(r rune) bool { return unicode.IsNumber(r) }) - if isCat { - // Glob is S*, which should match S1000 but not SA1000 - cat := a[:idx] - if prefix == cat { - allowedChecks[a] = b - } - } else { - // Glob is S1* - if strings.HasPrefix(a, prefix) { - allowedChecks[a] = b - } - } - } - } else { - // Literal check name - allowedChecks[check] = b - } - } - return allowedChecks -} - -var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?`) - -func parsePos(pos string) (token.Position, int, error) { - if pos == "-" || pos == "" { - return token.Position{}, 0, nil - } - parts := posRe.FindStringSubmatch(pos) - if parts == nil { - return token.Position{}, 0, fmt.Errorf("malformed position %q", pos) - } - file := parts[1] - line, _ := strconv.Atoi(parts[2]) - col, _ := strconv.Atoi(parts[3]) - return token.Position{ - Filename: file, - Line: line, - Column: col, - }, len(parts[0]), nil -} diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go deleted file mode 100644 index 98e3fa43c..000000000 --- a/lint/lintutil/util.go +++ /dev/null @@ -1,443 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package lintutil provides helpers for writing linter command lines. -package lintutil // import "honnef.co/go/tools/lint/lintutil" - -import ( - "crypto/sha256" - "errors" - "flag" - "fmt" - "go/build" - "go/token" - "io" - "log" - "os" - "os/signal" - "regexp" - "runtime" - "runtime/pprof" - "strconv" - "strings" - "sync" - "time" - - "honnef.co/go/tools/config" - "honnef.co/go/tools/internal/cache" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil/format" - "honnef.co/go/tools/loader" - "honnef.co/go/tools/runner" - "honnef.co/go/tools/version" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/buildutil" - "golang.org/x/tools/go/packages" -) - -func newVersionFlag() flag.Getter { - tags := build.Default.ReleaseTags - v := tags[len(tags)-1][2:] - version := new(VersionFlag) - if err := version.Set(v); err != nil { - panic(fmt.Sprintf("internal error: %s", err)) - } - return version -} - -type VersionFlag int - -func (v *VersionFlag) String() string { - return fmt.Sprintf("1.%d", *v) -} - -func (v *VersionFlag) Set(s string) error { - if len(s) < 3 { - return errors.New("invalid Go version") - } - if s[0] != '1' { - return errors.New("invalid Go version") - } - if s[1] != '.' { - return errors.New("invalid Go version") - } - i, err := strconv.Atoi(s[2:]) - *v = VersionFlag(i) - return err -} - -func (v *VersionFlag) Get() interface{} { - return int(*v) -} - -func usage(name string, flags *flag.FlagSet) func() { - return func() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] files... 
# must be a single package\n", name) - fmt.Fprintf(os.Stderr, "Flags:\n") - flags.PrintDefaults() - } -} - -type list []string - -func (list *list) String() string { - return `"` + strings.Join(*list, ",") + `"` -} - -func (list *list) Set(s string) error { - if s == "" { - *list = nil - return nil - } - - *list = strings.Split(s, ",") - return nil -} - -func FlagSet(name string) *flag.FlagSet { - flags := flag.NewFlagSet("", flag.ExitOnError) - flags.Usage = usage(name, flags) - flags.String("tags", "", "List of `build tags`") - flags.Bool("tests", true, "Include tests") - flags.Bool("version", false, "Print version and exit") - flags.Bool("show-ignored", false, "Don't filter ignored problems") - flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") - flags.String("explain", "", "Print description of `check`") - - flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") - flags.String("debug.memprofile", "", "Write memory profile to `file`") - flags.Bool("debug.version", false, "Print detailed version information about this program") - flags.Bool("debug.no-compile-errors", false, "Don't print compile errors") - flags.String("debug.measure-analyzers", "", "Write analysis measurements to `file`. 
`file` will be opened for appending if it already exists.") - - checks := list{"inherit"} - fail := list{"all"} - flags.Var(&checks, "checks", "Comma-separated list of `checks` to enable.") - flags.Var(&fail, "fail", "Comma-separated list of `checks` that can cause a non-zero exit status.") - - tags := build.Default.ReleaseTags - v := tags[len(tags)-1][2:] - version := new(VersionFlag) - if err := version.Set(v); err != nil { - panic(fmt.Sprintf("internal error: %s", err)) - } - - flags.Var(version, "go", "Target Go `version` in the format '1.x'") - return flags -} - -func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) { - for _, c := range cs { - if c.Name == check { - return c, true - } - } - return nil, false -} - -func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { - tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) - tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) - goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) - formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string) - printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) - showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) - explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string) - - cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) - memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) - debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool) - debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool) - - var measureAnalyzers func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) - if path := fs.Lookup("debug.measure-analyzers").Value.(flag.Getter).Get().(string); path != "" { - f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600) - if err != nil { - log.Fatal(err) - } - - mu := &sync.Mutex{} - measureAnalyzers = func(analysis 
*analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) { - mu.Lock() - defer mu.Unlock() - // FIXME(dh): print pkg.ID - if _, err := fmt.Fprintf(f, "%s\t%s\t%d\n", analysis.Name, pkg, d.Nanoseconds()); err != nil { - log.Println("error writing analysis measurements:", err) - } - } - } - - cfg := config.Config{} - cfg.Checks = *fs.Lookup("checks").Value.(*list) - - exit := func(code int) { - if cpuProfile != "" { - pprof.StopCPUProfile() - } - if memProfile != "" { - f, err := os.Create(memProfile) - if err != nil { - panic(err) - } - runtime.GC() - pprof.WriteHeapProfile(f) - } - os.Exit(code) - } - if cpuProfile != "" { - f, err := os.Create(cpuProfile) - if err != nil { - log.Fatal(err) - } - pprof.StartCPUProfile(f) - } - - if debugVersion { - version.Verbose() - exit(0) - } - - if printVersion { - version.Print() - exit(0) - } - - // Validate that the tags argument is well-formed. go/packages - // doesn't detect malformed build flags and returns unhelpful - // errors. - tf := buildutil.TagsFlag{} - if err := tf.Set(tags); err != nil { - fmt.Fprintln(os.Stderr, fmt.Errorf("invalid value %q for flag -tags: %s", tags, err)) - exit(1) - } - - if explain != "" { - var haystack []*analysis.Analyzer - haystack = append(haystack, cs...) 
- check, ok := findCheck(haystack, explain) - if !ok { - fmt.Fprintln(os.Stderr, "Couldn't find check", explain) - exit(1) - } - if check.Doc == "" { - fmt.Fprintln(os.Stderr, explain, "has no documentation") - exit(1) - } - fmt.Println(check.Doc) - exit(0) - } - - var f format.Formatter - switch formatter { - case "text": - f = format.Text{W: os.Stdout} - case "stylish": - f = &format.Stylish{W: os.Stdout} - case "json": - f = format.JSON{W: os.Stdout} - default: - fmt.Fprintf(os.Stderr, "unsupported output format %q\n", formatter) - exit(2) - } - - ps, warnings, err := doLint(cs, fs.Args(), &Options{ - Tags: tags, - LintTests: tests, - GoVersion: goVersion, - Config: cfg, - PrintAnalyzerMeasurement: measureAnalyzers, - }) - if err != nil { - fmt.Fprintln(os.Stderr, err) - exit(1) - } - - for _, w := range warnings { - fmt.Fprintln(os.Stderr, "warning:", w) - } - - var ( - numErrors int - numWarnings int - numIgnored int - ) - - fail := *fs.Lookup("fail").Value.(*list) - analyzerNames := make([]string, len(cs)) - for i, a := range cs { - analyzerNames[i] = a.Name - } - shouldExit := lint.FilterAnalyzerNames(analyzerNames, fail) - shouldExit["compile"] = true - - for _, p := range ps { - if p.Category == "compile" && debugNoCompile { - continue - } - if p.Severity == lint.Ignored && !showIgnored { - numIgnored++ - continue - } - if shouldExit[p.Category] { - numErrors++ - } else { - p.Severity = lint.Warning - numWarnings++ - } - f.Format(p) - } - if f, ok := f.(format.Statter); ok { - f.Stats(len(ps), numErrors, numWarnings, numIgnored) - } - - if f, ok := f.(format.DocumentationMentioner); ok && (numErrors > 0 || numWarnings > 0) && len(os.Args) > 0 { - f.MentionCheckDocumentation(os.Args[0]) - } - - if numErrors > 0 { - exit(1) - } - exit(0) -} - -type Options struct { - Config config.Config - - Tags string - LintTests bool - GoVersion int - PrintAnalyzerMeasurement func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) -} - -func 
computeSalt() ([]byte, error) { - if version.Version != "devel" { - return []byte(version.Version), nil - } - p, err := os.Executable() - if err != nil { - return nil, err - } - f, err := os.Open(p) - if err != nil { - return nil, err - } - defer f.Close() - h := sha256.New() - if _, err := io.Copy(h, f); err != nil { - return nil, err - } - return h.Sum(nil), nil -} - -func doLint(cs []*analysis.Analyzer, paths []string, opt *Options) ([]lint.Problem, []string, error) { - salt, err := computeSalt() - if err != nil { - return nil, nil, fmt.Errorf("could not compute salt for cache: %s", err) - } - cache.SetSalt(salt) - - if opt == nil { - opt = &Options{} - } - - l, err := lint.NewLinter(opt.Config) - if err != nil { - return nil, nil, err - } - l.Checkers = cs - l.SetGoVersion(opt.GoVersion) - l.Runner.Stats.PrintAnalyzerMeasurement = opt.PrintAnalyzerMeasurement - - cfg := &packages.Config{} - if opt.LintTests { - cfg.Tests = true - } - if opt.Tags != "" { - cfg.BuildFlags = append(cfg.BuildFlags, "-tags", opt.Tags) - } - - printStats := func() { - // Individual stats are read atomically, but overall there - // is no synchronisation. For printing rough progress - // information, this doesn't matter. 
- switch l.Runner.Stats.State() { - case runner.StateInitializing: - fmt.Fprintln(os.Stderr, "Status: initializing") - case runner.StateLoadPackageGraph: - fmt.Fprintln(os.Stderr, "Status: loading package graph") - case runner.StateBuildActionGraph: - fmt.Fprintln(os.Stderr, "Status: building action graph") - case runner.StateProcessing: - fmt.Fprintf(os.Stderr, "Packages: %d/%d initial, %d/%d total; Workers: %d/%d\n", - l.Runner.Stats.ProcessedInitialPackages(), - l.Runner.Stats.InitialPackages(), - l.Runner.Stats.ProcessedPackages(), - l.Runner.Stats.TotalPackages(), - l.Runner.ActiveWorkers(), - l.Runner.TotalWorkers(), - ) - case runner.StateFinalizing: - fmt.Fprintln(os.Stderr, "Status: finalizing") - } - } - if len(infoSignals) > 0 { - ch := make(chan os.Signal, 1) - signal.Notify(ch, infoSignals...) - defer signal.Stop(ch) - go func() { - for range ch { - printStats() - } - }() - } - return l.Lint(cfg, paths) -} - -var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) - -func parsePos(pos string) token.Position { - if pos == "-" || pos == "" { - return token.Position{} - } - parts := posRe.FindStringSubmatch(pos) - if parts == nil { - panic(fmt.Sprintf("internal error: malformed position %q", pos)) - } - file := parts[1] - line, _ := strconv.Atoi(parts[2]) - col, _ := strconv.Atoi(parts[3]) - return token.Position{ - Filename: file, - Line: line, - Column: col, - } -} - -func InitializeAnalyzers(docs map[string]*lint.Documentation, analyzers map[string]*analysis.Analyzer) map[string]*analysis.Analyzer { - out := make(map[string]*analysis.Analyzer, len(analyzers)) - for k, v := range analyzers { - vc := *v - out[k] = &vc - - vc.Name = k - doc, ok := docs[k] - if !ok { - panic(fmt.Sprintf("missing documentation for check %s", k)) - } - vc.Doc = doc.String() - if vc.Flags.Usage == nil { - fs := flag.NewFlagSet("", flag.PanicOnError) - fs.Var(newVersionFlag(), "go", "Target Go version") - vc.Flags = *fs - } - } - return out -} diff --git a/lintcmd/cmd.go 
b/lintcmd/cmd.go new file mode 100644 index 000000000..f44b3c726 --- /dev/null +++ b/lintcmd/cmd.go @@ -0,0 +1,796 @@ +package lintcmd + +import ( + "crypto/sha256" + "flag" + "fmt" + "go/build" + "go/token" + "io" + "log" + "os" + "os/signal" + "path/filepath" + "regexp" + "runtime" + "runtime/pprof" + "sort" + "strconv" + "strings" + "sync" + "time" + "unicode" + + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/config" + "honnef.co/go/tools/go/loader" + "honnef.co/go/tools/internal/cache" + "honnef.co/go/tools/lintcmd/runner" + "honnef.co/go/tools/lintcmd/version" + "honnef.co/go/tools/unused" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/buildutil" + "golang.org/x/tools/go/packages" +) + +type ignore interface { + Match(p problem) bool +} + +type lineIgnore struct { + File string + Line int + Checks []string + Matched bool + Pos token.Position +} + +func (li *lineIgnore) Match(p problem) bool { + pos := p.Position + if pos.Filename != li.File || pos.Line != li.Line { + return false + } + for _, c := range li.Checks { + if m, _ := filepath.Match(c, p.Category); m { + li.Matched = true + return true + } + } + return false +} + +func (li *lineIgnore) String() string { + matched := "not matched" + if li.Matched { + matched = "matched" + } + return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) +} + +type fileIgnore struct { + File string + Checks []string +} + +func (fi *fileIgnore) Match(p problem) bool { + if p.Position.Filename != fi.File { + return false + } + for _, c := range fi.Checks { + if m, _ := filepath.Match(c, p.Category); m { + return true + } + } + return false +} + +type severity uint8 + +const ( + severityError severity = iota + severityWarning + severityIgnored +) + +func (s severity) String() string { + switch s { + case severityError: + return "error" + case severityWarning: + return "warning" + case severityIgnored: + return "ignored" + default: + return 
fmt.Sprintf("Severity(%d)", s) + } +} + +// problem represents a problem in some source code. +type problem struct { + runner.Diagnostic + Severity severity +} + +func (p problem) equal(o problem) bool { + return p.Position == o.Position && + p.End == o.End && + p.Message == o.Message && + p.Category == o.Category && + p.Severity == o.Severity +} + +func (p *problem) String() string { + return fmt.Sprintf("%s (%s)", p.Message, p.Category) +} + +// A linter lints Go source code. +type linter struct { + Checkers []*analysis.Analyzer + Config config.Config + Runner *runner.Runner +} + +func failed(res runner.Result) []problem { + var problems []problem + + for _, e := range res.Errors { + switch e := e.(type) { + case packages.Error: + msg := e.Msg + if len(msg) != 0 && msg[0] == '\n' { + // TODO(dh): See https://github.com/golang/go/issues/32363 + msg = msg[1:] + } + + var posn token.Position + if e.Pos == "" { + // Under certain conditions (malformed package + // declarations, multiple packages in the same + // directory), go list emits an error on stderr + // instead of JSON. Those errors do not have + // associated position information in + // go/packages.Error, even though the output on + // stderr may contain it. 
+ if p, n, err := parsePos(msg); err == nil { + if abs, err := filepath.Abs(p.Filename); err == nil { + p.Filename = abs + } + posn = p + msg = msg[n+2:] + } + } else { + var err error + posn, _, err = parsePos(e.Pos) + if err != nil { + panic(fmt.Sprintf("internal error: %s", e)) + } + } + p := problem{ + Diagnostic: runner.Diagnostic{ + Position: posn, + Message: msg, + Category: "compile", + }, + Severity: severityError, + } + problems = append(problems, p) + case error: + p := problem{ + Diagnostic: runner.Diagnostic{ + Position: token.Position{}, + Message: e.Error(), + Category: "compile", + }, + Severity: severityError, + } + problems = append(problems, p) + } + } + + return problems +} + +type unusedKey struct { + pkgPath string + base string + line int + name string +} + +type unusedPair struct { + key unusedKey + obj unused.SerializedObject +} + +func success(allowedChecks map[string]bool, res runner.ResultData) []problem { + diags := res.Diagnostics + var problems []problem + for _, diag := range diags { + if !allowedChecks[diag.Category] { + continue + } + problems = append(problems, problem{Diagnostic: diag}) + } + return problems +} + +func filterIgnored(problems []problem, res runner.ResultData, allowedAnalyzers map[string]bool) ([]problem, error) { + couldveMatched := func(ig *lineIgnore) bool { + for _, c := range ig.Checks { + if c == "U1000" { + // We never want to flag ignores for U1000, + // because U1000 isn't local to a single + // package. For example, an identifier may + // only be used by tests, in which case an + // ignore would only fire when not analyzing + // tests. To avoid spurious "useless ignore" + // warnings, just never flag U1000. + return false + } + + // Even though the runner always runs all analyzers, we + // still only flag unmatched ignores for the set of + // analyzers the user has expressed interest in. That way, + // `staticcheck -checks=SA1000` won't complain about an + // unmatched ignore for an unrelated check. 
+ if allowedAnalyzers[c] { + return true + } + } + + return false + } + + ignores, moreProblems := parseDirectives(res.Directives) + + for _, ig := range ignores { + for i := range problems { + p := &problems[i] + if ig.Match(*p) { + p.Severity = severityIgnored + } + } + + if ig, ok := ig.(*lineIgnore); ok && !ig.Matched && couldveMatched(ig) { + p := problem{ + Diagnostic: runner.Diagnostic{ + Position: ig.Pos, + Message: "this linter directive didn't match anything; should it be removed?", + Category: "", + }, + } + moreProblems = append(moreProblems, p) + } + } + + return append(problems, moreProblems...), nil +} + +func newLinter(cfg config.Config) (*linter, error) { + r, err := runner.New(cfg) + if err != nil { + return nil, err + } + return &linter{ + Config: cfg, + Runner: r, + }, nil +} + +func (l *linter) SetGoVersion(n int) { + l.Runner.GoVersion = n +} + +func (l *linter) Lint(cfg *packages.Config, patterns []string) (problems []problem, warnings []string, err error) { + results, err := l.Runner.Run(cfg, l.Checkers, patterns) + if err != nil { + return nil, nil, err + } + + if len(results) == 0 && err == nil { + // TODO(dh): emulate Go's behavior more closely once we have + // access to go list's Match field. + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", patterns) + } + + analyzerNames := make([]string, len(l.Checkers)) + for i, a := range l.Checkers { + analyzerNames[i] = a.Name + } + + used := map[unusedKey]bool{} + var unuseds []unusedPair + for _, res := range results { + if len(res.Errors) > 0 && !res.Failed { + panic("package has errors but isn't marked as failed") + } + if res.Failed { + problems = append(problems, failed(res)...) 
+ } else { + if res.Skipped { + warnings = append(warnings, fmt.Sprintf("skipped package %s because it is too large", res.Package)) + continue + } + + if !res.Initial { + continue + } + + allowedAnalyzers := filterAnalyzerNames(analyzerNames, res.Config.Checks) + resd, err := res.Load() + if err != nil { + return nil, nil, err + } + ps := success(allowedAnalyzers, resd) + filtered, err := filterIgnored(ps, resd, allowedAnalyzers) + if err != nil { + return nil, nil, err + } + problems = append(problems, filtered...) + + for _, obj := range resd.Unused.Used { + // FIXME(dh): pick the object whose filename does not include $GOROOT + key := unusedKey{ + pkgPath: res.Package.PkgPath, + base: filepath.Base(obj.Position.Filename), + line: obj.Position.Line, + name: obj.Name, + } + used[key] = true + } + + if allowedAnalyzers["U1000"] { + for _, obj := range resd.Unused.Unused { + key := unusedKey{ + pkgPath: res.Package.PkgPath, + base: filepath.Base(obj.Position.Filename), + line: obj.Position.Line, + name: obj.Name, + } + unuseds = append(unuseds, unusedPair{key, obj}) + if _, ok := used[key]; !ok { + used[key] = false + } + } + } + } + } + + for _, uo := range unuseds { + if used[uo.key] { + continue + } + if uo.obj.InGenerated { + continue + } + problems = append(problems, problem{ + Diagnostic: runner.Diagnostic{ + Position: uo.obj.DisplayPosition, + Message: fmt.Sprintf("%s %s is unused", uo.obj.Kind, uo.obj.Name), + Category: "U1000", + }, + }) + } + + if len(problems) == 0 { + return nil, warnings, nil + } + + sort.Slice(problems, func(i, j int) bool { + pi := problems[i].Position + pj := problems[j].Position + + if pi.Filename != pj.Filename { + return pi.Filename < pj.Filename + } + if pi.Line != pj.Line { + return pi.Line < pj.Line + } + if pi.Column != pj.Column { + return pi.Column < pj.Column + } + + return problems[i].Message < problems[j].Message + }) + + var out []problem + out = append(out, problems[0]) + for i, p := range problems[1:] { + // We may 
encounter duplicate problems because one file + // can be part of many packages. + if !problems[i].equal(p) { + out = append(out, p) + } + } + return out, warnings, nil +} + +func filterAnalyzerNames(analyzers []string, checks []string) map[string]bool { + allowedChecks := map[string]bool{} + + for _, check := range checks { + b := true + if len(check) > 1 && check[0] == '-' { + b = false + check = check[1:] + } + if check == "*" || check == "all" { + // Match all + for _, c := range analyzers { + allowedChecks[c] = b + } + } else if strings.HasSuffix(check, "*") { + // Glob + prefix := check[:len(check)-1] + isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 + + for _, a := range analyzers { + idx := strings.IndexFunc(a, func(r rune) bool { return unicode.IsNumber(r) }) + if isCat { + // Glob is S*, which should match S1000 but not SA1000 + cat := a[:idx] + if prefix == cat { + allowedChecks[a] = b + } + } else { + // Glob is S1* + if strings.HasPrefix(a, prefix) { + allowedChecks[a] = b + } + } + } + } else { + // Literal check name + allowedChecks[check] = b + } + } + return allowedChecks +} + +var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?`) + +func parsePos(pos string) (token.Position, int, error) { + if pos == "-" || pos == "" { + return token.Position{}, 0, nil + } + parts := posRe.FindStringSubmatch(pos) + if parts == nil { + return token.Position{}, 0, fmt.Errorf("internal error: malformed position %q", pos) + } + file := parts[1] + line, _ := strconv.Atoi(parts[2]) + col, _ := strconv.Atoi(parts[3]) + return token.Position{ + Filename: file, + Line: line, + Column: col, + }, len(parts[0]), nil +} + +func usage(name string, flags *flag.FlagSet) func() { + return func() { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] 
directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] files... # must be a single package\n", name) + fmt.Fprintf(os.Stderr, "Flags:\n") + flags.PrintDefaults() + } +} + +type list []string + +func (list *list) String() string { + return `"` + strings.Join(*list, ",") + `"` +} + +func (list *list) Set(s string) error { + if s == "" { + *list = nil + return nil + } + + *list = strings.Split(s, ",") + return nil +} + +func FlagSet(name string) *flag.FlagSet { + flags := flag.NewFlagSet("", flag.ExitOnError) + flags.Usage = usage(name, flags) + flags.String("tags", "", "List of `build tags`") + flags.Bool("tests", true, "Include tests") + flags.Bool("version", false, "Print version and exit") + flags.Bool("show-ignored", false, "Don't filter ignored problems") + flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") + flags.String("explain", "", "Print description of `check`") + + flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") + flags.String("debug.memprofile", "", "Write memory profile to `file`") + flags.Bool("debug.version", false, "Print detailed version information about this program") + flags.Bool("debug.no-compile-errors", false, "Don't print compile errors") + flags.String("debug.measure-analyzers", "", "Write analysis measurements to `file`. 
`file` will be opened for appending if it already exists.") + + checks := list{"inherit"} + fail := list{"all"} + flags.Var(&checks, "checks", "Comma-separated list of `checks` to enable.") + flags.Var(&fail, "fail", "Comma-separated list of `checks` that can cause a non-zero exit status.") + + tags := build.Default.ReleaseTags + v := tags[len(tags)-1][2:] + version := new(lint.VersionFlag) + if err := version.Set(v); err != nil { + panic(fmt.Sprintf("internal error: %s", err)) + } + + flags.Var(version, "go", "Target Go `version` in the format '1.x'") + return flags +} + +func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) { + for _, c := range cs { + if c.Name == check { + return c, true + } + } + return nil, false +} + +func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { + tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) + tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) + goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) + theFormatter := fs.Lookup("f").Value.(flag.Getter).Get().(string) + printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) + showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) + explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string) + + cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) + memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) + debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool) + debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool) + + var measureAnalyzers func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) + if path := fs.Lookup("debug.measure-analyzers").Value.(flag.Getter).Get().(string); path != "" { + f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600) + if err != nil { + log.Fatal(err) + } + + mu := &sync.Mutex{} + measureAnalyzers = 
func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) { + mu.Lock() + defer mu.Unlock() + // FIXME(dh): print pkg.ID + if _, err := fmt.Fprintf(f, "%s\t%s\t%d\n", analysis.Name, pkg, d.Nanoseconds()); err != nil { + log.Println("error writing analysis measurements:", err) + } + } + } + + cfg := config.Config{} + cfg.Checks = *fs.Lookup("checks").Value.(*list) + + exit := func(code int) { + if cpuProfile != "" { + pprof.StopCPUProfile() + } + if memProfile != "" { + f, err := os.Create(memProfile) + if err != nil { + panic(err) + } + runtime.GC() + pprof.WriteHeapProfile(f) + } + os.Exit(code) + } + if cpuProfile != "" { + f, err := os.Create(cpuProfile) + if err != nil { + log.Fatal(err) + } + pprof.StartCPUProfile(f) + } + + if debugVersion { + version.Verbose() + exit(0) + } + + if printVersion { + version.Print() + exit(0) + } + + // Validate that the tags argument is well-formed. go/packages + // doesn't detect malformed build flags and returns unhelpful + // errors. + tf := buildutil.TagsFlag{} + if err := tf.Set(tags); err != nil { + fmt.Fprintln(os.Stderr, fmt.Errorf("invalid value %q for flag -tags: %s", tags, err)) + exit(1) + } + + if explain != "" { + var haystack []*analysis.Analyzer + haystack = append(haystack, cs...) 
+ check, ok := findCheck(haystack, explain) + if !ok { + fmt.Fprintln(os.Stderr, "Couldn't find check", explain) + exit(1) + } + if check.Doc == "" { + fmt.Fprintln(os.Stderr, explain, "has no documentation") + exit(1) + } + fmt.Println(check.Doc) + exit(0) + } + + var f formatter + switch theFormatter { + case "text": + f = textFormatter{W: os.Stdout} + case "stylish": + f = &stylishFormatter{W: os.Stdout} + case "json": + f = jsonFormatter{W: os.Stdout} + default: + fmt.Fprintf(os.Stderr, "unsupported output format %q\n", theFormatter) + exit(2) + } + + ps, warnings, err := doLint(cs, fs.Args(), &options{ + Tags: tags, + LintTests: tests, + GoVersion: goVersion, + Config: cfg, + PrintAnalyzerMeasurement: measureAnalyzers, + }) + if err != nil { + fmt.Fprintln(os.Stderr, err) + exit(1) + } + + for _, w := range warnings { + fmt.Fprintln(os.Stderr, "warning:", w) + } + + var ( + numErrors int + numWarnings int + numIgnored int + ) + + fail := *fs.Lookup("fail").Value.(*list) + analyzerNames := make([]string, len(cs)) + for i, a := range cs { + analyzerNames[i] = a.Name + } + shouldExit := filterAnalyzerNames(analyzerNames, fail) + shouldExit["compile"] = true + + for _, p := range ps { + if p.Category == "compile" && debugNoCompile { + continue + } + if p.Severity == severityIgnored && !showIgnored { + numIgnored++ + continue + } + if shouldExit[p.Category] { + numErrors++ + } else { + p.Severity = severityWarning + numWarnings++ + } + f.Format(p) + } + if f, ok := f.(statter); ok { + f.Stats(len(ps), numErrors, numWarnings, numIgnored) + } + + if f, ok := f.(documentationMentioner); ok && (numErrors > 0 || numWarnings > 0) && len(os.Args) > 0 { + f.MentionCheckDocumentation(os.Args[0]) + } + + if numErrors > 0 { + exit(1) + } + exit(0) +} + +type options struct { + Config config.Config + + Tags string + LintTests bool + GoVersion int + PrintAnalyzerMeasurement func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) +} + +func computeSalt() 
([]byte, error) { + if version.Version != "devel" { + return []byte(version.Version), nil + } + p, err := os.Executable() + if err != nil { + return nil, err + } + f, err := os.Open(p) + if err != nil { + return nil, err + } + defer f.Close() + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return nil, err + } + return h.Sum(nil), nil +} + +func doLint(cs []*analysis.Analyzer, paths []string, opt *options) ([]problem, []string, error) { + salt, err := computeSalt() + if err != nil { + return nil, nil, fmt.Errorf("could not compute salt for cache: %s", err) + } + cache.SetSalt(salt) + + if opt == nil { + opt = &options{} + } + + l, err := newLinter(opt.Config) + if err != nil { + return nil, nil, err + } + l.Checkers = cs + l.SetGoVersion(opt.GoVersion) + l.Runner.Stats.PrintAnalyzerMeasurement = opt.PrintAnalyzerMeasurement + + cfg := &packages.Config{} + if opt.LintTests { + cfg.Tests = true + } + if opt.Tags != "" { + cfg.BuildFlags = append(cfg.BuildFlags, "-tags", opt.Tags) + } + + printStats := func() { + // Individual stats are read atomically, but overall there + // is no synchronisation. For printing rough progress + // information, this doesn't matter. 
+ switch l.Runner.Stats.State() { + case runner.StateInitializing: + fmt.Fprintln(os.Stderr, "Status: initializing") + case runner.StateLoadPackageGraph: + fmt.Fprintln(os.Stderr, "Status: loading package graph") + case runner.StateBuildActionGraph: + fmt.Fprintln(os.Stderr, "Status: building action graph") + case runner.StateProcessing: + fmt.Fprintf(os.Stderr, "Packages: %d/%d initial, %d/%d total; Workers: %d/%d\n", + l.Runner.Stats.ProcessedInitialPackages(), + l.Runner.Stats.InitialPackages(), + l.Runner.Stats.ProcessedPackages(), + l.Runner.Stats.TotalPackages(), + l.Runner.ActiveWorkers(), + l.Runner.TotalWorkers(), + ) + case runner.StateFinalizing: + fmt.Fprintln(os.Stderr, "Status: finalizing") + } + } + if len(infoSignals) > 0 { + ch := make(chan os.Signal, 1) + signal.Notify(ch, infoSignals...) + defer signal.Stop(ch) + go func() { + for range ch { + printStats() + } + }() + } + return l.Lint(cfg, paths) +} diff --git a/lint/lintutil/util_test.go b/lintcmd/cmd_test.go similarity index 94% rename from lint/lintutil/util_test.go rename to lintcmd/cmd_test.go index b348ba1bf..868260317 100644 --- a/lint/lintutil/util_test.go +++ b/lintcmd/cmd_test.go @@ -1,4 +1,4 @@ -package lintutil +package lintcmd import ( "go/token" @@ -37,7 +37,7 @@ func TestParsePos(t *testing.T) { } for _, tt := range tests { - res := parsePos(tt.in) + res, _, _ := parsePos(tt.in) if res != tt.out { t.Errorf("failed to parse %q correctly", tt.in) } diff --git a/lint/directives.go b/lintcmd/directives.go similarity index 80% rename from lint/directives.go rename to lintcmd/directives.go index 1ca8d5acf..e35b9c7ae 100644 --- a/lint/directives.go +++ b/lintcmd/directives.go @@ -1,15 +1,14 @@ -package lint +package lintcmd import ( "strings" - "honnef.co/go/tools/facts" - "honnef.co/go/tools/runner" + "honnef.co/go/tools/lintcmd/runner" ) -func parseDirectives(dirs []facts.SerializedDirective) ([]ignore, []Problem) { +func parseDirectives(dirs []runner.SerializedDirective) ([]ignore, 
[]problem) { var ignores []ignore - var problems []Problem + var problems []problem for _, dir := range dirs { cmd := dir.Command @@ -17,13 +16,13 @@ func parseDirectives(dirs []facts.SerializedDirective) ([]ignore, []Problem) { switch cmd { case "ignore", "file-ignore": if len(args) < 2 { - p := Problem{ + p := problem{ Diagnostic: runner.Diagnostic{ Position: dir.NodePosition, Message: "malformed linter directive; missing the required reason field?", Category: "compile", }, - Severity: Error, + Severity: severityError, } problems = append(problems, p) continue diff --git a/lint/lintutil/format/format.go b/lintcmd/format.go similarity index 77% rename from lint/lintutil/format/format.go rename to lintcmd/format.go index 60aa6165c..f3d95345a 100644 --- a/lint/lintutil/format/format.go +++ b/lintcmd/format.go @@ -1,5 +1,4 @@ -// Package format provides formatters for linter problems. -package format +package lintcmd import ( "encoding/json" @@ -9,8 +8,6 @@ import ( "os" "path/filepath" "text/tabwriter" - - "honnef.co/go/tools/lint" ) func shortPath(path string) string { @@ -38,50 +35,38 @@ func relativePositionString(pos token.Position) string { return s } -type Statter interface { +type statter interface { Stats(total, errors, warnings, ignored int) } -type Formatter interface { - Format(p lint.Problem) +type formatter interface { + Format(p problem) } -type DocumentationMentioner interface { +type documentationMentioner interface { MentionCheckDocumentation(cmd string) } -type Text struct { +type textFormatter struct { W io.Writer } -func (o Text) Format(p lint.Problem) { +func (o textFormatter) Format(p problem) { fmt.Fprintf(o.W, "%s: %s\n", relativePositionString(p.Position), p.String()) for _, r := range p.Related { fmt.Fprintf(o.W, "\t%s: %s\n", relativePositionString(r.Position), r.Message) } } -func (o Text) MentionCheckDocumentation(cmd string) { +func (o textFormatter) MentionCheckDocumentation(cmd string) { fmt.Fprintf(o.W, "\nRun '%s -explain ' or visit 
https://staticcheck.io/docs/checks for documentation on checks.\n", cmd) } -type JSON struct { +type jsonFormatter struct { W io.Writer } -func severity(s lint.Severity) string { - switch s { - case lint.Error: - return "error" - case lint.Warning: - return "warning" - case lint.Ignored: - return "ignored" - } - return "" -} - -func (o JSON) Format(p lint.Problem) { +func (o jsonFormatter) Format(p problem) { type location struct { File string `json:"file"` Line int `json:"line"` @@ -101,7 +86,7 @@ func (o JSON) Format(p lint.Problem) { Related []related `json:"related,omitempty"` }{ Code: p.Category, - Severity: severity(p.Severity), + Severity: p.Severity.String(), Location: location{ File: p.Position.Filename, Line: p.Position.Line, @@ -132,14 +117,14 @@ func (o JSON) Format(p lint.Problem) { _ = json.NewEncoder(o.W).Encode(jp) } -type Stylish struct { +type stylishFormatter struct { W io.Writer prevFile string tw *tabwriter.Writer } -func (o *Stylish) Format(p lint.Problem) { +func (o *stylishFormatter) Format(p problem) { pos := p.Position if pos.Filename == "" { pos.Filename = "-" @@ -160,11 +145,11 @@ func (o *Stylish) Format(p lint.Problem) { } } -func (o *Stylish) MentionCheckDocumentation(cmd string) { - Text{W: o.W}.MentionCheckDocumentation(cmd) +func (o *stylishFormatter) MentionCheckDocumentation(cmd string) { + textFormatter{W: o.W}.MentionCheckDocumentation(cmd) } -func (o *Stylish) Stats(total, errors, warnings, ignored int) { +func (o *stylishFormatter) Stats(total, errors, warnings, ignored int) { if o.tw != nil { o.tw.Flush() fmt.Fprintln(o.W) diff --git a/lint/lint_test.go b/lintcmd/lint_test.go similarity index 93% rename from lint/lint_test.go rename to lintcmd/lint_test.go index 08be73b30..1b302e77f 100644 --- a/lint/lint_test.go +++ b/lintcmd/lint_test.go @@ -1,4 +1,4 @@ -package lint +package lintcmd import ( "go/token" @@ -9,9 +9,10 @@ import ( "strings" "testing" - "golang.org/x/tools/go/packages" 
"honnef.co/go/tools/config" - "honnef.co/go/tools/runner" + "honnef.co/go/tools/lintcmd/runner" + + "golang.org/x/tools/go/packages" ) func testdata() string { @@ -22,8 +23,8 @@ func testdata() string { return testdata } -func lintPackage(t *testing.T, name string) []Problem { - l, err := NewLinter(config.Config{}) +func lintPackage(t *testing.T, name string) []problem { + l, err := newLinter(config.Config{}) if err != nil { t.Fatal(err) } @@ -67,7 +68,7 @@ func TestErrors(t *testing.T) { t.Fatalf("got %d problems, want 1", len(ps)) } trimPosition(&ps[0].Position) - want := Problem{ + want := problem{ Diagnostic: runner.Diagnostic{ Position: token.Position{ Filename: "broken_typeerror/pkg.go", @@ -99,7 +100,7 @@ func TestErrors(t *testing.T) { } trimPosition(&ps[0].Position) - want := Problem{ + want := problem{ Diagnostic: runner.Diagnostic{ Position: token.Position{ Filename: "broken_parse/pkg.go", diff --git a/runner/runner.go b/lintcmd/runner/runner.go similarity index 97% rename from runner/runner.go rename to lintcmd/runner/runner.go index add837dfb..b3c245a43 100644 --- a/runner/runner.go +++ b/lintcmd/runner/runner.go @@ -126,12 +126,12 @@ import ( "sync/atomic" "time" + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/config" - "honnef.co/go/tools/facts" + "honnef.co/go/tools/go/loader" "honnef.co/go/tools/internal/cache" tsync "honnef.co/go/tools/internal/sync" - "honnef.co/go/tools/loader" - "honnef.co/go/tools/report" "honnef.co/go/tools/unused" "golang.org/x/tools/go/analysis" @@ -183,8 +183,26 @@ type Result struct { results string } +type SerializedDirective struct { + Command string + Arguments []string + // The position of the comment + DirectivePosition token.Position + // The position of the node that the comment is attached to + NodePosition token.Position +} + +func serializeDirective(dir lint.Directive, fset *token.FileSet) SerializedDirective { + return SerializedDirective{ + Command: dir.Command, 
+ Arguments: dir.Arguments, + DirectivePosition: report.DisplayPosition(fset, dir.Directive.Pos()), + NodePosition: report.DisplayPosition(fset, dir.Node.Pos()), + } +} + type ResultData struct { - Directives []facts.SerializedDirective + Directives []SerializedDirective Diagnostics []Diagnostic Unused unused.SerializedResult } @@ -543,9 +561,9 @@ func (r *subrunner) do(act action) error { } var out ResultData - out.Directives = make([]facts.SerializedDirective, len(result.dirs)) + out.Directives = make([]SerializedDirective, len(result.dirs)) for i, dir := range result.dirs { - out.Directives[i] = facts.SerializeDirective(dir, result.lpkg.Fset) + out.Directives[i] = serializeDirective(dir, result.lpkg.Fset) } out.Diagnostics = result.diags @@ -597,7 +615,7 @@ type packageActionResult struct { facts []gobFact diags []Diagnostic unused unused.SerializedResult - dirs []facts.Directive + dirs []lint.Directive lpkg *loader.Package skipped bool } @@ -629,9 +647,9 @@ func (r *subrunner) doUncached(a *packageAction) (packageActionResult, error) { // OPT(dh): instead of parsing directives twice (twice because // U1000 depends on the facts.Directives analyzer), reuse the // existing result - var dirs []facts.Directive + var dirs []lint.Directive if !a.factsOnly { - dirs = facts.ParseDirectives(pkg.Syntax, pkg.Fset) + dirs = lint.ParseDirectives(pkg.Syntax, pkg.Fset) } res, err := r.runAnalyzers(a, pkg) diff --git a/runner/stats.go b/lintcmd/runner/stats.go similarity index 98% rename from runner/stats.go rename to lintcmd/runner/stats.go index ce3d589df..fd7da2fa4 100644 --- a/runner/stats.go +++ b/lintcmd/runner/stats.go @@ -4,8 +4,9 @@ import ( "sync/atomic" "time" + "honnef.co/go/tools/go/loader" + "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/loader" ) const ( diff --git a/lint/lintutil/stats.go b/lintcmd/stats.go similarity index 88% rename from lint/lintutil/stats.go rename to lintcmd/stats.go index ba8caf0af..198ef0ba7 100644 --- a/lint/lintutil/stats.go +++ 
b/lintcmd/stats.go @@ -1,6 +1,6 @@ // +build !aix,!android,!darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!solaris -package lintutil +package lintcmd import "os" diff --git a/lint/lintutil/stats_bsd.go b/lintcmd/stats_bsd.go similarity index 88% rename from lint/lintutil/stats_bsd.go rename to lintcmd/stats_bsd.go index 3a62ede03..0395a36d3 100644 --- a/lint/lintutil/stats_bsd.go +++ b/lintcmd/stats_bsd.go @@ -1,6 +1,6 @@ // +build darwin dragonfly freebsd netbsd openbsd -package lintutil +package lintcmd import ( "os" diff --git a/lint/lintutil/stats_posix.go b/lintcmd/stats_posix.go similarity index 87% rename from lint/lintutil/stats_posix.go rename to lintcmd/stats_posix.go index 53f21c666..1737692b2 100644 --- a/lint/lintutil/stats_posix.go +++ b/lintcmd/stats_posix.go @@ -1,6 +1,6 @@ // +build aix android linux solaris -package lintutil +package lintcmd import ( "os" diff --git a/lint/testdata/src/Test/file-ignores.go b/lintcmd/testdata/src/Test/file-ignores.go similarity index 100% rename from lint/testdata/src/Test/file-ignores.go rename to lintcmd/testdata/src/Test/file-ignores.go diff --git a/lint/testdata/src/Test/line-ignores.go b/lintcmd/testdata/src/Test/line-ignores.go similarity index 100% rename from lint/testdata/src/Test/line-ignores.go rename to lintcmd/testdata/src/Test/line-ignores.go diff --git a/lint/testdata/src/broken_dep/pkg.go b/lintcmd/testdata/src/broken_dep/pkg.go similarity index 100% rename from lint/testdata/src/broken_dep/pkg.go rename to lintcmd/testdata/src/broken_dep/pkg.go diff --git a/lint/testdata/src/broken_parse/pkg.go b/lintcmd/testdata/src/broken_parse/pkg.go similarity index 100% rename from lint/testdata/src/broken_parse/pkg.go rename to lintcmd/testdata/src/broken_parse/pkg.go diff --git a/lint/testdata/src/broken_pkgerror/broken.go b/lintcmd/testdata/src/broken_pkgerror/broken.go similarity index 100% rename from lint/testdata/src/broken_pkgerror/broken.go rename to lintcmd/testdata/src/broken_pkgerror/broken.go 
diff --git a/lint/testdata/src/broken_typeerror/pkg.go b/lintcmd/testdata/src/broken_typeerror/pkg.go similarity index 100% rename from lint/testdata/src/broken_typeerror/pkg.go rename to lintcmd/testdata/src/broken_typeerror/pkg.go diff --git a/version/buildinfo.go b/lintcmd/version/buildinfo.go similarity index 100% rename from version/buildinfo.go rename to lintcmd/version/buildinfo.go diff --git a/version/buildinfo111.go b/lintcmd/version/buildinfo111.go similarity index 100% rename from version/buildinfo111.go rename to lintcmd/version/buildinfo111.go diff --git a/version/version.go b/lintcmd/version/version.go similarity index 100% rename from version/version.go rename to lintcmd/version/version.go diff --git a/pattern/match.go b/pattern/match.go index 0c42178f7..f0fda0619 100644 --- a/pattern/match.go +++ b/pattern/match.go @@ -6,8 +6,6 @@ import ( "go/token" "go/types" "reflect" - - "honnef.co/go/tools/code" ) var tokensByString = map[string]Token{ @@ -452,7 +450,8 @@ func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) { obj = m.TypesInfo.ObjectOf(node) switch obj := obj.(type) { case *types.Func: - name = code.FuncName(obj) + // OPT(dh): optimize this similar to code.FuncName + name = obj.FullName() case *types.Builtin: name = obj.Name() default: @@ -464,7 +463,8 @@ func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) { if !ok { return nil, false } - name = code.FuncName(obj.(*types.Func)) + // OPT(dh): optimize this similar to code.FuncName + name = obj.(*types.Func).FullName() default: return nil, false } diff --git a/simple/analysis.go b/simple/analysis.go index 9f554c310..bf8f31942 100644 --- a/simple/analysis.go +++ b/simple/analysis.go @@ -3,12 +3,12 @@ package simple import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "honnef.co/go/tools/facts" + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/internal/passes/buildir" - 
"honnef.co/go/tools/lint/lintutil" ) -var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ +var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ "S1000": { Run: CheckSingleCaseSelect, Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, diff --git a/simple/doc.go b/simple/doc.go index de0edc5bf..dce35e332 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -1,6 +1,6 @@ package simple -import "honnef.co/go/tools/lint" +import "honnef.co/go/tools/analysis/lint" var Docs = map[string]*lint.Documentation{ "S1000": { diff --git a/simple/lint.go b/simple/lint.go index 9d1455c87..0ade9bff8 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -1,5 +1,5 @@ // Package simple contains a linter for Go source code. -package simple // import "honnef.co/go/tools/simple" +package simple import ( "fmt" @@ -14,14 +14,14 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/types/typeutil" - . "honnef.co/go/tools/arg" - "honnef.co/go/tools/code" - "honnef.co/go/tools/edit" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/analysis/edit" + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/internal/sharedcheck" - . 
"honnef.co/go/tools/lint/lintdsl" + "honnef.co/go/tools/knowledge" "honnef.co/go/tools/pattern" - "honnef.co/go/tools/report" ) var ( @@ -40,10 +40,10 @@ var ( func CheckSingleCaseSelect(pass *analysis.Pass) (interface{}, error) { seen := map[ast.Node]struct{}{} fn := func(node ast.Node) { - if m, ok := Match(pass, checkSingleCaseSelectQ1, node); ok { + if m, ok := code.Match(pass, checkSingleCaseSelectQ1, node); ok { seen[m.State["select"].(ast.Node)] = struct{}{} report.Report(pass, node, "should use for range instead of for { select {} }", report.FilterGenerated()) - } else if _, ok := Match(pass, checkSingleCaseSelectQ2, node); ok { + } else if _, ok := code.Match(pass, checkSingleCaseSelectQ2, node); ok { if _, ok := seen[node]; !ok { report.Report(pass, node, "should use a simple channel send/receive instead of select with a single case", report.ShortRange(), @@ -75,7 +75,7 @@ var ( func CheckLoopCopy(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, edits, ok := MatchAndEdit(pass, checkLoopCopyQ, checkLoopCopyR, node) + m, edits, ok := code.MatchAndEdit(pass, checkLoopCopyQ, checkLoopCopyR, node) if !ok { return } @@ -161,7 +161,7 @@ func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { return nil, nil } fn := func(node ast.Node, stack []ast.Node) { - m, ok := Match(pass, checkBytesBufferConversionsQ, node) + m, ok := code.Match(pass, checkBytesBufferConversionsQ, node) if !ok { return } @@ -288,7 +288,7 @@ func CheckBytesCompare(pass *analysis.Pass) (interface{}, error) { return nil, nil } fn := func(node ast.Node) { - m, ok := Match(pass, checkBytesCompareQ, node) + m, ok := code.Match(pass, checkBytesCompareQ, node) if !ok { return } @@ -341,7 +341,7 @@ func CheckRegexpRaw(pass *analysis.Pass) (interface{}, error) { if !ok { return } - lit, ok := call.Args[Arg("regexp.Compile.expr")].(*ast.BasicLit) + lit, ok := call.Args[knowledge.Arg("regexp.Compile.expr")].(*ast.BasicLit) if !ok { // TODO(dominikh): 
support string concat, maybe support constants return @@ -404,11 +404,11 @@ func CheckIfReturn(pass *analysis.Pass) (interface{}, error) { return } } - m1, ok := Match(pass, checkIfReturnQIf, n1) + m1, ok := code.Match(pass, checkIfReturnQIf, n1) if !ok { return } - m2, ok := Match(pass, checkIfReturnQRet, n2) + m2, ok := code.Match(pass, checkIfReturnQRet, n2) if !ok { return } @@ -553,7 +553,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { if !ok || yxFun.Name != "len" || len(yx.Args) != 1 { return } - yxArg, ok := yx.Args[Arg("len.v")].(*ast.Ident) + yxArg, ok := yx.Args[knowledge.Arg("len.v")].(*ast.Ident) if !ok { return } @@ -616,7 +616,7 @@ var checkSlicingQ = pattern.MustParse(`(SliceExpr x@(Object _) low (CallExpr (Bu func CheckSlicing(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if _, ok := Match(pass, checkSlicingQ, node); ok { + if _, ok := code.Match(pass, checkSlicingQ, node); ok { expr := node.(*ast.SliceExpr) report.Report(pass, expr.High, "should omit second index in slice, s[a:len(s)] is identical to s[a:]", @@ -655,7 +655,7 @@ var checkLoopAppendQ = pattern.MustParse(` func CheckLoopAppend(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, ok := Match(pass, checkLoopAppendQ, node) + m, ok := code.Match(pass, checkLoopAppendQ, node) if !ok { return } @@ -702,7 +702,7 @@ var ( func CheckTimeSince(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if _, edits, ok := MatchAndEdit(pass, checkTimeSinceQ, checkTimeSinceR, node); ok { + if _, edits, ok := code.MatchAndEdit(pass, checkTimeSinceQ, checkTimeSinceR, node); ok { report.Report(pass, node, "should use time.Since instead of time.Now().Sub", report.FilterGenerated(), report.Fixes(edit.Fix("replace with call to time.Since", edits...))) @@ -722,7 +722,7 @@ func CheckTimeUntil(pass *analysis.Pass) (interface{}, error) { return nil, nil } fn := func(node ast.Node) { - if _, ok := 
Match(pass, checkTimeUntilQ, node); ok { + if _, ok := code.Match(pass, checkTimeUntilQ, node); ok { if sel, ok := node.(*ast.CallExpr).Fun.(*ast.SelectorExpr); ok { r := pattern.NodeToAST(checkTimeUntilR.Root, map[string]interface{}{"arg": sel.X}).(ast.Node) report.Report(pass, node, "should use time.Until instead of t.Sub(time.Now())", @@ -752,13 +752,13 @@ var ( func CheckUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { fn1 := func(node ast.Node) { - if _, ok := Match(pass, checkUnnecessaryBlankQ1, node); ok { + if _, ok := code.Match(pass, checkUnnecessaryBlankQ1, node); ok { r := *node.(*ast.AssignStmt) r.Lhs = r.Lhs[0:1] report.Report(pass, node, "unnecessary assignment to the blank identifier", report.FilterGenerated(), report.Fixes(edit.Fix("remove assignment to blank identifier", edit.ReplaceWithNode(pass.Fset, node, &r)))) - } else if m, ok := Match(pass, checkUnnecessaryBlankQ2, node); ok { + } else if m, ok := code.Match(pass, checkUnnecessaryBlankQ2, node); ok { report.Report(pass, node, "unnecessary assignment to the blank identifier", report.FilterGenerated(), report.Fixes(edit.Fix("simplify channel receive operation", edit.ReplaceWithNode(pass.Fset, node, m.State["recv"].(ast.Node))))) @@ -945,7 +945,7 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) { if len(call.Args) != 1 { return false } - return sameNonDynamic(call.Args[Arg("len.v")], ident) + return sameNonDynamic(call.Args[knowledge.Arg("len.v")], ident) } fn := func(node ast.Node) { @@ -1062,7 +1062,7 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) { if len(index.Args) != 1 { return } - id3 := index.Args[Arg("len.v")] + id3 := index.Args[knowledge.Arg("len.v")] switch oid3 := condCall.Args[1].(type) { case *ast.BasicLit: if pkg != "strings" { @@ -1150,7 +1150,7 @@ func CheckLoopSlide(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) - m, edits, ok := MatchAndEdit(pass, checkLoopSlideQ, checkLoopSlideR, loop) 
+ m, edits, ok := code.MatchAndEdit(pass, checkLoopSlideQ, checkLoopSlideR, loop) if !ok { return } @@ -1178,14 +1178,14 @@ func CheckMakeLenCap(pass *analysis.Pass) (interface{}, error) { // special case of runtime tests testing map creation return } - if m, ok := Match(pass, checkMakeLenCapQ1, node); ok { + if m, ok := code.Match(pass, checkMakeLenCapQ1, node); ok { T := m.State["typ"].(ast.Expr) size := m.State["size"].(ast.Node) if _, ok := pass.TypesInfo.TypeOf(T).Underlying().(*types.Slice); ok { return } report.Report(pass, size, fmt.Sprintf("should use make(%s) instead", report.Render(pass, T)), report.FilterGenerated()) - } else if m, ok := Match(pass, checkMakeLenCapQ2, node); ok { + } else if m, ok := code.Match(pass, checkMakeLenCapQ2, node); ok { // TODO(dh): don't consider sizes identical if they're // dynamic. for example: make(T, <-ch, <-ch). T := m.State["typ"].(ast.Expr) @@ -1224,7 +1224,7 @@ var ( func CheckAssertNotNil(pass *analysis.Pass) (interface{}, error) { fn1 := func(node ast.Node) { - m, ok := Match(pass, checkAssertNotNilFn1Q, node) + m, ok := code.Match(pass, checkAssertNotNilFn1Q, node) if !ok { return } @@ -1235,7 +1235,7 @@ func CheckAssertNotNil(pass *analysis.Pass) (interface{}, error) { report.FilterGenerated()) } fn2 := func(node ast.Node) { - m, ok := Match(pass, checkAssertNotNilFn2Q, node) + m, ok := code.Match(pass, checkAssertNotNilFn2Q, node) if !ok { return } @@ -1359,7 +1359,7 @@ func CheckRedundantBreak(pass *analysis.Pass) (interface{}, error) { ret = x.Type.Results body = x.Body default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) } // if the func has results, a return can't be redundant. 
// similarly, if there are no statements, there can be @@ -1408,7 +1408,7 @@ var checkRedundantSprintfQ = pattern.MustParse(`(CallExpr (Function "fmt.Sprintf func CheckRedundantSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, ok := Match(pass, checkRedundantSprintfQ, node) + m, ok := code.Match(pass, checkRedundantSprintfQ, node) if !ok { return } @@ -1460,7 +1460,7 @@ var ( func CheckErrorsNewSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if _, edits, ok := MatchAndEdit(pass, checkErrorsNewSprintfQ, checkErrorsNewSprintfR, node); ok { + if _, edits, ok := code.MatchAndEdit(pass, checkErrorsNewSprintfQ, checkErrorsNewSprintfR, node); ok { // TODO(dh): the suggested fix may leave an unused import behind report.Report(pass, node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))", report.FilterGenerated(), @@ -1484,7 +1484,7 @@ var checkNilCheckAroundRangeQ = pattern.MustParse(` func CheckNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, ok := Match(pass, checkNilCheckAroundRangeQ, node) + m, ok := code.Match(pass, checkNilCheckAroundRangeQ, node) if !ok { return } @@ -1535,7 +1535,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) { case *ast.FuncDecl: body = node.Body default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) } if body == nil { return @@ -1555,7 +1555,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) { return false } call := node.(*ast.CallExpr) - typeconv := call.Args[Arg("sort.Sort.data")].(*ast.CallExpr) + typeconv := call.Args[knowledge.Arg("sort.Sort.data")].(*ast.CallExpr) sel := typeconv.Fun.(*ast.SelectorExpr) name := code.SelectorName(pass, sel) @@ -1603,7 +1603,7 @@ var checkGuardedDeleteQ = pattern.MustParse(` func CheckGuardedDelete(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if m, ok := Match(pass, checkGuardedDeleteQ, 
node); ok { + if m, ok := code.Match(pass, checkGuardedDeleteQ, node); ok { report.Report(pass, node, "unnecessary guard around call to delete", report.ShortRange(), report.FilterGenerated(), @@ -1626,7 +1626,7 @@ var ( func CheckSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, ok := Match(pass, checkSimplifyTypeSwitchQ, node) + m, ok := code.Match(pass, checkSimplifyTypeSwitchQ, node) if !ok { return } @@ -1745,7 +1745,7 @@ var checkUnnecessaryGuardQ = pattern.MustParse(` func CheckUnnecessaryGuard(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if m, ok := Match(pass, checkUnnecessaryGuardQ, node); ok { + if m, ok := code.Match(pass, checkUnnecessaryGuardQ, node); ok { if code.MayHaveSideEffects(pass, m.State["indexexpr"].(ast.Expr), nil) { return } @@ -1765,7 +1765,7 @@ var ( func CheckElaborateSleep(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if m, ok := Match(pass, checkElaborateSleepQ, node); ok { + if m, ok := code.Match(pass, checkElaborateSleepQ, node); ok { if body, ok := m.State["body"].([]ast.Stmt); ok && len(body) == 0 { report.Report(pass, node, "should use time.Sleep instead of elaborate way of sleeping", report.ShortRange(), @@ -1801,7 +1801,7 @@ var checkPrintSprintQ = pattern.MustParse(` func CheckPrintSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, ok := Match(pass, checkPrintSprintQ, node) + m, ok := code.Match(pass, checkPrintSprintQ, node) if !ok { return } @@ -1844,7 +1844,7 @@ func CheckSprintLiteral(pass *analysis.Pass) (interface{}, error) { // for copying strings, which may be useful when extracing a small // substring from a large string. 
fn := func(node ast.Node) { - m, ok := Match(pass, checkSprintLiteralQ, node) + m, ok := code.Match(pass, checkSprintLiteralQ, node) if !ok { return } diff --git a/staticcheck.conf b/staticcheck.conf deleted file mode 100644 index 56ae61df9..000000000 --- a/staticcheck.conf +++ /dev/null @@ -1 +0,0 @@ -dot_import_whitelist = ["honnef.co/go/tools/lint/lintdsl", "honnef.co/go/tools/arg"] diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index 6590312d2..37f973388 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -1,9 +1,9 @@ package staticcheck import ( - "honnef.co/go/tools/facts" + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/lint/lintutil" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -18,7 +18,7 @@ func makeCallCheckerAnalyzer(rules map[string]CallCheck, extraReqs ...*analysis. } } -var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ +var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ "SA1000": makeCallCheckerAnalyzer(checkRegexpRules), "SA1001": { Run: CheckTemplate, diff --git a/staticcheck/buildtag.go b/staticcheck/buildtag.go index 58e1e4ae1..0ed3a93a6 100644 --- a/staticcheck/buildtag.go +++ b/staticcheck/buildtag.go @@ -4,7 +4,7 @@ import ( "go/ast" "strings" - "honnef.co/go/tools/code" + "honnef.co/go/tools/analysis/code" ) func buildTags(f *ast.File) [][]string { diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 17d28ad60..911b00001 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -1,6 +1,6 @@ package staticcheck -import "honnef.co/go/tools/lint" +import "honnef.co/go/tools/analysis/lint" var Docs = map[string]*lint.Documentation{ "SA1000": { diff --git a/staticcheck/lint.go b/staticcheck/lint.go index fc9863708..a344c36c3 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -1,5 +1,5 @@ // Package 
staticcheck contains a linter for Go source code. -package staticcheck // import "honnef.co/go/tools/staticcheck" +package staticcheck import ( "fmt" @@ -18,20 +18,18 @@ import ( texttemplate "text/template" "unicode" - . "honnef.co/go/tools/arg" - "honnef.co/go/tools/code" - "honnef.co/go/tools/deprecated" - "honnef.co/go/tools/edit" - "honnef.co/go/tools/facts" - "honnef.co/go/tools/functions" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/analysis/edit" + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/analysis/report" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/internal/sharedcheck" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/ir/irutil" - . "honnef.co/go/tools/lint/lintdsl" + "honnef.co/go/tools/knowledge" "honnef.co/go/tools/pattern" "honnef.co/go/tools/printf" - "honnef.co/go/tools/report" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -122,7 +120,7 @@ var ( checkTimeParseRules = map[string]CallCheck{ "time.Parse": func(call *Call) { - arg := call.Args[Arg("time.Parse.layout")] + arg := call.Args[knowledge.Arg("time.Parse.layout")] err := ValidateTimeLayout(arg.Value) if err != nil { arg.Invalid(err.Error()) @@ -132,7 +130,7 @@ var ( checkEncodingBinaryRules = map[string]CallCheck{ "encoding/binary.Write": func(call *Call) { - arg := call.Args[Arg("encoding/binary.Write.data")] + arg := call.Args[knowledge.Arg("encoding/binary.Write.data")] if !CanBinaryMarshal(call.Pass, arg.Value) { arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type())) } @@ -141,7 +139,7 @@ var ( checkURLsRules = map[string]CallCheck{ "net/url.Parse": func(call *Call) { - arg := call.Args[Arg("net/url.Parse.rawurl")] + arg := call.Args[knowledge.Arg("net/url.Parse.rawurl")] err := ValidateURL(arg.Value) if err != nil { arg.Invalid(err.Error()) @@ -151,7 +149,7 
@@ var ( checkSyncPoolValueRules = map[string]CallCheck{ "(*sync.Pool).Put": func(call *Call) { - arg := call.Args[Arg("(*sync.Pool).Put.x")] + arg := call.Args[knowledge.Arg("(*sync.Pool).Put.x")] typ := arg.Value.Value.Type() if !code.IsPointerLike(typ) { arg.Invalid("argument should be pointer-like to avoid allocations") @@ -195,7 +193,7 @@ var ( checkUnbufferedSignalChanRules = map[string]CallCheck{ "os/signal.Notify": func(call *Call) { - arg := call.Args[Arg("os/signal.Notify.c")] + arg := call.Args[knowledge.Arg("os/signal.Notify.c")] if UnbufferedChannel(arg.Value) { arg.Invalid("the channel used with signal.Notify should be buffered") } @@ -222,8 +220,8 @@ var ( checkBytesEqualIPRules = map[string]CallCheck{ "bytes.Equal": func(call *Call) { - if ConvertedFrom(call.Args[Arg("bytes.Equal.a")].Value, "net.IP") && - ConvertedFrom(call.Args[Arg("bytes.Equal.b")].Value, "net.IP") { + if ConvertedFrom(call.Args[knowledge.Arg("bytes.Equal.a")].Value, "net.IP") && + ConvertedFrom(call.Args[knowledge.Arg("bytes.Equal.b")].Value, "net.IP") { call.Invalid("use net.IP.Equal to compare net.IPs, not bytes.Equal") } }, @@ -241,22 +239,22 @@ var ( // Let's see if we encounter any false positives. // // Also, should we flag gob? 
- "encoding/json.Marshal": checkNoopMarshalImpl(Arg("json.Marshal.v"), "MarshalJSON", "MarshalText"), - "encoding/xml.Marshal": checkNoopMarshalImpl(Arg("xml.Marshal.v"), "MarshalXML", "MarshalText"), - "(*encoding/json.Encoder).Encode": checkNoopMarshalImpl(Arg("(*encoding/json.Encoder).Encode.v"), "MarshalJSON", "MarshalText"), - "(*encoding/xml.Encoder).Encode": checkNoopMarshalImpl(Arg("(*encoding/xml.Encoder).Encode.v"), "MarshalXML", "MarshalText"), + "encoding/json.Marshal": checkNoopMarshalImpl(knowledge.Arg("json.Marshal.v"), "MarshalJSON", "MarshalText"), + "encoding/xml.Marshal": checkNoopMarshalImpl(knowledge.Arg("xml.Marshal.v"), "MarshalXML", "MarshalText"), + "(*encoding/json.Encoder).Encode": checkNoopMarshalImpl(knowledge.Arg("(*encoding/json.Encoder).Encode.v"), "MarshalJSON", "MarshalText"), + "(*encoding/xml.Encoder).Encode": checkNoopMarshalImpl(knowledge.Arg("(*encoding/xml.Encoder).Encode.v"), "MarshalXML", "MarshalText"), - "encoding/json.Unmarshal": checkNoopMarshalImpl(Arg("json.Unmarshal.v"), "UnmarshalJSON", "UnmarshalText"), - "encoding/xml.Unmarshal": checkNoopMarshalImpl(Arg("xml.Unmarshal.v"), "UnmarshalXML", "UnmarshalText"), - "(*encoding/json.Decoder).Decode": checkNoopMarshalImpl(Arg("(*encoding/json.Decoder).Decode.v"), "UnmarshalJSON", "UnmarshalText"), - "(*encoding/xml.Decoder).Decode": checkNoopMarshalImpl(Arg("(*encoding/xml.Decoder).Decode.v"), "UnmarshalXML", "UnmarshalText"), + "encoding/json.Unmarshal": checkNoopMarshalImpl(knowledge.Arg("json.Unmarshal.v"), "UnmarshalJSON", "UnmarshalText"), + "encoding/xml.Unmarshal": checkNoopMarshalImpl(knowledge.Arg("xml.Unmarshal.v"), "UnmarshalXML", "UnmarshalText"), + "(*encoding/json.Decoder).Decode": checkNoopMarshalImpl(knowledge.Arg("(*encoding/json.Decoder).Decode.v"), "UnmarshalJSON", "UnmarshalText"), + "(*encoding/xml.Decoder).Decode": checkNoopMarshalImpl(knowledge.Arg("(*encoding/xml.Decoder).Decode.v"), "UnmarshalXML", "UnmarshalText"), } checkUnsupportedMarshal = 
map[string]CallCheck{ - "encoding/json.Marshal": checkUnsupportedMarshalImpl(Arg("json.Marshal.v"), "json", "MarshalJSON", "MarshalText"), - "encoding/xml.Marshal": checkUnsupportedMarshalImpl(Arg("xml.Marshal.v"), "xml", "MarshalXML", "MarshalText"), - "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/json.Encoder).Encode.v"), "json", "MarshalJSON", "MarshalText"), - "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/xml.Encoder).Encode.v"), "xml", "MarshalXML", "MarshalText"), + "encoding/json.Marshal": checkUnsupportedMarshalImpl(knowledge.Arg("json.Marshal.v"), "json", "MarshalJSON", "MarshalText"), + "encoding/xml.Marshal": checkUnsupportedMarshalImpl(knowledge.Arg("xml.Marshal.v"), "xml", "MarshalXML", "MarshalText"), + "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalImpl(knowledge.Arg("(*encoding/json.Encoder).Encode.v"), "json", "MarshalJSON", "MarshalText"), + "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalImpl(knowledge.Arg("(*encoding/xml.Encoder).Encode.v"), "xml", "MarshalXML", "MarshalText"), } checkAtomicAlignment = map[string]CallCheck{ @@ -818,7 +816,7 @@ func fieldPath(start types.Type, indices []int) string { } func isInLoop(b *ir.BasicBlock) bool { - sets := functions.FindLoops(b.Parent()) + sets := code.FindLoops(b.Parent()) for _, set := range sets { if set.Has(b) { return true @@ -858,7 +856,7 @@ func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { nargs := make([]ast.Expr, len(call.Args)) for j, a := range call.Args { if i == j { - nargs[j] = Selector("syscall", "SIGTERM") + nargs[j] = edit.Selector("syscall", "SIGTERM") } else { nargs[j] = a } @@ -918,7 +916,7 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) { // template comes from and where it has been return } - s, ok := code.ExprToString(pass, call.Args[Arg("(*text/template.Template).Parse.text")]) + s, ok := code.ExprToString(pass, 
call.Args[knowledge.Arg("(*text/template.Template).Parse.text")]) if !ok { return } @@ -932,7 +930,7 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) { if err != nil { // TODO(dominikh): whitelist other parse errors, if any if strings.Contains(err.Error(), "unexpected") { - report.Report(pass, call.Args[Arg("(*text/template.Template).Parse.text")], err.Error()) + report.Report(pass, call.Args[knowledge.Arg("(*text/template.Template).Parse.text")], err.Error()) } } } @@ -951,7 +949,7 @@ func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { if !code.IsCallToAST(pass, call, "time.Sleep") { return } - lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit) + lit, ok := call.Args[knowledge.Arg("time.Sleep.d")].(*ast.BasicLit) if !ok { return } @@ -984,7 +982,7 @@ var checkWaitgroupAddQ = pattern.MustParse(` func CheckWaitgroupAdd(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if m, ok := Match(pass, checkWaitgroupAddQ, node); ok { + if m, ok := code.Match(pass, checkWaitgroupAddQ, node); ok { call := m.State["call"].(ast.Node) report.Report(pass, call, fmt.Sprintf("should call %s before starting the goroutine to avoid a race", report.Render(pass, call))) } @@ -1163,7 +1161,7 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { } return true default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) return true } } @@ -1191,14 +1189,14 @@ func CheckExec(pass *analysis.Pass) (interface{}, error) { if !code.IsCallToAST(pass, call, "os/exec.Command") { return } - val, ok := code.ExprToString(pass, call.Args[Arg("os/exec.Command.name")]) + val, ok := code.ExprToString(pass, call.Args[knowledge.Arg("os/exec.Command.name")]) if !ok { return } if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { return } - report.Report(pass, call.Args[Arg("os/exec.Command.name")], + report.Report(pass, call.Args[knowledge.Arg("os/exec.Command.name")], "first 
argument to exec.Command looks like a shell command, but a program name or path are expected") } code.Preorder(pass, fn, (*ast.CallExpr)(nil)) @@ -1308,7 +1306,7 @@ func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) { case *ast.RangeStmt: body = node.Body default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) } for _, stmt := range body.List { var blocks [][]ast.Stmt @@ -1366,9 +1364,9 @@ func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) { switch name { case "fmt.Printf", "fmt.Sprintf", "log.Printf": - arg = Arg("fmt.Printf.format") + arg = knowledge.Arg("fmt.Printf.format") case "fmt.Fprintf": - arg = Arg("fmt.Fprintf.format") + arg = knowledge.Arg("fmt.Fprintf.format") default: return } @@ -1543,11 +1541,11 @@ var ( func CheckIneffectiveCopy(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if m, ok := Match(pass, checkIneffectiveCopyQ1, node); ok { + if m, ok := code.Match(pass, checkIneffectiveCopyQ1, node); ok { if ident, ok := m.State["obj"].(*ast.Ident); !ok || !cgoIdent.MatchString(ident.Name) { report.Report(pass, node, "&*x will be simplified to x. It will not copy x.") } - } else if _, ok := Match(pass, checkIneffectiveCopyQ2, node); ok { + } else if _, ok := code.Match(pass, checkIneffectiveCopyQ2, node); ok { report.Report(pass, node, "*&x will be simplified to x. 
It will not copy x.") } } @@ -1596,7 +1594,7 @@ func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { fix = edit.Fix("canonicalize header key", edit.ReplaceWithString(pass.Fset, op.Index, strconv.Quote(canonical))) case *ast.Ident: call := &ast.CallExpr{ - Fun: Selector("http", "CanonicalHeaderKey"), + Fun: edit.Selector("http", "CanonicalHeaderKey"), Args: []ast.Expr{op.Index}, } fix = edit.Fix("wrap in http.CanonicalHeaderKey", edit.ReplaceWithNode(pass.Fset, op.Index, call)) @@ -2009,7 +2007,9 @@ func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) { return true } - Inspect(fn.Source(), cb) + if source := fn.Source(); source != nil { + ast.Inspect(source, cb) + } } return nil, nil } @@ -2083,7 +2083,9 @@ func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) { } return true } - Inspect(fn.Source(), cb) + if source := fn.Source(); source != nil { + ast.Inspect(source, cb) + } } return nil, nil } @@ -2105,7 +2107,7 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { case *ast.FuncLit: body = fn.Body default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) } if body == nil { return @@ -2201,13 +2203,13 @@ var checkNilContextQ = pattern.MustParse(`(CallExpr fun@(Function _) (Builtin "n func CheckNilContext(pass *analysis.Pass) (interface{}, error) { todo := &ast.CallExpr{ - Fun: Selector("context", "TODO"), + Fun: edit.Selector("context", "TODO"), } bg := &ast.CallExpr{ - Fun: Selector("context", "Background"), + Fun: edit.Selector("context", "Background"), } fn := func(node ast.Node) { - m, ok := Match(pass, checkNilContextQ, node) + m, ok := code.Match(pass, checkNilContextQ, node) if !ok { return } @@ -2244,7 +2246,7 @@ var ( func CheckSeeker(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if _, edits, ok := MatchAndEdit(pass, checkSeekerQ, checkSeekerR, node); ok { + if _, edits, ok := code.MatchAndEdit(pass, checkSeekerQ, checkSeekerR, node); ok { 
report.Report(pass, node, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead", report.Fixes(edit.Fix("swap arguments", edits...))) } @@ -2402,7 +2404,7 @@ func CheckCyclicFinalizer(pass *analysis.Pass) (interface{}, error) { if callee.RelString(nil) != "runtime.SetFinalizer" { return } - arg0 := site.Common().Args[Arg("runtime.SetFinalizer.obj")] + arg0 := site.Common().Args[knowledge.Arg("runtime.SetFinalizer.obj")] if iface, ok := arg0.(*ir.MakeInterface); ok { arg0 = iface.X } @@ -2414,7 +2416,7 @@ func CheckCyclicFinalizer(pass *analysis.Pass) (interface{}, error) { if !ok { return } - arg1 := site.Common().Args[Arg("runtime.SetFinalizer.finalizer")] + arg1 := site.Common().Args[knowledge.Arg("runtime.SetFinalizer.finalizer")] if iface, ok := arg1.(*ir.MakeInterface); ok { arg1 = iface.X } @@ -2598,7 +2600,7 @@ func CheckLeakyTimeTick(pass *analysis.Pass) (interface{}, error) { if !ok || !code.IsCallTo(call.Common(), "time.Tick") { continue } - if !functions.Terminates(call.Parent()) { + if !code.Terminates(call.Parent()) { continue } report.Report(pass, call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") @@ -2612,7 +2614,7 @@ var checkDoubleNegationQ = pattern.MustParse(`(UnaryExpr "!" 
single@(UnaryExpr " func CheckDoubleNegation(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if m, ok := Match(pass, checkDoubleNegationQ, node); ok { + if m, ok := code.Match(pass, checkDoubleNegationQ, node); ok { report.Report(pass, node, "negating a boolean twice has no effect; is this a typo?", report.Fixes( edit.Fix("turn into single negation", edit.ReplaceWithNode(pass.Fset, node, m.State["single"].(ast.Node))), edit.Fix("remove double negation", edit.ReplaceWithNode(pass.Fset, node, m.State["x"].(ast.Node))))) @@ -2928,7 +2930,7 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { // already in 1.0, and we're targeting 1.2, it still // makes sense to use the alternative from 1.0, to be // future-proof. - minVersion := deprecated.Stdlib[code.SelectorName(pass, sel)].AlternativeAvailableSince + minVersion := knowledge.StdlibDeprecations[code.SelectorName(pass, sel)].AlternativeAvailableSince if !code.IsGoVersion(pass, minVersion) { return true } @@ -3140,7 +3142,9 @@ func CheckEmptyBranch(pass *analysis.Pass) (interface{}, error) { report.Report(pass, ifstmt, "empty branch", report.FilterGenerated(), report.ShortRange()) return true } - Inspect(fn.Source(), cb) + if source := fn.Source(); source != nil { + ast.Inspect(source, cb) + } } return nil, nil } @@ -3430,7 +3434,7 @@ var ( func CheckToLowerToUpperComparison(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - m, ok := Match(pass, checkToLowerToUpperComparisonQ, node) + m, ok := code.Match(pass, checkToLowerToUpperComparisonQ, node) if !ok { return } @@ -3522,7 +3526,7 @@ var checkSingleArgAppendQ = pattern.MustParse(`(CallExpr (Builtin "append") [_]) func CheckSingleArgAppend(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - _, ok := Match(pass, checkSingleArgAppendQ, node) + _, ok := code.Match(pass, checkSingleArgAppendQ, node) if !ok { return } @@ -3845,7 +3849,7 @@ var checkAddressIsNilQ = pattern.MustParse( func 
CheckAddressIsNil(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - _, ok := Match(pass, checkAddressIsNilQ, node) + _, ok := code.Match(pass, checkAddressIsNilQ, node) if !ok { return } diff --git a/staticcheck/rules.go b/staticcheck/rules.go index 57f7282de..b3200362f 100644 --- a/staticcheck/rules.go +++ b/staticcheck/rules.go @@ -13,9 +13,10 @@ import ( "time" "unicode/utf8" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/go/ir" + "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/code" - "honnef.co/go/tools/ir" ) const ( diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go index 7d8e6f3e0..cb722e815 100644 --- a/stylecheck/analysis.go +++ b/stylecheck/analysis.go @@ -1,16 +1,16 @@ package stylecheck import ( + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/config" - "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/lint/lintutil" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" ) -var Analyzers = lintutil.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ +var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ "ST1000": { Run: CheckPackageComment, }, diff --git a/stylecheck/doc.go b/stylecheck/doc.go index b8e7f3f9e..bceba59c8 100644 --- a/stylecheck/doc.go +++ b/stylecheck/doc.go @@ -1,6 +1,6 @@ package stylecheck -import "honnef.co/go/tools/lint" +import "honnef.co/go/tools/analysis/lint" var Docs = map[string]*lint.Documentation{ "ST1000": { diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 2055a3968..d8edd720e 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -1,4 +1,4 @@ -package stylecheck // import "honnef.co/go/tools/stylecheck" +package stylecheck import ( "fmt" @@ -12,14 +12,14 @@ import ( "unicode" "unicode/utf8" - "honnef.co/go/tools/code" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/analysis/edit" + 
"honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/config" - "honnef.co/go/tools/edit" + "honnef.co/go/tools/go/ir" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/ir" - . "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/pattern" - "honnef.co/go/tools/report" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -651,7 +651,7 @@ var ( func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if _, edits, ok := MatchAndEdit(pass, checkYodaConditionsQ, checkYodaConditionsR, node); ok { + if _, edits, ok := code.MatchAndEdit(pass, checkYodaConditionsQ, checkYodaConditionsR, node); ok { report.Report(pass, node, "don't use Yoda conditions", report.FilterGenerated(), report.Fixes(edit.Fix("un-Yoda-fy", edits...))) @@ -788,7 +788,7 @@ func CheckExportedFunctionDocs(pass *analysis.Pass) (interface{}, error) { return } default: - ExhaustiveTypeSwitch(T) + lint.ExhaustiveTypeSwitch(T) } } prefix := decl.Name.Name + " " @@ -856,7 +856,7 @@ func CheckExportedTypeDocs(pass *analysis.Pass) (interface{}, error) { case *ast.FuncLit, *ast.FuncDecl: return false default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) return false } } @@ -907,7 +907,7 @@ func CheckExportedVarDocs(pass *analysis.Pass) (interface{}, error) { case *ast.FuncLit, *ast.FuncDecl: return false default: - ExhaustiveTypeSwitch(node) + lint.ExhaustiveTypeSwitch(node) return false } } diff --git a/stylecheck/names.go b/stylecheck/names.go index ffc689e98..594bdf1f4 100644 --- a/stylecheck/names.go +++ b/stylecheck/names.go @@ -10,10 +10,11 @@ import ( "strings" "unicode" - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/code" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/config" - "honnef.co/go/tools/report" + + "golang.org/x/tools/go/analysis" ) // knownNameExceptions is a set of names that are known to 
be exempt from naming checks. diff --git a/unused/unused.go b/unused/unused.go index fc4130d73..17d840230 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -9,13 +9,15 @@ import ( "reflect" "strings" - "golang.org/x/tools/go/analysis" - "honnef.co/go/tools/code" - "honnef.co/go/tools/facts" + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/analysis/report" + "honnef.co/go/tools/go/ir" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/ir" - "honnef.co/go/tools/report" + + "golang.org/x/tools/go/analysis" ) var Debug io.Writer @@ -397,7 +399,7 @@ type pkg struct { TypesSizes types.Sizes IR *ir.Package SrcFuncs []*ir.Function - Directives []facts.Directive + Directives []lint.Directive } // TODO(dh): should we return a map instead of two slices? @@ -502,7 +504,7 @@ func debugf(f string, v ...interface{}) { func run(pass *analysis.Pass) (interface{}, error) { irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR) - dirs := pass.ResultOf[facts.Directives].([]facts.Directive) + dirs := pass.ResultOf[facts.Directives].([]lint.Directive) pkg := &pkg{ Fset: pass.Fset, Files: pass.Files, From e306c661babf7e7dc9f0d784f83d820d7999c20e Mon Sep 17 00:00:00 2001 From: Alexey Surikov Date: Sun, 9 Feb 2020 05:01:06 +0100 Subject: [PATCH 053/111] staticcheck: flag dubious bit shifting of fixed size integers --- staticcheck/analysis.go | 4 + staticcheck/doc.go | 23 +++++ staticcheck/lint.go | 54 ++++++++++++ staticcheck/lint_test.go | 1 + .../CheckStaticBitShift.go | 87 +++++++++++++++++++ 5 files changed, 169 insertions(+) create mode 100644 staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index 37f973388..76dcd4964 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -259,6 +259,10 @@ var Analyzers = lint.InitializeAnalyzers(Docs, 
map[string]*analysis.Analyzer{ }, // Filtering generated code because it may include empty structs generated from data models. "SA9005": makeCallCheckerAnalyzer(checkNoopMarshal, facts.Generated), + "SA9006": { + Run: CheckStaticBitShift, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + }, "SA4022": { Run: CheckAddressIsNil, diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 911b00001..a9b89fa31 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -877,4 +877,27 @@ marshaling behavior, e.g. via MarshalJSON methods. It will also not flag empty structs.`, Since: "2019.2", }, + + "SA9006": { + Title: `Dubious bit shifting of a fixed size integer value`, + Text: `Bit shifting a value past its size will always clear the value. + +For instance: + + v := int8(42) + v >>= 8 + +will always result in 0. + +This check flags bit shifting operations on fixed size integer values only. +That is, int, uint and uintptr are never flagged to avoid potential false +positives in somewhat exotic but valid bit twiddling tricks: + + // Clear any value above 32 bits if integers are more than 32 bits. 
+ func f(i int) int { + v := i >> 32 + v = v << 32 + return i-v + }`, + }, } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index a344c36c3..3603d3a72 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3858,3 +3858,57 @@ func CheckAddressIsNil(pass *analysis.Pass) (interface{}, error) { code.Preorder(pass, fn, (*ast.BinaryExpr)(nil)) return nil, nil } + +var ( + checkFixedLengthTypeShiftQ = pattern.MustParse(` + (Or + (AssignStmt _ (Or ">>=" "<<=") _) + (BinaryExpr _ (Or ">>" "<<") _)) + `) +) + +func CheckStaticBitShift(pass *analysis.Pass) (interface{}, error) { + isDubiousShift := func(x, y ast.Expr) (int64, int64, bool) { + typ, ok := pass.TypesInfo.TypeOf(x).(*types.Basic) + if !ok { + return 0, 0, false + } + switch typ.Kind() { + case types.Int8, types.Int16, types.Int32, types.Int64, + types.Uint8, types.Uint16, types.Uint32, types.Uint64: + // We're only interested in fixed–size types. + default: + return 0, 0, false + } + + const bitsInByte = 8 + typeBits := pass.TypesSizes.Sizeof(typ) * bitsInByte + + shiftLength, ok := code.ExprToInt(pass, y) + if !ok { + return 0, 0, false + } + + return typeBits, shiftLength, shiftLength >= typeBits + } + + fn := func(node ast.Node) { + if _, ok := code.Match(pass, checkFixedLengthTypeShiftQ, node); !ok { + return + } + + switch e := node.(type) { + case *ast.AssignStmt: + if size, shift, yes := isDubiousShift(e.Lhs[0], e.Rhs[0]); yes { + report.Report(pass, e, fmt.Sprintf("shifting %d-bit value by %d bits will always clear it", size, shift)) + } + case *ast.BinaryExpr: + if size, shift, yes := isDubiousShift(e.X, e.Y); yes { + report.Report(pass, e, fmt.Sprintf("shifting %d-bit value by %d bits will always clear it", size, shift)) + } + } + } + code.Preorder(pass, fn, (*ast.AssignStmt)(nil), (*ast.BinaryExpr)(nil)) + + return nil, nil +} diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index c5ea70161..2a1615696 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go 
@@ -90,6 +90,7 @@ func TestAll(t *testing.T) { "SA9003": {{Dir: "CheckEmptyBranch"}}, "SA9004": {{Dir: "CheckMissingEnumTypesInDeclaration"}}, "SA9005": {{Dir: "CheckNoopMarshal"}}, + "SA9006": {{Dir: "CheckStaticBitShift"}}, } testutil.Run(t, Analyzers, checks) diff --git a/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go b/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go new file mode 100644 index 000000000..efa583308 --- /dev/null +++ b/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go @@ -0,0 +1,87 @@ +package pkg + +// Partially copied from go vet's test suite. + +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE-THIRD-PARTY file. + +func fn() { + var i8 int8 + _ = i8 << 7 + _ = (i8 + 1) << 8 // want `will always clear it` + _ = i8 << (7 + 1) // want `will always clear it` + _ = i8 >> 8 // want `will always clear it` + i8 <<= 8 // want `will always clear it` + i8 >>= 8 // want `will always clear it` + + var i16 int16 + _ = i16 << 15 + _ = i16 << 16 // want `will always clear it` + _ = i16 >> 16 // want `will always clear it` + i16 <<= 16 // want `will always clear it` + i16 >>= 16 // want `will always clear it` + + var i32 int32 + _ = i32 << 31 + _ = i32 << 32 // want `will always clear it` + _ = i32 >> 32 // want `will always clear it` + i32 <<= 32 // want `will always clear it` + i32 >>= 32 // want `will always clear it` + + var i64 int64 + _ = i64 << 63 + _ = i64 << 64 // want `will always clear it` + _ = i64 >> 64 // want `will always clear it` + i64 <<= 64 // want `will always clear it` + i64 >>= 64 // want `will always clear it` + + var u8 uint8 + _ = u8 << 7 + _ = u8 << 8 // want `will always clear it` + _ = u8 >> 8 // want `will always clear it` + u8 <<= 8 // want `will always clear it` + u8 >>= 8 // want `will always clear it` + + var u16 uint16 + _ = u16 << 15 + _ = u16 << 16 // want `will 
always clear it` + _ = u16 >> 16 // want `will always clear it` + u16 <<= 16 // want `will always clear it` + u16 >>= 16 // want `will always clear it` + + var u32 uint32 + _ = u32 << 31 + _ = u32 << 32 // want `will always clear it` + _ = u32 >> 32 // want `will always clear it` + u32 <<= 32 // want `will always clear it` + u32 >>= 32 // want `will always clear it` + + var u64 uint64 + _ = u64 << 63 + _ = u64 << 64 // want `will always clear it` + _ = u64 >> 64 // want `will always clear it` + u64 <<= 64 // want `will always clear it` + u64 >>= 64 // want `will always clear it` + _ = u64 << u64 +} + +func fn1() { + var ui uint + _ = ui << 64 + _ = ui >> 64 + ui <<= 64 + ui >>= 64 + + var uptr uintptr + _ = uptr << 64 + _ = uptr >> 64 + uptr <<= 64 + uptr >>= 64 + + var i int + _ = i << 64 + _ = i >> 64 + i <<= 64 + i >>= 64 +} From dc48e03c6a7c6073a75a10cf87d90b0eea73b24b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 16 May 2020 16:54:39 +0200 Subject: [PATCH 054/111] SA9006: also check custom types --- staticcheck/lint.go | 2 +- .../testdata/src/CheckStaticBitShift/CheckStaticBitShift.go | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 3603d3a72..72c2bb5bf 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3869,7 +3869,7 @@ var ( func CheckStaticBitShift(pass *analysis.Pass) (interface{}, error) { isDubiousShift := func(x, y ast.Expr) (int64, int64, bool) { - typ, ok := pass.TypesInfo.TypeOf(x).(*types.Basic) + typ, ok := pass.TypesInfo.TypeOf(x).Underlying().(*types.Basic) if !ok { return 0, 0, false } diff --git a/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go b/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go index efa583308..c9db0c908 100644 --- a/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go +++ b/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go @@ -6,7 +6,12 @@ package pkg // Use of 
this source code is governed by a BSD-style // license that can be found in the LICENSE-THIRD-PARTY file. +type Number int8 + func fn() { + var n8 Number + n8 <<= 8 // want `will always clear it` + var i8 int8 _ = i8 << 7 _ = (i8 + 1) << 8 // want `will always clear it` From 1dc5519e1dce5be065fb2ec52c5a87e40e11029e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 16 May 2020 16:58:34 +0200 Subject: [PATCH 055/111] SA9005: add more unit tests Check for values that are larger than the type. All existing tests only checked with values that matched the size. --- .../src/CheckStaticBitShift/CheckStaticBitShift.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go b/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go index c9db0c908..d69fc0991 100644 --- a/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go +++ b/staticcheck/testdata/src/CheckStaticBitShift/CheckStaticBitShift.go @@ -19,6 +19,7 @@ func fn() { _ = i8 >> 8 // want `will always clear it` i8 <<= 8 // want `will always clear it` i8 >>= 8 // want `will always clear it` + i8 <<= 12 // want `will always clear it` var i16 int16 _ = i16 << 15 @@ -26,6 +27,7 @@ func fn() { _ = i16 >> 16 // want `will always clear it` i16 <<= 16 // want `will always clear it` i16 >>= 16 // want `will always clear it` + i16 <<= 18 // want `will always clear it` var i32 int32 _ = i32 << 31 @@ -33,6 +35,7 @@ func fn() { _ = i32 >> 32 // want `will always clear it` i32 <<= 32 // want `will always clear it` i32 >>= 32 // want `will always clear it` + i32 <<= 40 // want `will always clear it` var i64 int64 _ = i64 << 63 @@ -40,6 +43,7 @@ func fn() { _ = i64 >> 64 // want `will always clear it` i64 <<= 64 // want `will always clear it` i64 >>= 64 // want `will always clear it` + i64 <<= 70 // want `will always clear it` var u8 uint8 _ = u8 << 7 @@ -47,6 +51,7 @@ func fn() { _ = u8 >> 8 // want `will always clear it` u8 <<= 8 
// want `will always clear it` u8 >>= 8 // want `will always clear it` + u8 <<= 12 // want `will always clear it` var u16 uint16 _ = u16 << 15 @@ -54,6 +59,7 @@ func fn() { _ = u16 >> 16 // want `will always clear it` u16 <<= 16 // want `will always clear it` u16 >>= 16 // want `will always clear it` + u16 <<= 18 // want `will always clear it` var u32 uint32 _ = u32 << 31 @@ -61,6 +67,7 @@ func fn() { _ = u32 >> 32 // want `will always clear it` u32 <<= 32 // want `will always clear it` u32 >>= 32 // want `will always clear it` + u32 <<= 40 // want `will always clear it` var u64 uint64 _ = u64 << 63 @@ -68,6 +75,7 @@ func fn() { _ = u64 >> 64 // want `will always clear it` u64 <<= 64 // want `will always clear it` u64 >>= 64 // want `will always clear it` + u64 <<= 70 // want `will always clear it` _ = u64 << u64 } From 21d01cff3791d79442e81bc489b2f09082af2638 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 20 May 2020 12:08:21 +0200 Subject: [PATCH 056/111] Move most helpers out of analysis/code Most helpers that lived in analysis/code acted purely on either type information, the AST, or the IR. Such code should live in typeutil, astutil, and irutil respectively. This simplifies the import graph and avoids potential circular dependencies, where typeutil couldn't reuse helpers that existed in analysis/code, because analysis/code itself needed typeutil. At this point, analysis/code only contains helpers that act on both the AST and information derived from the analysis framework (most often a handle to TypeInfo). Most of these helpers could probably be turned into shims, to move the actual implementations into another package. We just use the *analysis.Pass as an easy way of getting to the type information of a package. 
--- analysis/code/code.go | 241 +----------------- analysis/code/stub.go | 10 - analysis/facts/purity.go | 30 +-- go/ast/astutil/util.go | 65 +++++ {analysis/code => go/ir/irutil}/loops.go | 2 +- go/ir/irutil/stub.go | 32 +++ {analysis/code => go/ir/irutil}/terminates.go | 2 +- go/ir/irutil/util.go | 54 ++++ go/types/typeutil/util.go | 131 ++++++++++ internal/sharedcheck/lint.go | 7 +- simple/lint.go | 21 +- staticcheck/buildtag.go | 4 +- staticcheck/lint.go | 106 ++++---- staticcheck/rules.go | 3 +- stylecheck/lint.go | 27 +- unused/unused.go | 15 +- 16 files changed, 386 insertions(+), 364 deletions(-) delete mode 100644 analysis/code/stub.go create mode 100644 go/ast/astutil/util.go rename {analysis/code => go/ir/irutil}/loops.go (98%) create mode 100644 go/ir/irutil/stub.go rename {analysis/code => go/ir/irutil}/terminates.go (99%) create mode 100644 go/types/typeutil/util.go diff --git a/analysis/code/code.go b/analysis/code/code.go index 515310b2a..cf7fd61e1 100644 --- a/analysis/code/code.go +++ b/analysis/code/code.go @@ -2,7 +2,6 @@ package code import ( - "bytes" "flag" "fmt" "go/ast" @@ -10,154 +9,20 @@ import ( "go/token" "go/types" "strings" - "sync" "honnef.co/go/tools/analysis/facts" - "honnef.co/go/tools/go/ir" "honnef.co/go/tools/go/types/typeutil" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" ) -var bufferPool = &sync.Pool{ - New: func() interface{} { - buf := bytes.NewBuffer(nil) - buf.Grow(64) - return buf - }, -} - -func FuncName(f *types.Func) string { - buf := bufferPool.Get().(*bytes.Buffer) - buf.Reset() - if f.Type() != nil { - sig := f.Type().(*types.Signature) - if recv := sig.Recv(); recv != nil { - buf.WriteByte('(') - if _, ok := recv.Type().(*types.Interface); ok { - // gcimporter creates abstract methods of - // named interfaces using the interface type - // (not the named type) as the receiver. - // Don't print it in full. 
- buf.WriteString("interface") - } else { - types.WriteType(buf, recv.Type(), nil) - } - buf.WriteByte(')') - buf.WriteByte('.') - } else if f.Pkg() != nil { - writePackage(buf, f.Pkg()) - } - } - buf.WriteString(f.Name()) - s := buf.String() - bufferPool.Put(buf) - return s -} - -func writePackage(buf *bytes.Buffer, pkg *types.Package) { - if pkg == nil { - return - } - s := pkg.Path() - if s != "" { - buf.WriteString(s) - buf.WriteByte('.') - } -} - type Positioner interface { Pos() token.Pos } -func CallName(call *ir.CallCommon) string { - if call.IsInvoke() { - return "" - } - switch v := call.Value.(type) { - case *ir.Function: - fn, ok := v.Object().(*types.Func) - if !ok { - return "" - } - return FuncName(fn) - case *ir.Builtin: - return v.Name() - } - return "" -} - -func IsCallTo(call *ir.CallCommon, name string) bool { return CallName(call) == name } - -func IsCallToAny(call *ir.CallCommon, names ...string) bool { - q := CallName(call) - for _, name := range names { - if q == name { - return true - } - } - return false -} - -// OPT(dh): IsType is kind of expensive; should we really use it? 
-func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name } - -func FilterDebug(instr []ir.Instruction) []ir.Instruction { - var out []ir.Instruction - for _, ins := range instr { - if _, ok := ins.(*ir.DebugRef); !ok { - out = append(out, ins) - } - } - return out -} - -func IsExample(fn *ir.Function) bool { - if !strings.HasPrefix(fn.Name(), "Example") { - return false - } - f := fn.Prog.Fset.File(fn.Pos()) - if f == nil { - return false - } - return strings.HasSuffix(f.Name(), "_test.go") -} - -func IsPointerLike(T types.Type) bool { - switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer: - return true - case *types.Basic: - return T.Kind() == types.UnsafePointer - } - return false -} - -func IsIdent(expr ast.Expr, ident string) bool { - id, ok := expr.(*ast.Ident) - return ok && id.Name == ident -} - -// isBlank returns whether id is the blank identifier "_". -// If id == nil, the answer is false. -func IsBlank(id ast.Expr) bool { - ident, _ := id.(*ast.Ident) - return ident != nil && ident.Name == "_" -} - -func IsIntLiteral(expr ast.Expr, literal string) bool { - lit, ok := expr.(*ast.BasicLit) - return ok && lit.Kind == token.INT && lit.Value == literal -} - -// Deprecated: use IsIntLiteral instead -func IsZero(expr ast.Expr) bool { - return IsIntLiteral(expr, "0") -} - func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool { - return IsType(pass.TypesInfo.TypeOf(expr), name) + return typeutil.IsType(pass.TypesInfo.TypeOf(expr), name) } func IsInTest(pass *analysis.Pass, node Positioner) bool { @@ -266,25 +131,6 @@ func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { return constant.StringVal(val), true } -// Dereference returns a pointer's element type; otherwise it returns -// T. 
-func Dereference(T types.Type) types.Type { - if p, ok := T.Underlying().(*types.Pointer); ok { - return p.Elem() - } - return T -} - -// DereferenceR returns a pointer's element type; otherwise it returns -// T. If the element type is itself a pointer, DereferenceR will be -// applied recursively. -func DereferenceR(T types.Type) types.Type { - if p, ok := T.Underlying().(*types.Pointer); ok { - return DereferenceR(p.Elem()) - } - return T -} - func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { switch fun := astutil.Unparen(call.Fun).(type) { case *ast.SelectorExpr: @@ -292,12 +138,12 @@ func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { if !ok { return "" } - return FuncName(fn) + return typeutil.FuncName(fn) case *ast.Ident: obj := pass.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: - return FuncName(obj) + return typeutil.FuncName(obj) case *types.Builtin: return obj.Name() default: @@ -330,87 +176,6 @@ func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool { return false } -func Preamble(f *ast.File) string { - cutoff := f.Package - if f.Doc != nil { - cutoff = f.Doc.Pos() - } - var out []string - for _, cmt := range f.Comments { - if cmt.Pos() >= cutoff { - break - } - out = append(out, cmt.Text()) - } - return strings.Join(out, "\n") -} - -func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec { - if len(specs) == 0 { - return nil - } - groups := make([][]ast.Spec, 1) - groups[0] = append(groups[0], specs[0]) - - for _, spec := range specs[1:] { - g := groups[len(groups)-1] - if fset.PositionFor(spec.Pos(), false).Line-1 != - fset.PositionFor(g[len(g)-1].End(), false).Line { - - groups = append(groups, nil) - } - - groups[len(groups)-1] = append(groups[len(groups)-1], spec) - } - - return groups -} - -func IsObject(obj types.Object, name string) bool { - var path string - if pkg := obj.Pkg(); pkg != nil { - path = pkg.Path() + "." 
- } - return path+obj.Name() == name -} - -type Field struct { - Var *types.Var - Tag string - Path []int -} - -// FlattenFields recursively flattens T and embedded structs, -// returning a list of fields. If multiple fields with the same name -// exist, all will be returned. -func FlattenFields(T *types.Struct) []Field { - return flattenFields(T, nil, nil) -} - -func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field { - if seen == nil { - seen = map[types.Type]bool{} - } - if seen[T] { - return nil - } - seen[T] = true - var out []Field - for i := 0; i < T.NumFields(); i++ { - field := T.Field(i) - tag := T.Tag(i) - np := append(path[:len(path):len(path)], i) - if field.Anonymous() { - if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { - out = append(out, flattenFields(s, np, seen)...) - } - } else { - out = append(out, Field{field, tag, np}) - } - } - return out -} - func File(pass *analysis.Pass, node Positioner) *ast.File { m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File) return m[pass.Fset.File(node.Pos())] diff --git a/analysis/code/stub.go b/analysis/code/stub.go deleted file mode 100644 index 284827409..000000000 --- a/analysis/code/stub.go +++ /dev/null @@ -1,10 +0,0 @@ -package code - -import ( - "honnef.co/go/tools/analysis/facts" - "honnef.co/go/tools/go/ir" -) - -func IsStub(fn *ir.Function) bool { - return facts.IsStub(fn) -} diff --git a/analysis/facts/purity.go b/analysis/facts/purity.go index d708c841c..582b6209e 100644 --- a/analysis/facts/purity.go +++ b/analysis/facts/purity.go @@ -5,6 +5,7 @@ import ( "reflect" "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" "honnef.co/go/tools/internal/passes/buildir" "golang.org/x/tools/go/analysis" @@ -54,33 +55,6 @@ var pureStdlib = map[string]struct{}{ "(*net/http.Request).WithContext": {}, } -// IsStub reports whether a function is a stub. 
A function is -// considered a stub if it has no instructions or if all it does is -// return a constant value. -func IsStub(fn *ir.Function) bool { - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - switch instr.(type) { - case *ir.Const: - // const naturally has no side-effects - case *ir.Panic: - // panic is a stub if it only uses constants - case *ir.Return: - // return is a stub if it only uses constants - case *ir.DebugRef: - case *ir.Jump: - // if there are no disallowed instructions, then we're - // only jumping to the exit block (or possibly - // somewhere else that's stubby?) - default: - // all other instructions are assumed to do actual work - return false - } - } - } - return true -} - func purity(pass *analysis.Pass) (interface{}, error) { seen := map[*ir.Function]struct{}{} irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg @@ -110,7 +84,7 @@ func purity(pass *analysis.Pass) (interface{}, error) { } }() - if IsStub(fn) { + if irutil.IsStub(fn) { return false } diff --git a/go/ast/astutil/util.go b/go/ast/astutil/util.go new file mode 100644 index 000000000..6edd5df6f --- /dev/null +++ b/go/ast/astutil/util.go @@ -0,0 +1,65 @@ +package astutil + +import ( + "go/ast" + "go/token" + "strings" +) + +func IsIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. 
+func IsBlank(id ast.Expr) bool { + ident, _ := id.(*ast.Ident) + return ident != nil && ident.Name == "_" +} + +func IsIntLiteral(expr ast.Expr, literal string) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == literal +} + +// Deprecated: use IsIntLiteral instead +func IsZero(expr ast.Expr) bool { + return IsIntLiteral(expr, "0") +} + +func Preamble(f *ast.File) string { + cutoff := f.Package + if f.Doc != nil { + cutoff = f.Doc.Pos() + } + var out []string + for _, cmt := range f.Comments { + if cmt.Pos() >= cutoff { + break + } + out = append(out, cmt.Text()) + } + return strings.Join(out, "\n") +} + +func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec { + if len(specs) == 0 { + return nil + } + groups := make([][]ast.Spec, 1) + groups[0] = append(groups[0], specs[0]) + + for _, spec := range specs[1:] { + g := groups[len(groups)-1] + if fset.PositionFor(spec.Pos(), false).Line-1 != + fset.PositionFor(g[len(g)-1].End(), false).Line { + + groups = append(groups, nil) + } + + groups[len(groups)-1] = append(groups[len(groups)-1], spec) + } + + return groups +} diff --git a/analysis/code/loops.go b/go/ir/irutil/loops.go similarity index 98% rename from analysis/code/loops.go rename to go/ir/irutil/loops.go index e2f263a84..751cc680b 100644 --- a/analysis/code/loops.go +++ b/go/ir/irutil/loops.go @@ -1,4 +1,4 @@ -package code +package irutil import "honnef.co/go/tools/go/ir" diff --git a/go/ir/irutil/stub.go b/go/ir/irutil/stub.go new file mode 100644 index 000000000..4311c7dbe --- /dev/null +++ b/go/ir/irutil/stub.go @@ -0,0 +1,32 @@ +package irutil + +import ( + "honnef.co/go/tools/go/ir" +) + +// IsStub reports whether a function is a stub. A function is +// considered a stub if it has no instructions or if all it does is +// return a constant value. 
+func IsStub(fn *ir.Function) bool { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case *ir.Const: + // const naturally has no side-effects + case *ir.Panic: + // panic is a stub if it only uses constants + case *ir.Return: + // return is a stub if it only uses constants + case *ir.DebugRef: + case *ir.Jump: + // if there are no disallowed instructions, then we're + // only jumping to the exit block (or possibly + // somewhere else that's stubby?) + default: + // all other instructions are assumed to do actual work + return false + } + } + } + return true +} diff --git a/analysis/code/terminates.go b/go/ir/irutil/terminates.go similarity index 99% rename from analysis/code/terminates.go rename to go/ir/irutil/terminates.go index 39d93129e..84e7503bb 100644 --- a/analysis/code/terminates.go +++ b/go/ir/irutil/terminates.go @@ -1,4 +1,4 @@ -package code +package irutil import ( "go/types" diff --git a/go/ir/irutil/util.go b/go/ir/irutil/util.go index badff17f2..dace40be0 100644 --- a/go/ir/irutil/util.go +++ b/go/ir/irutil/util.go @@ -1,7 +1,11 @@ package irutil import ( + "go/types" + "strings" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/types/typeutil" ) func Reachable(from, to *ir.BasicBlock) bool { @@ -68,3 +72,53 @@ func Vararg(x *ir.Slice) ([]ir.Value, bool) { } return out, true } + +func CallName(call *ir.CallCommon) string { + if call.IsInvoke() { + return "" + } + switch v := call.Value.(type) { + case *ir.Function: + fn, ok := v.Object().(*types.Func) + if !ok { + return "" + } + return typeutil.FuncName(fn) + case *ir.Builtin: + return v.Name() + } + return "" +} + +func IsCallTo(call *ir.CallCommon, name string) bool { return CallName(call) == name } + +func IsCallToAny(call *ir.CallCommon, names ...string) bool { + q := CallName(call) + for _, name := range names { + if q == name { + return true + } + } + return false +} + +func FilterDebug(instr []ir.Instruction) []ir.Instruction { + var out 
[]ir.Instruction + for _, ins := range instr { + if _, ok := ins.(*ir.DebugRef); !ok { + out = append(out, ins) + } + } + return out +} + +func IsExample(fn *ir.Function) bool { + if !strings.HasPrefix(fn.Name(), "Example") { + return false + } + f := fn.Prog.Fset.File(fn.Pos()) + if f == nil { + return false + } + return strings.HasSuffix(f.Name(), "_test.go") +} diff --git a/go/types/typeutil/util.go b/go/types/typeutil/util.go new file mode 100644 index 000000000..c96c1a7d3 --- /dev/null +++ b/go/types/typeutil/util.go @@ -0,0 +1,131 @@ +package typeutil + +import ( + "bytes" + "go/types" + "sync" +) + +var bufferPool = &sync.Pool{ + New: func() interface{} { + buf := bytes.NewBuffer(nil) + buf.Grow(64) + return buf + }, +} + +func FuncName(f *types.Func) string { + buf := bufferPool.Get().(*bytes.Buffer) + buf.Reset() + if f.Type() != nil { + sig := f.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + buf.WriteByte('(') + if _, ok := recv.Type().(*types.Interface); ok { + // gcimporter creates abstract methods of + // named interfaces using the interface type + // (not the named type) as the receiver. + // Don't print it in full. + buf.WriteString("interface") + } else { + types.WriteType(buf, recv.Type(), nil) + } + buf.WriteByte(')') + buf.WriteByte('.') + } else if f.Pkg() != nil { + writePackage(buf, f.Pkg()) + } + } + buf.WriteString(f.Name()) + s := buf.String() + bufferPool.Put(buf) + return s +} + +func writePackage(buf *bytes.Buffer, pkg *types.Package) { + if pkg == nil { + return + } + s := pkg.Path() + if s != "" { + buf.WriteString(s) + buf.WriteByte('.') + } +} + +// Dereference returns a pointer's element type; otherwise it returns +// T. +func Dereference(T types.Type) types.Type { + if p, ok := T.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return T +} + +// DereferenceR returns a pointer's element type; otherwise it returns +// T. 
If the element type is itself a pointer, DereferenceR will be +// applied recursively. +func DereferenceR(T types.Type) types.Type { + if p, ok := T.Underlying().(*types.Pointer); ok { + return DereferenceR(p.Elem()) + } + return T +} + +func IsObject(obj types.Object, name string) bool { + var path string + if pkg := obj.Pkg(); pkg != nil { + path = pkg.Path() + "." + } + return path+obj.Name() == name +} + +// OPT(dh): IsType is kind of expensive; should we really use it? +func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name } + +func IsPointerLike(T types.Type) bool { + switch T := T.Underlying().(type) { + case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer: + return true + case *types.Basic: + return T.Kind() == types.UnsafePointer + } + return false +} + +type Field struct { + Var *types.Var + Tag string + Path []int +} + +// FlattenFields recursively flattens T and embedded structs, +// returning a list of fields. If multiple fields with the same name +// exist, all will be returned. +func FlattenFields(T *types.Struct) []Field { + return flattenFields(T, nil, nil) +} + +func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field { + if seen == nil { + seen = map[types.Type]bool{} + } + if seen[T] { + return nil + } + seen[T] = true + var out []Field + for i := 0; i < T.NumFields(); i++ { + field := T.Field(i) + tag := T.Tag(i) + np := append(path[:len(path):len(path)], i) + if field.Anonymous() { + if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { + out = append(out, flattenFields(s, np, seen)...) 
+ } + } else { + out = append(out, Field{field, tag, np}) + } + } + return out +} diff --git a/internal/sharedcheck/lint.go b/internal/sharedcheck/lint.go index 6b0d31ba8..df6c82fb9 100644 --- a/internal/sharedcheck/lint.go +++ b/internal/sharedcheck/lint.go @@ -4,8 +4,9 @@ import ( "go/ast" "go/types" - "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" "honnef.co/go/tools/internal/passes/buildir" "golang.org/x/tools/go/analysis" @@ -15,7 +16,7 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { cb := func(node ast.Node) bool { rng, ok := node.(*ast.RangeStmt) - if !ok || !code.IsBlank(rng.Key) { + if !ok || !astutil.IsBlank(rng.Key) { return true } @@ -48,7 +49,7 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { // Expect two refs: one for obtaining the length of the slice, // one for accessing the elements - if len(code.FilterDebug(*refs)) != 2 { + if len(irutil.FilterDebug(*refs)) != 2 { // TODO(dh): right now, we check that only one place // refers to our slice. This will miss cases such as // ranging over the slice twice. 
Ideally, we'd ensure that diff --git a/simple/lint.go b/simple/lint.go index 0ade9bff8..9aca1b369 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -12,16 +12,19 @@ import ( "sort" "strings" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/types/typeutil" "honnef.co/go/tools/analysis/code" "honnef.co/go/tools/analysis/edit" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" + "honnef.co/go/tools/go/ast/astutil" + "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/knowledge" "honnef.co/go/tools/pattern" + + "golang.org/x/tools/go/analysis" + gotypeutil "golang.org/x/tools/go/types/typeutil" ) var ( @@ -497,7 +500,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { _, ok := expr.(*ast.BasicLit) if ok { - return true, code.IsIntLiteral(expr, "0") + return true, astutil.IsIntLiteral(expr, "0") } id, ok := expr.(*ast.Ident) if !ok { @@ -561,7 +564,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { return } - if eqNil && !code.IsIntLiteral(y.Y, "0") { // must be len(x) == *0* + if eqNil && !astutil.IsIntLiteral(y.Y, "0") { // must be len(x) == *0* return } @@ -769,14 +772,14 @@ func CheckUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { rs := node.(*ast.RangeStmt) // for _ - if rs.Value == nil && code.IsBlank(rs.Key) { + if rs.Value == nil && astutil.IsBlank(rs.Key) { report.Report(pass, rs.Key, "unnecessary assignment to the blank identifier", report.FilterGenerated(), report.Fixes(edit.Fix("remove assignment to blank identifier", edit.Delete(edit.Range{rs.Key.Pos(), rs.TokPos + 1})))) } // for _, _ - if code.IsBlank(rs.Key) && code.IsBlank(rs.Value) { + if astutil.IsBlank(rs.Key) && astutil.IsBlank(rs.Value) { // FIXME we should mark both key and value report.Report(pass, rs.Key, "unnecessary assignment 
to the blank identifier", report.FilterGenerated(), @@ -784,7 +787,7 @@ func CheckUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { } // for x, _ - if !code.IsBlank(rs.Key) && code.IsBlank(rs.Value) { + if !astutil.IsBlank(rs.Key) && astutil.IsBlank(rs.Value) { report.Report(pass, rs.Value, "unnecessary assignment to the blank identifier", report.FilterGenerated(), report.Fixes(edit.Fix("remove assignment to blank identifier", edit.Delete(edit.Range{rs.Key.End(), rs.Value.End()})))) @@ -1380,7 +1383,7 @@ func CheckRedundantBreak(pass *analysis.Pass) (interface{}, error) { return nil, nil } -func isStringer(T types.Type, msCache *typeutil.MethodSetCache) bool { +func isStringer(T types.Type, msCache *gotypeutil.MethodSetCache) bool { ms := msCache.MethodSet(T) sel := ms.Lookup(nil, "String") if sel == nil { @@ -1398,7 +1401,7 @@ func isStringer(T types.Type, msCache *typeutil.MethodSetCache) bool { if sig.Results().Len() != 1 { return false } - if !code.IsType(sig.Results().At(0).Type(), "string") { + if !typeutil.IsType(sig.Results().At(0).Type(), "string") { return false } return true diff --git a/staticcheck/buildtag.go b/staticcheck/buildtag.go index 0ed3a93a6..97ccf77b4 100644 --- a/staticcheck/buildtag.go +++ b/staticcheck/buildtag.go @@ -4,12 +4,12 @@ import ( "go/ast" "strings" - "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/go/ast/astutil" ) func buildTags(f *ast.File) [][]string { var out [][]string - for _, line := range strings.Split(code.Preamble(f), "\n") { + for _, line := range strings.Split(astutil.Preamble(f), "\n") { if !strings.HasPrefix(line, "+build ") { continue } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 72c2bb5bf..2b3b5e867 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -23,8 +23,10 @@ import ( "honnef.co/go/tools/analysis/facts" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" + "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/ir" 
"honnef.co/go/tools/go/ir/irutil" + "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/knowledge" @@ -33,9 +35,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/astutil" + goastutil "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" + gotypeutil "golang.org/x/tools/go/types/typeutil" ) func checkSortSlice(call *Call) { @@ -97,7 +99,7 @@ func unmarshalPointer(name string, arg int) CallCheck { func pointlessIntMath(call *Call) { if ConvertedFromInt(call.Args[0].Value) { - call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", code.CallName(call.Instr.Common()))) + call.Invalid(fmt.Sprintf("calling %s on a converted integer is pointless", irutil.CallName(call.Instr.Common()))) } } @@ -151,7 +153,7 @@ var ( "(*sync.Pool).Put": func(call *Call) { arg := call.Args[knowledge.Arg("(*sync.Pool).Put.x")] typ := arg.Value.Value.Type() - if !code.IsPointerLike(typ) { + if !typeutil.IsPointerLike(typ) { arg.Invalid("argument should be pointer-like to avoid allocations") } }, @@ -351,7 +353,7 @@ var verbs = [...]verbFlag{ } func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { - var msCache *typeutil.MethodSetCache + var msCache *gotypeutil.MethodSetCache if f.Parent() != nil { msCache = &f.Parent().Prog.MethodSets } @@ -365,7 +367,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { if verbs[verb]&isSlice != 0 { return []types.Type{T}, false } - if verbs[verb]&isString != 0 && code.IsType(T.Elem().Underlying(), "byte") { + if verbs[verb]&isString != 0 && typeutil.IsType(T.Elem().Underlying(), "byte") { return []types.Type{T}, false } return []types.Type{T.Elem()}, true @@ -407,7 +409,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { if sig.Results().Len() != 1 { 
return false } - if !code.IsType(sig.Results().At(0).Type(), "string") { + if !typeutil.IsType(sig.Results().At(0).Type(), "string") { return false } return true @@ -429,7 +431,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { if sig.Results().Len() != 1 { return false } - if !code.IsType(sig.Results().At(0).Type(), "string") { + if !typeutil.IsType(sig.Results().At(0).Type(), "string") { return false } return true @@ -493,10 +495,10 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { T = T.Underlying() if flags&(isPointer|isPseudoPointer) == 0 && top { - T = code.Dereference(T) + T = typeutil.Dereference(T) } if flags&isPseudoPointer != 0 && top { - t := code.Dereference(T) + t := typeutil.Dereference(T) if _, ok := t.Underlying().(*types.Struct); ok { T = t } @@ -547,7 +549,7 @@ func checkPrintfCallImpl(carg *Argument, f ir.Value, args []ir.Value) { } } - if flags&isPointer != 0 && code.IsPointerLike(T) { + if flags&isPointer != 0 && typeutil.IsPointerLike(T) { return true } if flags&isPseudoPointer != 0 { @@ -724,7 +726,7 @@ func checkAtomicAlignmentImpl(call *Call) { if off%8 != 0 { msg := fmt.Sprintf("address of non 64-bit aligned field %s passed to %s", T.Field(v.Field).Name(), - code.CallName(call.Instr.Common())) + irutil.CallName(call.Instr.Common())) call.Invalid(msg) } } @@ -736,14 +738,14 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { } arg := call.Args[argN] T := arg.Value.Value.Type() - Ts, ok := code.Dereference(T).Underlying().(*types.Struct) + Ts, ok := typeutil.Dereference(T).Underlying().(*types.Struct) if !ok { return } if Ts.NumFields() == 0 { return } - fields := code.FlattenFields(Ts) + fields := typeutil.FlattenFields(Ts) for _, field := range fields { if field.Var.Exported() { return @@ -769,7 +771,7 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec arg := call.Args[argN] T := arg.Value.Value.Type() - Ts, ok := 
code.Dereference(T).Underlying().(*types.Struct) + Ts, ok := typeutil.Dereference(T).Underlying().(*types.Struct) if !ok { return } @@ -781,7 +783,7 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec return } } - fields := code.FlattenFields(Ts) + fields := typeutil.FlattenFields(Ts) for _, field := range fields { if !(field.Var.Exported()) { continue @@ -808,7 +810,7 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec func fieldPath(start types.Type, indices []int) string { p := start.String() for _, idx := range indices { - field := code.Dereference(start).Underlying().(*types.Struct).Field(idx) + field := typeutil.Dereference(start).Underlying().(*types.Struct).Field(idx) start = field.Type() p += "." + field.Name() } @@ -816,7 +818,7 @@ func fieldPath(start types.Type, indices []int) string { } func isInLoop(b *ir.BasicBlock) bool { - sets := code.FindLoops(b.Parent()) + sets := irutil.FindLoops(b.Parent()) for _, set := range sets { if set.Has(b) { return true @@ -1635,7 +1637,7 @@ func CheckBenchmarkN(pass *analysis.Pass) (interface{}, error) { func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) { for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { - if code.IsExample(fn) { + if irutil.IsExample(fn) { continue } node := fn.Source() @@ -1828,7 +1830,7 @@ func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) { if !ok { return false } - return code.IsObject(pass.TypesInfo.ObjectOf(sel.Sel), name) + return typeutil.IsObject(pass.TypesInfo.ObjectOf(sel.Sel), name) } fn := func(node ast.Node) { @@ -1881,12 +1883,12 @@ func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) { } if (basic.Info() & types.IsUnsigned) != 0 { - if (expr.Op == token.LSS && code.IsIntLiteral(expr.Y, "0")) || - (expr.Op == token.GTR && code.IsIntLiteral(expr.X, "0")) { + if (expr.Op == token.LSS && astutil.IsIntLiteral(expr.Y, "0")) || + (expr.Op == 
token.GTR && astutil.IsIntLiteral(expr.X, "0")) { report.Report(pass, expr, fmt.Sprintf("no value of type %s is less than 0", basic)) } - if expr.Op == token.GEQ && code.IsIntLiteral(expr.Y, "0") || - expr.Op == token.LEQ && code.IsIntLiteral(expr.X, "0") { + if expr.Op == token.GEQ && astutil.IsIntLiteral(expr.Y, "0") || + expr.Op == token.LEQ && astutil.IsIntLiteral(expr.X, "0") { report.Report(pass, expr, fmt.Sprintf("every value of type %s is >= 0", basic)) } } else { @@ -2050,7 +2052,7 @@ func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) { if refs == nil { continue } - if len(code.FilterDebug(*refs)) != 0 { + if len(irutil.FilterDebug(*refs)) != 0 { continue } @@ -2227,7 +2229,7 @@ func CheckNilContext(pass *analysis.Pass) (interface{}, error) { // the Foo method, but the method receiver. return } - if !code.IsType(sig.Params().At(0).Type(), "context.Context") { + if !typeutil.IsType(sig.Params().At(0).Type(), "context.Context") { return } report.Report(pass, call.Args[0], @@ -2361,7 +2363,7 @@ func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) { if recv == nil { continue } - if !code.IsType(recv.Type(), "*testing.common") { + if !typeutil.IsType(recv.Type(), "*testing.common") { continue } fn, ok := call.Call.StaticCallee().Object().(*types.Func) @@ -2467,7 +2469,7 @@ func CheckSliceOutOfBounds(pass *analysis.Pass) (interface{}, error) { func CheckDeferLock(pass *analysis.Pass) (interface{}, error) { for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { for _, block := range fn.Blocks { - instrs := code.FilterDebug(block.Instrs) + instrs := irutil.FilterDebug(block.Instrs) if len(instrs) < 2 { continue } @@ -2476,14 +2478,14 @@ func CheckDeferLock(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - if !code.IsCallToAny(call.Common(), "(*sync.Mutex).Lock", "(*sync.RWMutex).RLock") { + if !irutil.IsCallToAny(call.Common(), "(*sync.Mutex).Lock", "(*sync.RWMutex).RLock") { continue } nins, ok 
:= instrs[i+1].(*ir.Defer) if !ok { continue } - if !code.IsCallToAny(&nins.Call, "(*sync.Mutex).Lock", "(*sync.RWMutex).RLock") { + if !irutil.IsCallToAny(&nins.Call, "(*sync.Mutex).Lock", "(*sync.RWMutex).RLock") { continue } if call.Common().Args[0] != nins.Call.Args[0] { @@ -2510,7 +2512,7 @@ func CheckNaNComparison(pass *analysis.Pass) (interface{}, error) { if !ok { return false } - return code.IsCallTo(call.Common(), "math.NaN") + return irutil.IsCallTo(call.Common(), "math.NaN") } for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { for _, block := range fn.Blocks { @@ -2597,10 +2599,10 @@ func CheckLeakyTimeTick(pass *analysis.Pass) (interface{}, error) { for _, block := range fn.Blocks { for _, ins := range block.Instrs { call, ok := ins.(*ir.Call) - if !ok || !code.IsCallTo(call.Common(), "time.Tick") { + if !ok || !irutil.IsCallTo(call.Common(), "time.Tick") { continue } - if !code.Terminates(call.Parent()) { + if !irutil.Terminates(call.Parent()) { continue } report.Report(pass, call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") @@ -2695,12 +2697,12 @@ func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { // of a pattern, x<<0, x<<8, x<<16, ... 
continue } - path, _ := astutil.PathEnclosingInterval(code.File(pass, ins), ins.Pos(), ins.Pos()) + path, _ := goastutil.PathEnclosingInterval(code.File(pass, ins), ins.Pos(), ins.Pos()) if len(path) == 0 { continue } - if node, ok := path[0].(*ast.BinaryExpr); !ok || !code.IsIntLiteral(node.Y, "0") { + if node, ok := path[0].(*ast.BinaryExpr); !ok || !astutil.IsIntLiteral(node.Y, "0") { continue } @@ -2743,7 +2745,7 @@ func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { if v, _ := constant.Int64Val(obj.Val()); v != 0 { return } - path, _ := astutil.PathEnclosingInterval(code.File(pass, obj), obj.Pos(), obj.Pos()) + path, _ := goastutil.PathEnclosingInterval(code.File(pass, obj), obj.Pos(), obj.Pos()) if len(path) < 2 { return } @@ -2771,7 +2773,7 @@ func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { fmt.Sprintf("%s always equals %s; %s is defined as iota and has value 0, maybe %s is meant to be 1 << iota?", report.Render(pass, binop), report.Render(pass, binop.X), report.Render(pass, binop.Y), report.Render(pass, binop.Y))) } case *ast.BasicLit: - if !code.IsIntLiteral(binop.Y, "0") { + if !astutil.IsIntLiteral(binop.Y, "0") { return } switch binop.Op { @@ -2809,7 +2811,7 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { n := sig.Params().Len() for i := 0; i < n; i++ { typ := sig.Params().At(i).Type() - if !code.IsType(typ, "os.FileMode") { + if !typeutil.IsType(typ, "os.FileMode") { continue } @@ -2845,7 +2847,7 @@ fnLoop: params := fn.Signature.Params() for i := 0; i < params.Len(); i++ { param := params.At(i) - if code.IsType(param.Type(), "*testing.B") { + if typeutil.IsType(param.Type(), "*testing.B") { // Ignore discarded pure functions in code related // to benchmarks. 
Instead of matching BenchmarkFoo // functions, we match any function accepting a @@ -2865,7 +2867,7 @@ fnLoop: continue } refs := ins.Referrers() - if refs == nil || len(code.FilterDebug(*refs)) > 0 { + if refs == nil || len(irutil.FilterDebug(*refs)) > 0 { continue } @@ -2987,7 +2989,7 @@ func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, e return } - r, ok := rules[code.FuncName(obj)] + r, ok := rules[typeutil.FuncName(obj)] if !ok { return } @@ -3009,7 +3011,7 @@ func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, e Parent: site.Parent(), } r(call) - path, _ := astutil.PathEnclosingInterval(code.File(pass, site), site.Pos(), site.Pos()) + path, _ := goastutil.PathEnclosingInterval(code.File(pass, site), site.Pos(), site.Pos()) var astcall *ast.CallExpr for _, el := range path { if expr, ok := el.(*ast.CallExpr); ok { @@ -3073,7 +3075,7 @@ func CheckWriterBufferModified(pass *analysis.Pass) (interface{}, error) { if basic, ok := sig.Results().At(0).Type().(*types.Basic); !ok || basic.Kind() != types.Int { continue } - if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || !code.IsType(named, "error") { + if named, ok := sig.Results().At(1).Type().(*types.Named); !ok || !typeutil.IsType(named, "error") { continue } @@ -3090,7 +3092,7 @@ func CheckWriterBufferModified(pass *analysis.Pass) (interface{}, error) { } report.Report(pass, ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") case *ir.Call: - if !code.IsCallTo(ins.Common(), "append") { + if !irutil.IsCallTo(ins.Common(), "append") { continue } if ins.Common().Args[0] != fn.Params[1] { @@ -3121,7 +3123,7 @@ func CheckEmptyBranch(pass *analysis.Pass) (interface{}, error) { if fn.Source() == nil { continue } - if code.IsExample(fn) { + if irutil.IsExample(fn) { continue } cb := func(node ast.Node) bool { @@ -3281,7 +3283,7 @@ func CheckSillyRegexp(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - if 
!code.IsCallToAny(call.Common(), "regexp.MustCompile", "regexp.Compile", "regexp.Match", "regexp.MatchReader", "regexp.MatchString") { + if !irutil.IsCallToAny(call.Common(), "regexp.MustCompile", "regexp.Compile", "regexp.Match", "regexp.MatchReader", "regexp.MatchString") { continue } c, ok := call.Common().Args[0].(*ir.Const) @@ -3313,7 +3315,7 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error return } - groups := code.GroupSpecs(pass.Fset, decl.Specs) + groups := astutil.GroupSpecs(pass.Fset, decl.Specs) groupLoop: for _, group := range groups { if len(group) < 2 { @@ -3368,14 +3370,14 @@ func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - if !code.IsCallTo(call.Common(), "(*time.Timer).Reset") { + if !irutil.IsCallTo(call.Common(), "(*time.Timer).Reset") { continue } refs := call.Referrers() if refs == nil { continue } - for _, ref := range code.FilterDebug(*refs) { + for _, ref := range irutil.FilterDebug(*refs) { ifstmt, ok := ref.(*ir.If) if !ok { continue @@ -3404,7 +3406,7 @@ func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) { // priority, considering the rarity of // Reset and the tiny likeliness of a // false positive - if ins, ok := ins.(*ir.Recv); ok && code.IsType(ins.Chan.Type(), "<-chan time.Time") { + if ins, ok := ins.(*ir.Recv); ok && typeutil.IsType(ins.Chan.Type(), "<-chan time.Time") { found = true return false } @@ -3610,7 +3612,7 @@ func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) { case "string": cs++ // only for string, floating point, integer and bool - T := code.Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() + T := typeutil.Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() basic, ok := T.(*types.Basic) if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { report.Report(pass, field.Tag, "the JSON string option only applies to 
fields of type string, floating point, integer or bool, or pointers to those") @@ -3786,7 +3788,7 @@ func CheckMaybeNil(pass *analysis.Pass) (interface{}, error) { // We choose to err on the side of false negatives. isNilConst := func(v ir.Value) bool { - if code.IsPointerLike(v.Type()) { + if typeutil.IsPointerLike(v.Type()) { if k, ok := v.(*ir.Const); ok { return k.IsNil() } diff --git a/staticcheck/rules.go b/staticcheck/rules.go index b3200362f..bcadabfaf 100644 --- a/staticcheck/rules.go +++ b/staticcheck/rules.go @@ -15,6 +15,7 @@ import ( "honnef.co/go/tools/analysis/code" "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/types/typeutil" "golang.org/x/tools/go/analysis" ) @@ -289,7 +290,7 @@ func ValidHostPort(v Value) bool { // ConvertedFrom reports whether value v was converted from type typ. func ConvertedFrom(v Value, typ string) bool { change, ok := v.Value.(*ir.ChangeType) - return ok && code.IsType(change.X.Type(), typ) + return ok && typeutil.IsType(change.X.Type(), typ) } func UniqueStringCutset(v Value) bool { diff --git a/stylecheck/lint.go b/stylecheck/lint.go index d8edd720e..8e551fdf0 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -17,14 +17,17 @@ import ( "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/config" + "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/pattern" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" + gotypeutil "golang.org/x/tools/go/types/typeutil" ) func CheckPackageComment(pass *analysis.Pass) (interface{}, error) { @@ -151,7 +154,7 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) { for i, imp := range f.Imports { pos := fset.Position(imp.Pos()) - if !code.IsBlank(imp.Name) { + if 
!astutil.IsBlank(imp.Name) { continue } // Only flag the first blank import in a group of imports, @@ -160,7 +163,7 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) { if i > 0 { prev := f.Imports[i-1] prevPos := fset.Position(prev.Pos()) - if pos.Line-1 == prevPos.Line && code.IsBlank(prev.Name) { + if pos.Line-1 == prevPos.Line && astutil.IsBlank(prev.Name) { continue } } @@ -186,7 +189,7 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) { return } if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) || - !code.IsIntLiteral(assign.Rhs[0], "1") { + !astutil.IsIntLiteral(assign.Rhs[0], "1") { return } @@ -239,12 +242,12 @@ func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) { continue } sig := fn.Type().(*types.Signature) - if sig.Recv() != nil && !ast.IsExported(code.Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) { + if sig.Recv() != nil && !ast.IsExported(typeutil.Dereference(sig.Recv().Type()).(*types.Named).Obj().Name()) { continue } res := sig.Results() for i := 0; i < res.Len(); i++ { - if named, ok := code.DereferenceR(res.At(i).Type()).(*types.Named); ok && + if named, ok := typeutil.DereferenceR(res.At(i).Type()).(*types.Named); ok && !ast.IsExported(named.Obj().Name()) && named != types.Universe.Lookup("error").Type() { report.Report(pass, fn, "should not return unexported type") @@ -258,11 +261,11 @@ func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) { irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg for _, m := range irpkg.Members { if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { - ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + ms := gotypeutil.IntuitiveMethodSet(T.Type(), nil) for _, sel := range ms { fn := sel.Obj().(*types.Func) recv := fn.Type().(*types.Signature).Recv() - if code.Dereference(recv.Type()) != T.Type() { + if typeutil.Dereference(recv.Type()) != T.Type() { // skip embedded methods continue } @@ -285,7 +288,7 @@ func 
CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) { var firstFn *types.Func if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { - ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + ms := gotypeutil.IntuitiveMethodSet(T.Type(), nil) for _, sel := range ms { fn := sel.Obj().(*types.Func) recv := fn.Type().(*types.Signature).Recv() @@ -293,7 +296,7 @@ func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) { // Don't concern ourselves with methods in generated code continue } - if code.Dereference(recv.Type()) != T.Type() { + if typeutil.Dereference(recv.Type()) != T.Type() { // skip embedded methods continue } @@ -372,7 +375,7 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) { if !ok { continue } - if !code.IsCallToAny(call.Common(), "errors.New", "fmt.Errorf") { + if !irutil.IsCallToAny(call.Common(), "errors.New", "fmt.Errorf") { continue } @@ -446,7 +449,7 @@ func CheckTimeNames(pass *analysis.Pass) (interface{}, error) { continue } T := pass.TypesInfo.TypeOf(name) - if !code.IsType(T, "time.Duration") && !code.IsType(T, "*time.Duration") { + if !typeutil.IsType(T, "time.Duration") && !typeutil.IsType(T, "*time.Duration") { continue } for _, suffix := range suffixes { diff --git a/unused/unused.go b/unused/unused.go index 17d840230..c0cb7fc98 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -13,6 +13,7 @@ import ( "honnef.co/go/tools/analysis/facts" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" + "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/ir" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" @@ -987,7 +988,7 @@ func (g *graph) entry(pkg *pkg) { case *ast.GenDecl: switch n.Tok { case token.CONST: - groups := code.GroupSpecs(pkg.Fset, n.Specs) + groups := astutil.GroupSpecs(pkg.Fset, n.Specs) for _, specs := range groups { if len(specs) > 1 { cg := &constGroup{} @@ -1230,13 +1231,13 @@ func (g *graph) useMethod(t 
types.Type, sel *types.Selection, by interface{}, ki path := sel.Index() assert(obj != nil) if len(path) > 1 { - base := code.Dereference(t).Underlying().(*types.Struct) + base := typeutil.Dereference(t).Underlying().(*types.Struct) for _, idx := range path[:len(path)-1] { next := base.Field(idx) // (6.3) structs use embedded fields that help implement interfaces g.see(base) g.seeAndUse(next, base, edgeProvidesMethod) - base, _ = code.Dereference(next.Type()).Underlying().(*types.Struct) + base, _ = typeutil.Dereference(next.Type()).Underlying().(*types.Struct) } } g.seeAndUse(obj, by, kind) @@ -1329,7 +1330,7 @@ func (g *graph) typ(t types.Type, parent types.Type) { seen := map[*types.Struct]struct{}{} var hasExportedField func(t types.Type) bool hasExportedField = func(T types.Type) bool { - t, ok := code.Dereference(T).Underlying().(*types.Struct) + t, ok := typeutil.Dereference(T).Underlying().(*types.Struct) if !ok { return false } @@ -1512,7 +1513,7 @@ func (g *graph) instructions(fn *ir.Function) { // (4.7) functions use fields they access g.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.FieldAddr: - st := code.Dereference(instr.X.Type()).Underlying().(*types.Struct) + st := typeutil.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access g.seeAndUse(field, fnObj, edgeFieldAccess) @@ -1531,8 +1532,8 @@ func (g *graph) instructions(fn *ir.Function) { case *ir.ChangeType: // conversion type handled generically - s1, ok1 := code.Dereference(instr.Type()).Underlying().(*types.Struct) - s2, ok2 := code.Dereference(instr.X.Type()).Underlying().(*types.Struct) + s1, ok1 := typeutil.Dereference(instr.Type()).Underlying().(*types.Struct) + s2, ok2 := typeutil.Dereference(instr.X.Type()).Underlying().(*types.Struct) if ok1 && ok2 { // Converting between two structs. 
The fields are // relevant for the conversion, but only if the From 18aa9f1a8c7467b591c8fc08abbae28b6637fe5f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 5 Jun 2020 13:17:06 +0200 Subject: [PATCH 057/111] analysis/code: remove AST suffices from function names Since we have moved most helpers into astutil and typeutil, we no longer need the AST suffix to differentiate helpers. --- analysis/code/code.go | 10 +++++----- simple/lint.go | 16 ++++++++-------- staticcheck/lint.go | 14 +++++++------- stylecheck/lint.go | 4 ++-- 4 files changed, 22 insertions(+), 22 deletions(-) diff --git a/analysis/code/code.go b/analysis/code/code.go index cf7fd61e1..74ea6962c 100644 --- a/analysis/code/code.go +++ b/analysis/code/code.go @@ -131,7 +131,7 @@ func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { return constant.StringVal(val), true } -func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { +func CallName(pass *analysis.Pass, call *ast.CallExpr) string { switch fun := astutil.Unparen(call.Fun).(type) { case *ast.SelectorExpr: fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func) @@ -154,20 +154,20 @@ func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { } } -func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool { +func IsCallTo(pass *analysis.Pass, node ast.Node, name string) bool { call, ok := node.(*ast.CallExpr) if !ok { return false } - return CallNameAST(pass, call) == name + return CallName(pass, call) == name } -func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool { +func IsCallToAny(pass *analysis.Pass, node ast.Node, names ...string) bool { call, ok := node.(*ast.CallExpr) if !ok { return false } - q := CallNameAST(pass, call) + q := CallName(pass, call) for _, name := range names { if q == name { return true diff --git a/simple/lint.go b/simple/lint.go index 9aca1b369..8bff7b08d 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -172,7 +172,7 @@ func 
CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { sel := m.State["sel"].(*ast.SelectorExpr) typ := pass.TypesInfo.TypeOf(call.Fun) - if typ == types.Universe.Lookup("string").Type() && code.IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") { + if typ == types.Universe.Lookup("string").Type() && code.IsCallTo(pass, call.Args[0], "(*bytes.Buffer).Bytes") { if _, ok := stack[len(stack)-2].(*ast.IndexExpr); ok { // Don't flag m[string(buf.Bytes())] – thanks to a // compiler optimization, this is actually faster than @@ -183,7 +183,7 @@ func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { report.Report(pass, call, fmt.Sprintf("should use %v.String() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)), report.FilterGenerated(), report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRs, m.State, node)))) - } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && code.IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).String") { + } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && code.IsCallTo(pass, call.Args[0], "(*bytes.Buffer).String") { report.Report(pass, call, fmt.Sprintf("should use %v.Bytes() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)), report.FilterGenerated(), report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRb, m.State, node)))) @@ -337,7 +337,7 @@ func CheckForTrue(pass *analysis.Pass) (interface{}, error) { func CheckRegexpRaw(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !code.IsCallToAnyAST(pass, call, "regexp.MustCompile", "regexp.Compile") { + if !code.IsCallToAny(pass, call, "regexp.MustCompile", "regexp.Compile") { return } sel, ok := call.Fun.(*ast.SelectorExpr) @@ -970,7 +970,7 @@ func CheckTrim(pass 
*analysis.Pass) (interface{}, error) { return } - condCallName := code.CallNameAST(pass, condCall) + condCallName := code.CallName(pass, condCall) switch condCallName { case "strings.HasPrefix": pkg = "strings" @@ -1014,7 +1014,7 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) { return } - rhsName := code.CallNameAST(pass, rhs) + rhsName := code.CallName(pass, rhs) if condCallName == "strings.HasPrefix" && rhsName == "strings.TrimPrefix" || condCallName == "strings.HasSuffix" && rhsName == "strings.TrimSuffix" || condCallName == "strings.Contains" && rhsName == "strings.Replace" || @@ -1550,7 +1550,7 @@ func CheckSortHelpers(pass *analysis.Pass) (interface{}, error) { if permissible { return false } - if !code.IsCallToAST(pass, node, "sort.Sort") { + if !code.IsCallTo(pass, node, "sort.Sort") { return true } if isPermissibleSort(pass, node) { @@ -1708,14 +1708,14 @@ func CheckSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) { func CheckRedundantCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - callName := code.CallNameAST(pass, call) + callName := code.CallName(pass, call) switch callName { case "(net/http.Header).Add", "(net/http.Header).Del", "(net/http.Header).Get", "(net/http.Header).Set": default: return } - if !code.IsCallToAST(pass, call.Args[0], "net/http.CanonicalHeaderKey") { + if !code.IsCallTo(pass, call.Args[0], "net/http.CanonicalHeaderKey") { return } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 2b3b5e867..5430d9ea8 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -830,7 +830,7 @@ func isInLoop(b *ir.BasicBlock) bool { func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !code.IsCallToAnyAST(pass, call, + if !code.IsCallToAny(pass, call, "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { return } @@ -902,7 +902,7 @@ func 
CheckTemplate(pass *analysis.Pass) (interface{}, error) { call := node.(*ast.CallExpr) // OPT(dh): use integer for kind var kind string - switch code.CallNameAST(pass, call) { + switch code.CallName(pass, call) { case "(*text/template.Template).Parse": kind = "text" case "(*html/template.Template).Parse": @@ -911,7 +911,7 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) { return } sel := call.Fun.(*ast.SelectorExpr) - if !code.IsCallToAnyAST(pass, sel.X, "text/template.New", "html/template.New") { + if !code.IsCallToAny(pass, sel.X, "text/template.New", "html/template.New") { // TODO(dh): this is a cheap workaround for templates with // different delims. A better solution with less false // negatives would use data flow analysis to see where the @@ -948,7 +948,7 @@ var ( func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !code.IsCallToAST(pass, call, "time.Sleep") { + if !code.IsCallTo(pass, call, "time.Sleep") { return } lit, ok := call.Args[knowledge.Arg("time.Sleep.d")].(*ast.BasicLit) @@ -1142,7 +1142,7 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { arg = pass.TypesInfo.ObjectOf(node.Type.Params.List[0].Names[0]) return true case *ast.CallExpr: - if code.IsCallToAST(pass, node, "os.Exit") { + if code.IsCallTo(pass, node, "os.Exit") { callsExit = true return false } @@ -1188,7 +1188,7 @@ func isTestMain(pass *analysis.Pass, decl *ast.FuncDecl) bool { func CheckExec(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !code.IsCallToAST(pass, call, "os/exec.Command") { + if !code.IsCallTo(pass, call, "os/exec.Command") { return } val, ok := code.ExprToString(pass, call.Args[knowledge.Arg("os/exec.Command.name")]) @@ -1361,7 +1361,7 @@ func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) { func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { 
call := node.(*ast.CallExpr) - name := code.CallNameAST(pass, call) + name := code.CallName(pass, call) var arg int switch name { diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 8e551fdf0..8ed7ad5af 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -502,7 +502,7 @@ func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) { for i, name := range spec.Names { val := spec.Values[i] - if !code.IsCallToAnyAST(pass, val, "errors.New", "fmt.Errorf") { + if !code.IsCallToAny(pass, val, "errors.New", "fmt.Errorf") { continue } @@ -596,7 +596,7 @@ func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) { call := node.(*ast.CallExpr) var arg int - switch code.CallNameAST(pass, call) { + switch code.CallName(pass, call) { case "net/http.Error": arg = 2 case "net/http.Redirect": From 41bedbdbf9e6154d6edae49010d46bd375b076f2 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 6 Jun 2020 14:01:35 +0200 Subject: [PATCH 058/111] unused: don't use typeutil.Map Under the current architecture of 'unused', we no longer need typeutil.Map to deduplicate certain types. Named types are canonical by default, we already didn't deduplicate structs or interfaces in our fork of typeutil.Map, and deduplicating any other type is merely an optimization. This optimization, too, doesn't matter anymore, because we no longer keep a single graph around for all packages, and the size of individual graphs isn't as impacted by the optimization. 
--- go/types/typeutil/example_test.go | 67 ------- go/types/typeutil/identical.go | 149 -------------- go/types/typeutil/map.go | 319 ------------------------------ go/types/typeutil/map_test.go | 174 ---------------- unused/unused.go | 39 ++-- 5 files changed, 20 insertions(+), 728 deletions(-) delete mode 100644 go/types/typeutil/example_test.go delete mode 100644 go/types/typeutil/identical.go delete mode 100644 go/types/typeutil/map.go delete mode 100644 go/types/typeutil/map_test.go diff --git a/go/types/typeutil/example_test.go b/go/types/typeutil/example_test.go deleted file mode 100644 index 60c4cb5a0..000000000 --- a/go/types/typeutil/example_test.go +++ /dev/null @@ -1,67 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeutil_test - -import ( - "fmt" - "go/ast" - "go/parser" - "go/token" - "go/types" - "sort" - - "honnef.co/go/tools/go/types/typeutil" -) - -func ExampleMap() { - const source = `package P - -var X []string -var Y []string - -const p, q = 1.0, 2.0 - -func f(offset int32) (value byte, ok bool) -func g(rune) (uint8, bool) -` - - // Parse and type-check the package. - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, "P.go", source, 0) - if err != nil { - panic(err) - } - pkg, err := new(types.Config).Check("P", fset, []*ast.File{f}, nil) - if err != nil { - panic(err) - } - - scope := pkg.Scope() - - // Group names of package-level objects by their type. - var namesByType typeutil.Map // value is []string - for _, name := range scope.Names() { - T := scope.Lookup(name).Type() - - names, _ := namesByType.At(T).([]string) - names = append(names, name) - namesByType.Set(T, names) - } - - // Format, sort, and print the map entries. 
- var lines []string - namesByType.Iterate(func(T types.Type, names interface{}) { - lines = append(lines, fmt.Sprintf("%s %s", names, T)) - }) - sort.Strings(lines) - for _, line := range lines { - fmt.Println(line) - } - - // Output: - // [X Y] []string - // [f g] func(offset int32) (value byte, ok bool) - // [p q] untyped float -} diff --git a/go/types/typeutil/identical.go b/go/types/typeutil/identical.go deleted file mode 100644 index 0cd82e8c0..000000000 --- a/go/types/typeutil/identical.go +++ /dev/null @@ -1,149 +0,0 @@ -package typeutil - -import ( - "go/types" -) - -// Unlike types.Identical, receivers of Signature types are not ignored. -// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). -// Unlike types.Identical, structs are compared via pointer equality. -func identical0(x, y types.Type) bool { - if x == y { - return true - } - - switch x := x.(type) { - case *types.Basic: - // Basic types are singletons except for the rune and byte - // aliases, thus we cannot solely rely on the x == y check - // above. See also comment in TypeName.IsAlias. - if y, ok := y.(*types.Basic); ok { - return x.Kind() == y.Kind() - } - - case *types.Array: - // Two array types are identical if they have identical element types - // and the same array length. - if y, ok := y.(*types.Array); ok { - // If one or both array lengths are unknown (< 0) due to some error, - // assume they are the same to avoid spurious follow-on errors. - return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && identical0(x.Elem(), y.Elem()) - } - - case *types.Slice: - // Two slice types are identical if they have identical element types. - if y, ok := y.(*types.Slice); ok { - return identical0(x.Elem(), y.Elem()) - } - - case *types.Struct: - if y, ok := y.(*types.Struct); ok { - return x == y - } - - case *types.Pointer: - // Two pointer types are identical if they have identical base types. 
- if y, ok := y.(*types.Pointer); ok { - return identical0(x.Elem(), y.Elem()) - } - - case *types.Tuple: - // Two tuples types are identical if they have the same number of elements - // and corresponding elements have identical types. - if y, ok := y.(*types.Tuple); ok { - if x.Len() == y.Len() { - if x != nil { - for i := 0; i < x.Len(); i++ { - v := x.At(i) - w := y.At(i) - if !identical0(v.Type(), w.Type()) { - return false - } - } - } - return true - } - } - - case *types.Signature: - // Two function types are identical if they have the same number of parameters - // and result values, corresponding parameter and result types are identical, - // and either both functions are variadic or neither is. Parameter and result - // names are not required to match. - if y, ok := y.(*types.Signature); ok { - - return x.Variadic() == y.Variadic() && - identical0(x.Params(), y.Params()) && - identical0(x.Results(), y.Results()) && - (x.Recv() != nil && y.Recv() != nil && identical0(x.Recv().Type(), y.Recv().Type()) || x.Recv() == nil && y.Recv() == nil) - } - - case *types.Interface: - // The issue with interfaces, typeutil.Map and types.Identical - // - // types.Identical, when comparing two interfaces, only looks at the set - // of all methods, not differentiating between implicit (embedded) and - // explicit methods. - // - // When we see the following two types, in source order - // - // type I1 interface { foo() } - // type I2 interface { I1 } - // - // then we will first correctly process I1 and its underlying type. When - // we get to I2, we will see that its underlying type is identical to - // that of I1 and not process it again. This, however, means that we will - // not record the fact that I2 embeds I1. If only I2 is reachable via the - // graph root, then I1 will not be considered used. - // - // We choose to be lazy and compare interfaces by their - // pointers. 
This will obviously miss identical interfaces, - // but this only has a runtime cost, it doesn't affect - // correctness. - if y, ok := y.(*types.Interface); ok { - if x.NumEmbeddeds() == 0 && - y.NumEmbeddeds() == 0 && - x.NumMethods() == 0 && - y.NumMethods() == 0 { - // all truly empty interfaces are the same - return true - } - return x == y - } - - case *types.Map: - // Two map types are identical if they have identical key and value types. - if y, ok := y.(*types.Map); ok { - return identical0(x.Key(), y.Key()) && identical0(x.Elem(), y.Elem()) - } - - case *types.Chan: - // Two channel types are identical if they have identical value types - // and the same direction. - if y, ok := y.(*types.Chan); ok { - return x.Dir() == y.Dir() && identical0(x.Elem(), y.Elem()) - } - - case *types.Named: - // Two named types are identical if their type names originate - // in the same type declaration. - if y, ok := y.(*types.Named); ok { - return x.Obj() == y.Obj() - } - - case nil: - - default: - panic("unreachable") - } - - return false -} - -// Identical reports whether x and y are identical types. -// Unlike types.Identical, receivers of Signature types are not ignored. -// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). -// Unlike types.Identical, structs are compared via pointer equality. -func Identical(x, y types.Type) (ret bool) { - return identical0(x, y) -} diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go deleted file mode 100644 index f929353cc..000000000 --- a/go/types/typeutil/map.go +++ /dev/null @@ -1,319 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package typeutil defines various utilities for types, such as Map, -// a mapping from types.Type to interface{} values. 
-package typeutil - -import ( - "bytes" - "fmt" - "go/types" - "reflect" -) - -// Map is a hash-table-based mapping from types (types.Type) to -// arbitrary interface{} values. The concrete types that implement -// the Type interface are pointers. Since they are not canonicalized, -// == cannot be used to check for equivalence, and thus we cannot -// simply use a Go map. -// -// Just as with map[K]V, a nil *Map is a valid empty map. -// -// Not thread-safe. -// -// This fork handles Signatures correctly, respecting method -// receivers. Furthermore, it doesn't deduplicate interfaces or -// structs. Interfaces aren't deduplicated as not to conflate implicit -// and explicit methods. Structs aren't deduplicated because we track -// fields of each type separately. -// -type Map struct { - hasher Hasher // shared by many Maps - table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused - length int // number of map entries -} - -// entry is an entry (key/value association) in a hash bucket. -type entry struct { - key types.Type - value interface{} -} - -// SetHasher sets the hasher used by Map. -// -// All Hashers are functionally equivalent but contain internal state -// used to cache the results of hashing previously seen types. -// -// A single Hasher created by MakeHasher() may be shared among many -// Maps. This is recommended if the instances have many keys in -// common, as it will amortize the cost of hash computation. -// -// A Hasher may grow without bound as new types are seen. Even when a -// type is deleted from the map, the Hasher never shrinks, since other -// types in the map may reference the deleted type indirectly. -// -// Hashers are not thread-safe, and read-only operations such as -// Map.Lookup require updates to the hasher, so a full Mutex lock (not a -// read-lock) is require around all Map operations if a shared -// hasher is accessed from multiple threads. 
-// -// If SetHasher is not called, the Map will create a private hasher at -// the first call to Insert. -// -func (m *Map) SetHasher(hasher Hasher) { - m.hasher = hasher -} - -// Delete removes the entry with the given key, if any. -// It returns true if the entry was found. -// -func (m *Map) Delete(key types.Type) bool { - if m != nil && m.table != nil { - hash := m.hasher.Hash(key) - bucket := m.table[hash] - for i, e := range bucket { - if e.key != nil && Identical(key, e.key) { - // We can't compact the bucket as it - // would disturb iterators. - bucket[i] = entry{} - m.length-- - return true - } - } - } - return false -} - -// At returns the map entry for the given key. -// The result is nil if the entry is not present. -// -func (m *Map) At(key types.Type) interface{} { - if m != nil && m.table != nil { - for _, e := range m.table[m.hasher.Hash(key)] { - if e.key != nil && Identical(key, e.key) { - return e.value - } - } - } - return nil -} - -// Set sets the map entry for key to val, -// and returns the previous entry, if any. -func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { - if m.table != nil { - hash := m.hasher.Hash(key) - bucket := m.table[hash] - var hole *entry - for i, e := range bucket { - if e.key == nil { - hole = &bucket[i] - } else if Identical(key, e.key) { - prev = e.value - bucket[i].value = value - return - } - } - - if hole != nil { - *hole = entry{key, value} // overwrite deleted entry - } else { - m.table[hash] = append(bucket, entry{key, value}) - } - } else { - if m.hasher.memo == nil { - m.hasher = MakeHasher() - } - hash := m.hasher.Hash(key) - m.table = map[uint32][]entry{hash: {entry{key, value}}} - } - - m.length++ - return -} - -// Len returns the number of map entries. -func (m *Map) Len() int { - if m != nil { - return m.length - } - return 0 -} - -// Iterate calls function f on each entry in the map in unspecified order. 
-// -// If f should mutate the map, Iterate provides the same guarantees as -// Go maps: if f deletes a map entry that Iterate has not yet reached, -// f will not be invoked for it, but if f inserts a map entry that -// Iterate has not yet reached, whether or not f will be invoked for -// it is unspecified. -// -func (m *Map) Iterate(f func(key types.Type, value interface{})) { - if m != nil { - for _, bucket := range m.table { - for _, e := range bucket { - if e.key != nil { - f(e.key, e.value) - } - } - } - } -} - -// Keys returns a new slice containing the set of map keys. -// The order is unspecified. -func (m *Map) Keys() []types.Type { - keys := make([]types.Type, 0, m.Len()) - m.Iterate(func(key types.Type, _ interface{}) { - keys = append(keys, key) - }) - return keys -} - -func (m *Map) toString(values bool) string { - if m == nil { - return "{}" - } - var buf bytes.Buffer - fmt.Fprint(&buf, "{") - sep := "" - m.Iterate(func(key types.Type, value interface{}) { - fmt.Fprint(&buf, sep) - sep = ", " - fmt.Fprint(&buf, key) - if values { - fmt.Fprintf(&buf, ": %q", value) - } - }) - fmt.Fprint(&buf, "}") - return buf.String() -} - -// String returns a string representation of the map's entries. -// Values are printed using fmt.Sprintf("%v", v). -// Order is unspecified. -// -func (m *Map) String() string { - return m.toString(true) -} - -// KeysString returns a string representation of the map's key set. -// Order is unspecified. -// -func (m *Map) KeysString() string { - return m.toString(false) -} - -//////////////////////////////////////////////////////////////////////// -// Hasher - -// A Hasher maps each type to its hash value. -// For efficiency, a hasher uses memoization; thus its memory -// footprint grows monotonically over time. -// Hashers are not thread-safe. -// Hashers have reference semantics. -// Call MakeHasher to create a Hasher. -type Hasher struct { - memo map[types.Type]uint32 -} - -// MakeHasher returns a new Hasher instance. 
-func MakeHasher() Hasher { - return Hasher{make(map[types.Type]uint32)} -} - -// Hash computes a hash value for the given type t such that -// Identical(t, t') => Hash(t) == Hash(t'). -func (h Hasher) Hash(t types.Type) uint32 { - hash, ok := h.memo[t] - if !ok { - hash = h.hashFor(t) - h.memo[t] = hash - } - return hash -} - -// hashString computes the Fowler–Noll–Vo hash of s. -func hashString(s string) uint32 { - var h uint32 - for i := 0; i < len(s); i++ { - h ^= uint32(s[i]) - h *= 16777619 - } - return h -} - -// hashFor computes the hash of t. -func (h Hasher) hashFor(t types.Type) uint32 { - // See Identical for rationale. - switch t := t.(type) { - case *types.Basic: - return uint32(t.Kind()) - - case *types.Array: - return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) - - case *types.Slice: - return 9049 + 2*h.Hash(t.Elem()) - - case *types.Struct: - var hash uint32 = 9059 - for i, n := 0, t.NumFields(); i < n; i++ { - f := t.Field(i) - if f.Anonymous() { - hash += 8861 - } - hash += hashString(t.Tag(i)) - hash += hashString(f.Name()) // (ignore f.Pkg) - hash += h.Hash(f.Type()) - } - return hash - - case *types.Pointer: - return 9067 + 2*h.Hash(t.Elem()) - - case *types.Signature: - var hash uint32 = 9091 - if t.Variadic() { - hash *= 8863 - } - return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) - - case *types.Interface: - var hash uint32 = 9103 - for i, n := 0, t.NumMethods(); i < n; i++ { - // See go/types.identicalMethods for rationale. - // Method order is not significant. - // Ignore m.Pkg(). - m := t.Method(i) - hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) - } - return hash - - case *types.Map: - return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) - - case *types.Chan: - return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) - - case *types.Named: - // Not safe with a copying GC; objects may move. 
- return uint32(reflect.ValueOf(t.Obj()).Pointer()) - - case *types.Tuple: - return h.hashTuple(t) - } - panic(t) -} - -func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { - // See go/types.identicalTypes for rationale. - n := tuple.Len() - var hash uint32 = 9137 + 2*uint32(n) - for i := 0; i < n; i++ { - hash += 3 * h.Hash(tuple.At(i).Type()) - } - return hash -} diff --git a/go/types/typeutil/map_test.go b/go/types/typeutil/map_test.go deleted file mode 100644 index dcc10d9a0..000000000 --- a/go/types/typeutil/map_test.go +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package typeutil_test - -// TODO(adonovan): -// - test use of explicit hasher across two maps. -// - test hashcodes are consistent with equals for a range of types -// (e.g. all types generated by type-checking some body of real code). - -import ( - "go/types" - "testing" - - "honnef.co/go/tools/go/types/typeutil" -) - -var ( - tStr = types.Typ[types.String] // string - tPStr1 = types.NewPointer(tStr) // *string - tPStr2 = types.NewPointer(tStr) // *string, again - tInt = types.Typ[types.Int] // int - tChanInt1 = types.NewChan(types.RecvOnly, tInt) // <-chan int - tChanInt2 = types.NewChan(types.RecvOnly, tInt) // <-chan int, again -) - -func checkEqualButNotIdentical(t *testing.T, x, y types.Type, comment string) { - if !types.Identical(x, y) { - t.Errorf("%s: not equal: %s, %s", comment, x, y) - } - if x == y { - t.Errorf("%s: identical: %v, %v", comment, x, y) - } -} - -func TestAxioms(t *testing.T) { - checkEqualButNotIdentical(t, tPStr1, tPStr2, "tPstr{1,2}") - checkEqualButNotIdentical(t, tChanInt1, tChanInt2, "tChanInt{1,2}") -} - -func TestMap(t *testing.T) { - var tmap *typeutil.Map - - // All methods but Set are safe on on (*T)(nil). 
- _ = tmap.Len() - _ = tmap.At(tPStr1) - _ = tmap.Delete(tPStr1) - _ = tmap.KeysString() - _ = tmap.String() - - tmap = new(typeutil.Map) - - // Length of empty map. - if l := tmap.Len(); l != 0 { - t.Errorf("Len() on empty Map: got %d, want 0", l) - } - // At of missing key. - if v := tmap.At(tPStr1); v != nil { - t.Errorf("At() on empty Map: got %v, want nil", v) - } - // Deletion of missing key. - if tmap.Delete(tPStr1) { - t.Errorf("Delete() on empty Map: got true, want false") - } - // Set of new key. - if prev := tmap.Set(tPStr1, "*string"); prev != nil { - t.Errorf("Set() on empty Map returned non-nil previous value %s", prev) - } - - // Now: {*string: "*string"} - - // Length of non-empty map. - if l := tmap.Len(); l != 1 { - t.Errorf("Len(): got %d, want 1", l) - } - // At via insertion key. - if v := tmap.At(tPStr1); v != "*string" { - t.Errorf("At(): got %q, want \"*string\"", v) - } - // At via equal key. - if v := tmap.At(tPStr2); v != "*string" { - t.Errorf("At(): got %q, want \"*string\"", v) - } - // Iteration over sole entry. - tmap.Iterate(func(key types.Type, value interface{}) { - if key != tPStr1 { - t.Errorf("Iterate: key: got %s, want %s", key, tPStr1) - } - if want := "*string"; value != want { - t.Errorf("Iterate: value: got %s, want %s", value, want) - } - }) - - // Setion with key equal to present one. - if prev := tmap.Set(tPStr2, "*string again"); prev != "*string" { - t.Errorf("Set() previous value: got %s, want \"*string\"", prev) - } - - // Setion of another association. 
- if prev := tmap.Set(tChanInt1, "<-chan int"); prev != nil { - t.Errorf("Set() previous value: got %s, want nil", prev) - } - - // Now: {*string: "*string again", <-chan int: "<-chan int"} - - want1 := "{*string: \"*string again\", <-chan int: \"<-chan int\"}" - want2 := "{<-chan int: \"<-chan int\", *string: \"*string again\"}" - if s := tmap.String(); s != want1 && s != want2 { - t.Errorf("String(): got %s, want %s", s, want1) - } - - want1 = "{*string, <-chan int}" - want2 = "{<-chan int, *string}" - if s := tmap.KeysString(); s != want1 && s != want2 { - t.Errorf("KeysString(): got %s, want %s", s, want1) - } - - // Keys(). - I := types.Identical - switch k := tmap.Keys(); { - case I(k[0], tChanInt1) && I(k[1], tPStr1): // ok - case I(k[1], tChanInt1) && I(k[0], tPStr1): // ok - default: - t.Errorf("Keys(): got %v, want %s", k, want2) - } - - if l := tmap.Len(); l != 2 { - t.Errorf("Len(): got %d, want 1", l) - } - // At via original key. - if v := tmap.At(tPStr1); v != "*string again" { - t.Errorf("At(): got %q, want \"*string again\"", v) - } - hamming := 1 - tmap.Iterate(func(key types.Type, value interface{}) { - switch { - case I(key, tChanInt1): - hamming *= 2 // ok - case I(key, tPStr1): - hamming *= 3 // ok - } - }) - if hamming != 6 { - t.Errorf("Iterate: hamming: got %d, want %d", hamming, 6) - } - - if v := tmap.At(tChanInt2); v != "<-chan int" { - t.Errorf("At(): got %q, want \"<-chan int\"", v) - } - // Deletion with key equal to present one. - if !tmap.Delete(tChanInt2) { - t.Errorf("Delete() of existing key: got false, want true") - } - - // Now: {*string: "*string again"} - - if l := tmap.Len(); l != 1 { - t.Errorf("Len(): got %d, want 1", l) - } - // Deletion again. 
- if !tmap.Delete(tPStr2) { - t.Errorf("Delete() of existing key: got false, want true") - } - - // Now: {} - - if l := tmap.Len(); l != 0 { - t.Errorf("Len(): got %d, want %d", l, 0) - } - if s := tmap.String(); s != "{}" { - t.Errorf("Len(): got %q, want %q", s, "") - } -} diff --git a/unused/unused.go b/unused/unused.go index c0cb7fc98..6c49574d3 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -549,9 +549,9 @@ func run(pass *analysis.Pass) (interface{}, error) { for _, v := range c.graph.Nodes { debugNode(v) } - c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { - debugNode(value.(*node)) - }) + for _, node := range c.graph.TypeNodes { + debugNode(node) + } debugf("}\n") } @@ -561,10 +561,9 @@ func run(pass *analysis.Pass) (interface{}, error) { func (c *checker) results() (used, unused []types.Object) { c.graph.color(c.graph.Root) - c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - node := value.(*node) + for _, node := range c.graph.TypeNodes { if node.seen { - return + continue } switch obj := node.obj.(type) { case *types.Struct: @@ -581,7 +580,7 @@ func (c *checker) results() (used, unused []types.Object) { } } } - }) + } // OPT(dh): can we find meaningful initial capacities for the used and unused slices? 
@@ -617,9 +616,9 @@ func (c *checker) results() (used, unused []types.Object) { type graph struct { Root *node - seenTypes typeutil.Map + seenTypes map[types.Type]struct{} - TypeNodes typeutil.Map + TypeNodes map[types.Type]*node Nodes map[interface{}]*node // context @@ -630,9 +629,11 @@ type graph struct { func newGraph(pkg *pkg) *graph { g := &graph{ - Nodes: map[interface{}]*node{}, - seenFns: map[string]struct{}{}, - pkg: pkg, + Nodes: map[interface{}]*node{}, + seenFns: map[string]struct{}{}, + seenTypes: map[types.Type]struct{}{}, + TypeNodes: map[types.Type]*node{}, + pkg: pkg, } g.Root = g.newNode(nil) return g @@ -683,11 +684,11 @@ func (g *graph) nodeMaybe(obj types.Object) (*node, bool) { func (g *graph) node(obj interface{}) (n *node, new bool) { switch obj := obj.(type) { case types.Type: - if v := g.TypeNodes.At(obj); v != nil { - return v.(*node), false + if v := g.TypeNodes[obj]; v != nil { + return v, false } n = g.newNode(obj) - g.TypeNodes.Set(obj, n) + g.TypeNodes[obj] = n return n, true case types.Object: // OPT(dh): the types.Object and default cases are identical @@ -1128,7 +1129,7 @@ func (g *graph) entry(pkg *pkg) { var ifaces []*types.Interface var notIfaces []types.Type - g.seenTypes.Iterate(func(t types.Type, _ interface{}) { + for t := range g.seenTypes { switch t := t.(type) { case *types.Interface: // OPT(dh): (8.1) we only need interfaces that have unexported methods @@ -1138,7 +1139,7 @@ func (g *graph) entry(pkg *pkg) { notIfaces = append(notIfaces, t) } } - }) + } // (8.0) handle interfaces for _, t := range notIfaces { @@ -1278,7 +1279,7 @@ func (g *graph) function(fn *ir.Function) { } func (g *graph) typ(t types.Type, parent types.Type) { - if g.seenTypes.At(t) != nil { + if _, ok := g.seenTypes[t]; ok { return } @@ -1288,7 +1289,7 @@ func (g *graph) typ(t types.Type, parent types.Type) { } } - g.seenTypes.Set(t, struct{}{}) + g.seenTypes[t] = struct{}{} if isIrrelevant(t) { return } From 
13ec319710618d910d2e3bf5ff1396d2092ee6c9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 20 May 2020 12:08:36 +0200 Subject: [PATCH 059/111] SA5012: check for slice parameters that have to be of even length --- staticcheck/analysis.go | 14 +- staticcheck/doc.go | 9 + staticcheck/lint.go | 246 ++++++++++++++++++ staticcheck/lint_test.go | 1 + .../CheckEvenSliceLength.go | 50 ++++ 5 files changed, 315 insertions(+), 5 deletions(-) create mode 100644 staticcheck/testdata/src/CheckEvenSliceLength/CheckEvenSliceLength.go diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index 76dcd4964..a860e8af3 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -183,6 +183,10 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Run: CheckSingleArgAppend, Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, }, + "SA4022": { + Run: CheckAddressIsNil, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + }, "SA5000": { Run: CheckNilMaps, @@ -225,6 +229,11 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Run: CheckMaybeNil, Requires: []*analysis.Analyzer{buildir.Analyzer}, }, + "SA5012": { + Run: CheckEvenSliceLength, + FactTypes: []analysis.Fact{new(evenElements)}, + Requires: []*analysis.Analyzer{buildir.Analyzer}, + }, "SA6000": makeCallCheckerAnalyzer(checkRegexpMatchLoopRules), "SA6001": { @@ -263,9 +272,4 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Run: CheckStaticBitShift, Requires: []*analysis.Analyzer{inspect.Analyzer}, }, - - "SA4022": { - Run: CheckAddressIsNil, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - }, }) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index a9b89fa31..cc04d77b7 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -661,6 +661,15 @@ popular package.`, Since: "2020.1", }, + "SA5012": { + Title: "Passing odd-sized slice to function expecting even size", + Text: `Some 
functions that take slices as parameters expect the slices to have an even number of elements. +Often, these functions treat elements in a slice as pairs. +For example, strings.NewReplacer takes pairs of old and new strings, +and calling it with an odd number of elements would be an error.`, + Since: "Unreleased", + }, + "SA6000": { Title: `Using regexp.Match or related in a loop, should use regexp.Compile`, Since: "2017.1", diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 5430d9ea8..481a504bc 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3914,3 +3914,249 @@ func CheckStaticBitShift(pass *analysis.Pass) (interface{}, error) { return nil, nil } + +func findSliceLenChecks(pass *analysis.Pass) { + // mark all function parameters that have to be of even length + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + for _, b := range fn.Blocks { + // all paths go through this block + if !b.Dominates(fn.Exit) { + continue + } + + // if foo % 2 != 0 + ifi, ok := b.Control().(*ir.If) + if !ok { + continue + } + cmp, ok := ifi.Cond.(*ir.BinOp) + if !ok { + continue + } + var needle uint64 + switch cmp.Op { + case token.NEQ: + // look for != 0 + needle = 0 + case token.EQL: + // look for == 1 + needle = 1 + default: + continue + } + + rem, ok1 := cmp.X.(*ir.BinOp) + k, ok2 := cmp.Y.(*ir.Const) + if ok1 != ok2 { + continue + } + if !ok1 { + rem, ok1 = cmp.Y.(*ir.BinOp) + k, ok2 = cmp.X.(*ir.Const) + } + if !ok1 || !ok2 || rem.Op != token.REM || k.Value.Kind() != constant.Int || k.Uint64() != needle { + continue + } + k, ok = rem.Y.(*ir.Const) + if !ok || k.Value.Kind() != constant.Int || k.Uint64() != 2 { + continue + } + + // if len(foo) % 2 != 0 + call, ok := rem.X.(*ir.Call) + if !ok || !irutil.IsCallTo(call.Common(), "len") { + continue + } + + // we're checking the length of a parameter that is a slice + // TODO(dh): support parameters that have flown through sigmas and phis + param, ok := 
call.Call.Args[0].(*ir.Parameter)
+			if !ok {
+				continue
+			}
+			if _, ok := param.Type().Underlying().(*types.Slice); !ok {
+				continue
+			}
+
+			// if len(foo) % 2 != 0 then panic
+			if _, ok := b.Succs[0].Control().(*ir.Panic); !ok {
+				continue
+			}
+
+			pass.ExportObjectFact(param.Object(), new(evenElements))
+		}
+	}
+}
+
+func findIndirectSliceLenChecks(pass *analysis.Pass) {
+	seen := map[*ir.Function]struct{}{}
+
+	var doFunction func(fn *ir.Function)
+	doFunction = func(fn *ir.Function) {
+		if _, ok := seen[fn]; ok {
+			return
+		}
+		seen[fn] = struct{}{}
+
+		for _, b := range fn.Blocks {
+			// all paths go through this block
+			if !b.Dominates(fn.Exit) {
+				continue
+			}
+
+			for _, instr := range b.Instrs {
+				call, ok := instr.(*ir.Call)
+				if !ok {
+					continue
+				}
+				callee := call.Call.StaticCallee()
+				if callee == nil {
+					continue
+				}
+
+				if callee.Pkg == fn.Pkg {
+					// TODO(dh): are we missing interesting wrappers
+					// because wrappers don't have Pkg set?
+					doFunction(callee)
+				}
+
+				for argi, arg := range call.Call.Args {
+					if callee.Signature.Recv() != nil {
+						if argi == 0 {
+							continue
+						}
+						argi--
+					}
+
+					// TODO(dh): support parameters that have flown through sigmas and phis
+					param, ok := arg.(*ir.Parameter)
+					if !ok {
+						continue
+					}
+					if _, ok := param.Type().Underlying().(*types.Slice); !ok {
+						continue
+					}
+
+					// We can't use callee.Params to look up the
+					// parameter, because Params is not populated for
+					// external functions. In our modular analysis,
+					// any function in any package that isn't the
+					// current package is considered "external", as it
+					// has been loaded from export data only.
+ sigParams := callee.Signature.Params() + + if !pass.ImportObjectFact(sigParams.At(argi), new(evenElements)) { + continue + } + pass.ExportObjectFact(param.Object(), new(evenElements)) + } + } + } + } + + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + doFunction(fn) + } +} + +func findSliceLength(v ir.Value) int { + // TODO(dh): VRP would help here + + val := func(v ir.Value) int { + if v, ok := v.(*ir.Const); ok { + return int(v.Int64()) + } + return -1 + } + switch v := v.(type) { + case *ir.Slice: + low := 0 + high := -1 + if v.Low != nil { + low = val(v.Low) + } + if v.High != nil { + high = val(v.High) + } else { + switch vv := v.X.(type) { + case *ir.Alloc: + high = int(typeutil.Dereference(vv.Type()).Underlying().(*types.Array).Len()) + case *ir.Slice: + high = findSliceLength(vv) + } + } + if low == -1 || high == -1 { + return -1 + } + return high - low + default: + return -1 + } +} + +type evenElements struct{} + +func (evenElements) AFact() {} + +func (evenElements) String() string { return "needs even elements" } + +func flagSliceLens(pass *analysis.Pass) { + var tag evenElements + + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + call, ok := instr.(ir.CallInstruction) + if !ok { + continue + } + callee := call.Common().StaticCallee() + if callee == nil { + continue + } + for argi, arg := range call.Common().Args { + if callee.Signature.Recv() != nil { + if argi == 0 { + continue + } + argi-- + } + + _, ok := arg.Type().Underlying().(*types.Slice) + if !ok { + continue + } + param := callee.Signature.Params().At(argi) + if !pass.ImportObjectFact(param, &tag) { + continue + } + + // we know the argument has to have even length. 
+ // now let's try to find its length + if n := findSliceLength(arg); n > -1 && n%2 != 0 { + src := call.Source().(*ast.CallExpr).Args[argi] + sig := call.Common().Signature() + var label string + if argi == sig.Params().Len()-1 && sig.Variadic() { + label = "variadic argument" + } else { + label = "argument" + } + // Note that param.Name() is guaranteed to not + // be empty, otherwise the function couldn't + // have enforced its length. + report.Report(pass, src, fmt.Sprintf("%s %q is expected to have even number of elements, but has %d elements", label, param.Name(), n)) + } + } + } + } + } +} + +func CheckEvenSliceLength(pass *analysis.Pass) (interface{}, error) { + findSliceLenChecks(pass) + findIndirectSliceLenChecks(pass) + flagSliceLens(pass) + + return nil, nil +} diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index 2a1615696..c79aaa0e5 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -80,6 +80,7 @@ func TestAll(t *testing.T) { "SA5009": {{Dir: "CheckPrintf"}}, "SA5010": {{Dir: "CheckImpossibleTypeAssertion"}}, "SA5011": {{Dir: "CheckMaybeNil"}}, + "SA5012": {{Dir: "CheckEvenSliceLength"}}, "SA6000": {{Dir: "CheckRegexpMatchLoop"}}, "SA6001": {{Dir: "CheckMapBytesKey"}}, "SA6002": {{Dir: "CheckSyncPoolValue"}}, diff --git a/staticcheck/testdata/src/CheckEvenSliceLength/CheckEvenSliceLength.go b/staticcheck/testdata/src/CheckEvenSliceLength/CheckEvenSliceLength.go new file mode 100644 index 000000000..1de808980 --- /dev/null +++ b/staticcheck/testdata/src/CheckEvenSliceLength/CheckEvenSliceLength.go @@ -0,0 +1,50 @@ +package pkg + +import "strings" + +func fnVariadic(s string, args ...interface{}) { // want args:"needs even elements" + if len(args)%2 != 0 { + panic("I'm one of those annoying logging APIs") + } +} + +func fnSlice(s string, args []interface{}) { // want args:"needs even elements" + if len(args)%2 != 0 { + panic("I'm one of those annoying logging APIs") + } +} + +func fnIndirect(s string, args 
...interface{}) { // want args:"needs even elements" + fnSlice(s, args) +} + +func fn2(bleh []interface{}, arr1 [3]interface{}) { // want bleh:"needs even elements" + fnVariadic("%s", 1, 2, 3) // want `variadic argument "args".+ but has 3 elements` + args := []interface{}{1, 2, 3} + fnVariadic("", args...) // want `variadic argument "args".+ but has 3 elements` + fnVariadic("", args[:1]...) // want `variadic argument "args".+ but has 1 elements` + fnVariadic("", args[:2]...) + fnVariadic("", args[0:1]...) // want `variadic argument "args".+ but has 1 elements` + fnVariadic("", args[0:]...) // want `variadic argument "args".+ but has 3 elements` + fnVariadic("", args[:]...) // want `variadic argument "args".+ but has 3 elements` + fnVariadic("", bleh...) + fnVariadic("", bleh[:1]...) // want `variadic argument "args".+ but has 1 elements` + fnVariadic("", bleh[0:1]...) // want `variadic argument "args".+ but has 1 elements` + fnVariadic("", bleh[0:]...) + fnVariadic("", bleh[:]...) + fnVariadic("", bleh) // want `variadic argument "args".+ but has 1 elements` + fnVariadic("", make([]interface{}, 3)...) // want `variadic argument "args".+ but has 3 elements` + fnVariadic("", make([]interface{}, 4)...) + var arr2 [3]interface{} + fnVariadic("", arr1[:]...) // want `variadic argument "args".+ but has 3 elements` + fnVariadic("", arr2[:]...) 
// want `variadic argument "args".+ but has 3 elements` + + fnSlice("", []interface{}{1, 2, 3}) // want `argument "args".+ but has 3 elements` + fnSlice("", []interface{}{1, 2, 3, 4}) + + fnIndirect("%s", 1, 2, 3) // want `argument "args".+ but has 3 elements` + fnIndirect("%s", 1, 2) + + strings.NewReplacer("one") // want `variadic argument "oldnew".+ but has 1 elements` + strings.NewReplacer("one", "two") +} From 9b21780a6dea54d84dd05263ec284266081c9679 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 8 Jun 2020 11:50:12 +0200 Subject: [PATCH 060/111] lintcmd: don't mention -explain flag if we only encountered compile errors The -explain flag does not provide any useful information on compiler errors, so don't mention it. Closes gh-776 --- lintcmd/cmd.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lintcmd/cmd.go b/lintcmd/cmd.go index f44b3c726..ccb49757d 100644 --- a/lintcmd/cmd.go +++ b/lintcmd/cmd.go @@ -659,6 +659,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { } var ( + numCompiles int numErrors int numWarnings int numIgnored int @@ -670,7 +671,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { analyzerNames[i] = a.Name } shouldExit := filterAnalyzerNames(analyzerNames, fail) - shouldExit["compile"] = true for _, p := range ps { if p.Category == "compile" && debugNoCompile { @@ -680,7 +680,9 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { numIgnored++ continue } - if shouldExit[p.Category] { + if p.Category == "compile" { + numCompiles++ + } else if shouldExit[p.Category] { numErrors++ } else { p.Severity = severityWarning @@ -689,14 +691,14 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { f.Format(p) } if f, ok := f.(statter); ok { - f.Stats(len(ps), numErrors, numWarnings, numIgnored) + f.Stats(len(ps), numErrors+numCompiles, numWarnings, numIgnored) } if f, ok := f.(documentationMentioner); ok && (numErrors > 0 || numWarnings > 0) && 
len(os.Args) > 0 { f.MentionCheckDocumentation(os.Args[0]) } - if numErrors > 0 { + if numErrors > 0 || numCompiles > 0 { exit(1) } exit(0) From eb569e2d3e52015681a2ec1afe4ed09c9fc40b3d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 8 Jun 2020 12:21:28 +0200 Subject: [PATCH 061/111] internal/robustio: reimport from upstream This imports robustio from the Go project at commit 608cdcaede1e7133dc994b5e8894272c2dce744b. This switches to using errors.As, and increases the timeout from 500ms to 2000ms as per commit 093049b3709eda7537ece92a2991918cf53782d6. Updates gh-558 --- internal/robustio/robustio_darwin.go | 14 +++----------- internal/robustio/robustio_flaky.go | 13 ++++++------- internal/robustio/robustio_other.go | 2 +- internal/robustio/robustio_windows.go | 14 ++++---------- 4 files changed, 14 insertions(+), 29 deletions(-) diff --git a/internal/robustio/robustio_darwin.go b/internal/robustio/robustio_darwin.go index 1ac0d10d7..99fd8ebc2 100644 --- a/internal/robustio/robustio_darwin.go +++ b/internal/robustio/robustio_darwin.go @@ -5,7 +5,7 @@ package robustio import ( - "os" + "errors" "syscall" ) @@ -13,16 +13,8 @@ const errFileNotFound = syscall.ENOENT // isEphemeralError returns true if err may be resolved by waiting. 
func isEphemeralError(err error) bool { - switch werr := err.(type) { - case *os.PathError: - err = werr.Err - case *os.LinkError: - err = werr.Err - case *os.SyscallError: - err = werr.Err - - } - if errno, ok := err.(syscall.Errno); ok { + var errno syscall.Errno + if errors.As(err, &errno) { return errno == errFileNotFound } return false diff --git a/internal/robustio/robustio_flaky.go b/internal/robustio/robustio_flaky.go index e0bf5b9b3..d4cb7e645 100644 --- a/internal/robustio/robustio_flaky.go +++ b/internal/robustio/robustio_flaky.go @@ -7,6 +7,7 @@ package robustio import ( + "errors" "io/ioutil" "math/rand" "os" @@ -14,9 +15,7 @@ import ( "time" ) -const arbitraryTimeout = 500 * time.Millisecond - -const ERROR_SHARING_VIOLATION = 32 +const arbitraryTimeout = 2000 * time.Millisecond // retry retries ephemeral errors from f up to an arbitrary timeout // to work around filesystem flakiness on Windows and Darwin. @@ -33,7 +32,8 @@ func retry(f func() (err error, mayRetry bool)) error { return err } - if errno, ok := err.(syscall.Errno); ok && (lowestErrno == 0 || errno < lowestErrno) { + var errno syscall.Errno + if errors.As(err, &errno) && (lowestErrno == 0 || errno < lowestErrno) { bestErr = err lowestErrno = errno } else if bestErr == nil { @@ -54,7 +54,7 @@ func retry(f func() (err error, mayRetry bool)) error { // rename is like os.Rename, but retries ephemeral errors. // -// On windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with +// On Windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with // MOVEFILE_REPLACE_EXISTING. // // Windows also provides a different system call, ReplaceFile, @@ -79,8 +79,7 @@ func readFile(filename string) ([]byte, error) { // Unlike in rename, we do not retry errFileNotFound here: it can occur // as a spurious error, but the file may also genuinely not exist, so the // increase in robustness is probably not worth the extra latency. 
- - return err, isEphemeralError(err) && err != errFileNotFound + return err, isEphemeralError(err) && !errors.Is(err, errFileNotFound) }) return b, err } diff --git a/internal/robustio/robustio_other.go b/internal/robustio/robustio_other.go index a2428856f..907b55685 100644 --- a/internal/robustio/robustio_other.go +++ b/internal/robustio/robustio_other.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//+build !windows,!darwin +// +build !windows,!darwin package robustio diff --git a/internal/robustio/robustio_windows.go b/internal/robustio/robustio_windows.go index a35237d44..200070a9e 100644 --- a/internal/robustio/robustio_windows.go +++ b/internal/robustio/robustio_windows.go @@ -5,23 +5,17 @@ package robustio import ( - "os" + "errors" "syscall" ) +const ERROR_SHARING_VIOLATION = 32 const errFileNotFound = syscall.ERROR_FILE_NOT_FOUND // isEphemeralError returns true if err may be resolved by waiting. func isEphemeralError(err error) bool { - switch werr := err.(type) { - case *os.PathError: - err = werr.Err - case *os.LinkError: - err = werr.Err - case *os.SyscallError: - err = werr.Err - } - if errno, ok := err.(syscall.Errno); ok { + var errno syscall.Errno + if errors.As(err, &errno) { switch errno { case syscall.ERROR_ACCESS_DENIED, syscall.ERROR_FILE_NOT_FOUND, From 6731d4c46f514e60d8d184fce025b94a40ecdad5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 8 Jun 2020 12:30:10 +0200 Subject: [PATCH 062/111] internal/renameio: reimport from upstream This imports renameio from the Go project at commit 608cdcaede1e7133dc994b5e8894272c2dce744b. Primarily, this switches us to using errors.As. 
Updates gh-558 --- internal/renameio/renameio.go | 26 +++++++++++++------------- internal/renameio/renameio_test.go | 11 +++++++---- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/internal/renameio/renameio.go b/internal/renameio/renameio.go index a279d1a1e..5e0dfa2f8 100644 --- a/internal/renameio/renameio.go +++ b/internal/renameio/renameio.go @@ -66,6 +66,19 @@ func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) return robustio.Rename(f.Name(), filename) } +// ReadFile is like ioutil.ReadFile, but on Windows retries spurious errors that +// may occur if the file is concurrently replaced. +// +// Errors are classified heuristically and retries are bounded, so even this +// function may occasionally return a spurious error on Windows. +// If so, the error will likely wrap one of: +// - syscall.ERROR_ACCESS_DENIED +// - syscall.ERROR_FILE_NOT_FOUND +// - internal/syscall/windows.ERROR_SHARING_VIOLATION +func ReadFile(filename string) ([]byte, error) { + return robustio.ReadFile(filename) +} + // tempFile creates a new temporary file with given permission bits. func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) { for i := 0; i < 10000; i++ { @@ -78,16 +91,3 @@ func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) { } return } - -// ReadFile is like ioutil.ReadFile, but on Windows retries spurious errors that -// may occur if the file is concurrently replaced. -// -// Errors are classified heuristically and retries are bounded, so even this -// function may occasionally return a spurious error on Windows. 
-// If so, the error will likely wrap one of: -// - syscall.ERROR_ACCESS_DENIED -// - syscall.ERROR_FILE_NOT_FOUND -// - internal/syscall/windows.ERROR_SHARING_VIOLATION -func ReadFile(filename string) ([]byte, error) { - return robustio.ReadFile(filename) -} diff --git a/internal/renameio/renameio_test.go b/internal/renameio/renameio_test.go index afd6eec9e..2f69be4c7 100644 --- a/internal/renameio/renameio_test.go +++ b/internal/renameio/renameio_test.go @@ -8,6 +8,7 @@ package renameio import ( "encoding/binary" + "errors" "io/ioutil" "math/rand" "os" @@ -61,9 +62,10 @@ func TestConcurrentReadsAndWrites(t *testing.T) { atomic.AddInt64(&writeSuccesses, 1) } else if robustio.IsEphemeralError(err) { var ( - dup bool + errno syscall.Errno + dup bool ) - if errno, ok := err.(syscall.Errno); ok { + if errors.As(err, &errno) { _, dup = writeErrnoSeen.LoadOrStore(errno, true) } if !dup { @@ -79,9 +81,10 @@ func TestConcurrentReadsAndWrites(t *testing.T) { atomic.AddInt64(&readSuccesses, 1) } else if robustio.IsEphemeralError(err) { var ( - dup bool + errno syscall.Errno + dup bool ) - if errno, ok := err.(syscall.Errno); ok { + if errors.As(err, &errno) { _, dup = readErrnoSeen.LoadOrStore(errno, true) } if !dup { From bb930f5b1454f7f8280e9c865ccad124715ac4c9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 8 Jun 2020 12:46:51 +0200 Subject: [PATCH 063/111] internal/cache: reimport from upstream This imports cache from the Go project at commit 608cdcaede1e7133dc994b5e8894272c2dce744b.. 
--- internal/cache/cache.go | 70 ++++++++++++++++++++++++++---------- internal/cache/cache_test.go | 2 +- 2 files changed, 52 insertions(+), 20 deletions(-) diff --git a/internal/cache/cache.go b/internal/cache/cache.go index cfd4241f9..0031edc69 100644 --- a/internal/cache/cache.go +++ b/internal/cache/cache.go @@ -77,7 +77,22 @@ func (c *Cache) fileName(id [HashSize]byte, key string) string { return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) } -var ErrMissing = errors.New("cache entry not found") +// An entryNotFoundError indicates that a cache entry was not found, with an +// optional underlying reason. +type entryNotFoundError struct { + Err error +} + +func (e *entryNotFoundError) Error() string { + if e.Err == nil { + return "cache entry not found" + } + return fmt.Sprintf("cache entry not found: %v", e.Err) +} + +func (e *entryNotFoundError) Unwrap() error { + return e.Err +} const ( // action entry file is "v1 \n" @@ -96,6 +111,8 @@ const ( // GODEBUG=gocacheverify=1. var verify = false +var errVerifyMode = errors.New("gocacheverify=1") + // DebugTest is set when GODEBUG=gocachetest=1 is in the environment. var DebugTest = false @@ -124,7 +141,7 @@ func initEnv() { // saved file for that output ID is still available. func (c *Cache) Get(id ActionID) (Entry, error) { if verify { - return Entry{}, ErrMissing + return Entry{}, &entryNotFoundError{Err: errVerifyMode} } return c.get(id) } @@ -137,20 +154,27 @@ type Entry struct { // get is Get but does not respect verify mode, so that Put can use it. 
func (c *Cache) get(id ActionID) (Entry, error) { - missing := func() (Entry, error) { - return Entry{}, ErrMissing + missing := func(reason error) (Entry, error) { + return Entry{}, &entryNotFoundError{Err: reason} } f, err := os.Open(c.fileName(id, "a")) if err != nil { - return missing() + return missing(err) } defer f.Close() entry := make([]byte, entrySize+1) // +1 to detect whether f is too long - if n, err := io.ReadFull(f, entry); n != entrySize || err != io.ErrUnexpectedEOF { - return missing() + if n, err := io.ReadFull(f, entry); n > entrySize { + return missing(errors.New("too long")) + } else if err != io.ErrUnexpectedEOF { + if err == io.EOF { + return missing(errors.New("file is empty")) + } + return missing(err) + } else if n < entrySize { + return missing(errors.New("entry file incomplete")) } if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { - return missing() + return missing(errors.New("invalid header")) } eid, entry := entry[3:3+hexSize], entry[3+hexSize:] eout, entry := entry[1:1+hexSize], entry[1+hexSize:] @@ -158,27 +182,33 @@ func (c *Cache) get(id ActionID) (Entry, error) { //lint:ignore SA4006 See https://github.com/dominikh/go-tools/issues/465 etime, entry := entry[1:1+20], entry[1+20:] var buf [HashSize]byte - if _, err := hex.Decode(buf[:], eid); err != nil || buf != id { - return missing() + if _, err := hex.Decode(buf[:], eid); err != nil { + return missing(fmt.Errorf("decoding ID: %v", err)) + } else if buf != id { + return missing(errors.New("mismatched ID")) } if _, err := hex.Decode(buf[:], eout); err != nil { - return missing() + return missing(fmt.Errorf("decoding output ID: %v", err)) } i := 0 for i < len(esize) && esize[i] == ' ' { i++ } size, err := strconv.ParseInt(string(esize[i:]), 10, 64) - if err != nil || size < 0 { - return missing() + if err != 
nil { + return missing(fmt.Errorf("parsing size: %v", err)) + } else if size < 0 { + return missing(errors.New("negative size")) } i = 0 for i < len(etime) && etime[i] == ' ' { i++ } tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) - if err != nil || tm < 0 { - return missing() + if err != nil { + return missing(fmt.Errorf("parsing timestamp: %v", err)) + } else if tm < 0 { + return missing(errors.New("negative timestamp")) } c.used(c.fileName(id, "a")) @@ -195,8 +225,11 @@ func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) { } file = c.OutputFile(entry.OutputID) info, err := os.Stat(file) - if err != nil || info.Size() != entry.Size { - return "", Entry{}, ErrMissing + if err != nil { + return "", Entry{}, &entryNotFoundError{Err: err} + } + if info.Size() != entry.Size { + return "", Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")} } return file, entry, nil } @@ -211,7 +244,7 @@ func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { } data, _ := ioutil.ReadFile(c.OutputFile(entry.OutputID)) if sha256.Sum256(data) != entry.OutputID { - return nil, entry, ErrMissing + return nil, entry, &entryNotFoundError{Err: errors.New("bad checksum")} } return data, entry, nil } @@ -327,7 +360,6 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify // are entirely reproducible. As just noted, this may be unrealistic // in some cases but the check is also useful for shaking out real bugs. 
entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()) - if verify && allowVerify { old, err := c.get(id) if err == nil && (old.OutputID != out || old.Size != size) { diff --git a/internal/cache/cache_test.go b/internal/cache/cache_test.go index 7229bc4ce..1988c3450 100644 --- a/internal/cache/cache_test.go +++ b/internal/cache/cache_test.go @@ -78,7 +78,7 @@ func TestGrowth(t *testing.T) { n := 10000 if testing.Short() { - n = 1000 + n = 10 } for i := 0; i < n; i++ { From d803454e4d17b11dfd56d966e85db98402fce9fe Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Jun 2020 18:43:02 +0200 Subject: [PATCH 064/111] Make benchmarks more automatic - Benchmark multiple package patterns at once. - Support CSV output - Test varying values of GOGC --- _benchmarks/bench.sh | 81 +++++++++++++++++++++++++++++++++----------- 1 file changed, 61 insertions(+), 20 deletions(-) diff --git a/_benchmarks/bench.sh b/_benchmarks/bench.sh index 4471a7dd6..5f3c9c024 100755 --- a/_benchmarks/bench.sh +++ b/_benchmarks/bench.sh @@ -1,31 +1,72 @@ -#!/usr/bin/env sh +#!/usr/bin/env bash set -e -# PKG="k8s.io/kubernetes/pkg/..." -# LABEL="k8s" -PKG="std" -LABEL=$PKG -MIN_CORES=16 + +declare -A PKGS=( + ["strconv"]="strconv" + ["std"]="std" + ["k8s"]="k8s.io/kubernetes/pkg/..." 
+) + +MIN_CORES=1 MAX_CORES=16 +MIN_GOGC=10 +MAX_GOGC=100 SAMPLES=5 WIPE_CACHE=1 +FORMAT=csv BIN=$(realpath ./silent-staticcheck.sh) +SMT=1 -go build ../cmd/staticcheck -export GO111MODULE=off +runBenchmark() { + local pkg="$1" + local label="$2" + local gc="$3" + local cores="$4" + local wipe="$5" + + if [ $wipe -ne 0 ]; then + rm -rf ~/.cache/staticcheck + fi -for cores in $(seq $MIN_CORES $MAX_CORES); do - for i in $(seq 1 $SAMPLES); do + local procs + if [ $SMT -ne 0 ]; then procs=$((cores*2)) - if [ $WIPE_CACHE -ne 0 ]; then - rm -rf ~/.cache/staticcheck - fi - - out=$(env time -f "%e %M" taskset -c 0-$((procs-1)) $BIN $PKG 2>&1) - t=$(echo "$out" | cut -f1 -d" ") - m=$(echo "$out" | cut -f2 -d" ") - ns=$(printf "%s 1000000000 * p" $t | dc) - b=$((m * 1024)) - printf "BenchmarkStaticcheck-%s-%d 1 %.0f ns/op %.0f B/op\n" "$LABEL" "$procs" "$ns" "$b" + else + procs=$cores + fi + + local out=$(GOGC=$gc env time -f "%e %M" taskset -c 0-$((procs-1)) $BIN $pkg 2>&1) + local t=$(echo "$out" | cut -f1 -d" ") + local m=$(echo "$out" | cut -f2 -d" ") + local ns=$(printf "%s 1000000000 * p" $t | dc) + local b=$((m * 1024)) + + case $FORMAT in + bench) + printf "BenchmarkStaticcheck-%s-GOGC%d-wiped%d-%d 1 %.0f ns/op %.0f B/op\n" "$label" "$gc" "$wipe" "$procs" "$ns" "$b" + ;; + csv) + printf "%s,%d,%d,%d,%.0f,%.0f\n" "$label" "$gc" "$procs" "$wipe" "$ns" "$b" + ;; + esac +} + +go build ../cmd/staticcheck +export GO111MODULE=off + +if [ "$FORMAT" = "csv" ]; then + printf "packages,gogc,procs,wipe-cache,time,memory\n" +fi + +for label in "${!PKGS[@]}"; do + pkg=${PKGS[$label]} + for gc in $(seq $MIN_GOGC 10 $MAX_GOGC); do + for cores in $(seq $MIN_CORES $MAX_CORES); do + for i in $(seq 1 $SAMPLES); do + runBenchmark "$pkg" "$label" "$gc" "$cores" 1 + runBenchmark "$pkg" "$label" "$gc" "$cores" 0 + done + done done done From 8477a449cafdad3b31eec8a7daf735c2b23520af Mon Sep 17 00:00:00 2001 From: Sourya Vatsyayan Date: Mon, 1 Jun 2020 10:39:27 +0530 Subject: [PATCH 065/111] 
SA1012: fix tests for suggested fixes Signed-off-by: Sourya Vatsyayan Closes: gh-771 [via git-merge-pr] --- .../checkStdlibUsageNilContext.go.golden | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden index cef6d7a0b..4f6d38a5b 100644 --- a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden +++ b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go.golden @@ -1,3 +1,4 @@ +-- use context.Background -- package pkg import "context" @@ -22,3 +23,28 @@ func fn3() { _ = (func())(nil) (*T).Foo(nil) } +-- use context.TODO -- +package pkg + +import "context" + +func fn1(ctx context.Context) {} +func fn2(x string, ctx context.Context) {} +func fn4() {} + +type T struct{} + +func (*T) Foo() {} + +func fn3() { + fn1(context.TODO()) // want `do not pass a nil Context` + fn1(context.TODO()) + fn2("", nil) + fn4() + + // don't flag this conversion + _ = (func(context.Context))(nil) + // and don't crash on these + _ = (func())(nil) + (*T).Foo(nil) +} From ea50789e60ce4e9dd60f65f49d7ff5217e0586e4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Jun 2020 18:52:25 +0200 Subject: [PATCH 066/111] Update version of golang.org/x/tools --- go.mod | 2 +- go.sum | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 3b3c8b956..044b4f28e 100644 --- a/go.mod +++ b/go.mod @@ -8,5 +8,5 @@ require ( github.com/kisielk/gotool v1.0.0 github.com/rogpeppe/go-internal v1.3.0 golang.org/x/mod v0.2.0 - golang.org/x/tools v0.0.0-20200427214658-4697a2867c88 + golang.org/x/tools v0.0.0-20200609164405-eb789aa7ce50 ) diff --git a/go.sum b/go.sum index 6e1c076f7..fce3b1197 100644 --- a/go.sum +++ b/go.sum @@ -42,6 +42,8 @@ golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e h1:3Dzrrxi54Io7Aoyb0PYLsI4 
golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200427214658-4697a2867c88 h1:Nj7oNnL9tSACMt2JvszZN6P4IXiy1t6E/YRMr7YtaSw= golang.org/x/tools v0.0.0-20200427214658-4697a2867c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200609164405-eb789aa7ce50 h1:59syOWj4+Fl+op4LL8fX1kO7HmbdEWfxlw4tcGvH+y0= +golang.org/x/tools v0.0.0-20200609164405-eb789aa7ce50/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= From 863fc4d130ce5853512b2c97884469e360f3c96a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Jun 2020 19:13:48 +0200 Subject: [PATCH 067/111] CI: target Go 1.13 as lowest supported version --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fe36b76d1..96122a813 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,4 +30,4 @@ jobs: with: fetch-depth: 1 - run: "go vet ./..." - - run: "$(go env GOPATH)/bin/staticcheck -go 1.11 ./..." + - run: "$(go env GOPATH)/bin/staticcheck -go 1.13 ./..." From 66f0fd38314f0dbc92c1e7144070553f0c161047 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Jun 2020 19:26:53 +0200 Subject: [PATCH 068/111] lintcmd: exit non-zero if we encounter unmatched ignore directives This has been the behavior when we originally added ignore directives. In Staticcheck 2019.2 we somehow lost the behavior. Restore it. 
--- lintcmd/cmd.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lintcmd/cmd.go b/lintcmd/cmd.go index ccb49757d..b5ce6d8fa 100644 --- a/lintcmd/cmd.go +++ b/lintcmd/cmd.go @@ -257,7 +257,7 @@ func filterIgnored(problems []problem, res runner.ResultData, allowedAnalyzers m Diagnostic: runner.Diagnostic{ Position: ig.Pos, Message: "this linter directive didn't match anything; should it be removed?", - Category: "", + Category: "staticcheck", }, } moreProblems = append(moreProblems, p) @@ -671,6 +671,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { analyzerNames[i] = a.Name } shouldExit := filterAnalyzerNames(analyzerNames, fail) + shouldExit["staticcheck"] = true for _, p := range ps { if p.Category == "compile" && debugNoCompile { From 9cc924ae9826f784f093cce37c36a4dd01b48371 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 14 Jun 2020 02:51:21 +0200 Subject: [PATCH 069/111] lintcmd: print reference to -explain check to stderr Separate diagnostics from help output. The output of the plain formatter is supposed to be machine readable, so don't emit text meant strictly for humans. For the stylish formatter we could've gone either way, because its output is not meant to be processed by machines, but we copy the behavior of the plain formatter for the sake of consistency. 
--- lintcmd/cmd.go | 4 ++-- lintcmd/format.go | 24 +++++++++++++----------- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/lintcmd/cmd.go b/lintcmd/cmd.go index b5ce6d8fa..6699fd9f7 100644 --- a/lintcmd/cmd.go +++ b/lintcmd/cmd.go @@ -632,9 +632,9 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { var f formatter switch theFormatter { case "text": - f = textFormatter{W: os.Stdout} + f = textFormatter{Diagnostics: os.Stdout, UI: os.Stderr} case "stylish": - f = &stylishFormatter{W: os.Stdout} + f = &stylishFormatter{Diagnostics: os.Stdout, UI: os.Stderr} case "json": f = jsonFormatter{W: os.Stdout} default: diff --git a/lintcmd/format.go b/lintcmd/format.go index f3d95345a..7f47e1d30 100644 --- a/lintcmd/format.go +++ b/lintcmd/format.go @@ -48,18 +48,19 @@ type documentationMentioner interface { } type textFormatter struct { - W io.Writer + Diagnostics io.Writer + UI io.Writer } func (o textFormatter) Format(p problem) { - fmt.Fprintf(o.W, "%s: %s\n", relativePositionString(p.Position), p.String()) + fmt.Fprintf(o.Diagnostics, "%s: %s\n", relativePositionString(p.Position), p.String()) for _, r := range p.Related { - fmt.Fprintf(o.W, "\t%s: %s\n", relativePositionString(r.Position), r.Message) + fmt.Fprintf(o.Diagnostics, "\t%s: %s\n", relativePositionString(r.Position), r.Message) } } func (o textFormatter) MentionCheckDocumentation(cmd string) { - fmt.Fprintf(o.W, "\nRun '%s -explain ' or visit https://staticcheck.io/docs/checks for documentation on checks.\n", cmd) + fmt.Fprintf(o.UI, "\nRun '%s -explain ' or visit https://staticcheck.io/docs/checks for documentation on checks.\n", cmd) } type jsonFormatter struct { @@ -118,7 +119,8 @@ func (o jsonFormatter) Format(p problem) { } type stylishFormatter struct { - W io.Writer + Diagnostics io.Writer + UI io.Writer prevFile string tw *tabwriter.Writer @@ -133,11 +135,11 @@ func (o *stylishFormatter) Format(p problem) { 
if pos.Filename != o.prevFile { if o.prevFile != "" { o.tw.Flush() - fmt.Fprintln(o.W) + fmt.Fprintln(o.Diagnostics) } - fmt.Fprintln(o.W, pos.Filename) + fmt.Fprintln(o.Diagnostics, pos.Filename) o.prevFile = pos.Filename - o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0) + o.tw = tabwriter.NewWriter(o.Diagnostics, 0, 4, 2, ' ', 0) } fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Category, p.Message) for _, r := range p.Related { @@ -146,14 +148,14 @@ func (o *stylishFormatter) Format(p problem) { } func (o *stylishFormatter) MentionCheckDocumentation(cmd string) { - textFormatter{W: o.W}.MentionCheckDocumentation(cmd) + textFormatter{UI: o.UI}.MentionCheckDocumentation(cmd) } func (o *stylishFormatter) Stats(total, errors, warnings, ignored int) { if o.tw != nil { o.tw.Flush() - fmt.Fprintln(o.W) + fmt.Fprintln(o.Diagnostics) } - fmt.Fprintf(o.W, " ✖ %d problems (%d errors, %d warnings, %d ignored)\n", + fmt.Fprintf(o.Diagnostics, " ✖ %d problems (%d errors, %d warnings, %d ignored)\n", total, errors, warnings, ignored) } From d49b8deb7c27ceff53894fbbbb0f604587efb075 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 14 Jun 2020 18:52:52 +0200 Subject: [PATCH 070/111] lintcmd/runner: use GOMAXPROCS instead of NumCPU GOMAXPROCS subsumes NumCPU for the purpose of sizing semaphores. If users set CPU affinity, then GOMAXPROCS will reflect that. If users only set GOMAXPROCS, then NumCPU would be inaccurate. Additionally, there are plans to make GOMAXPROCS aware of CPU quotas (https://github.com/golang/go/issues/33803). Users are still advised to set CPU affinity instead of relying on GOMAXPROCS to limit CPU usage, because Staticcheck shells out to the underlying build system, which together with Staticcheck would be able to use more CPU than intended if limited by just GOMAXPROCS. 
--- lintcmd/runner/runner.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lintcmd/runner/runner.go b/lintcmd/runner/runner.go index b3c245a43..867d192f0 100644 --- a/lintcmd/runner/runner.go +++ b/lintcmd/runner/runner.go @@ -42,13 +42,13 @@ // // Actions are executed in parallel where the dependency graph allows. // Overall parallelism is bounded by a semaphore, sized according to -// runtime.NumCPU(). Each concurrently processed package takes up a +// GOMAXPROCS. Each concurrently processed package takes up a // token, as does each analyzer – but a package can always execute at // least one analyzer, using the package's token. // -// Depending on the overall shape of the graph, there may be NumCPU +// Depending on the overall shape of the graph, there may be GOMAXPROCS // packages running a single analyzer each, a single package running -// NumCPU analyzers, or anything in between. +// GOMAXPROCS analyzers, or anything in between. // // Total memory consumption grows roughly linearly with the number of // CPUs, while total execution time is inversely proportional to the @@ -353,7 +353,7 @@ func New(cfg config.Config) (*Runner, error) { return &Runner{ cfg: cfg, cache: cache, - semaphore: tsync.NewSemaphore(runtime.NumCPU()), + semaphore: tsync.NewSemaphore(runtime.GOMAXPROCS(0)), }, nil } From 925e3c1d1b98eafb5e5e2678b6545d4da084325a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 15 Jun 2020 03:48:26 +0200 Subject: [PATCH 071/111] Cache staticcheck export data between CI runs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This doesn't really make our CI run any faster, because running the tests takes much longer than running staticcheck, even uncached. However, our approach to caching should serve as a good reference to other people using staticcheck, hopefully with faster tests. 
We experimented with caching test results, but that was complicated by how cache keys for tests are computed. The test binary generates a log of interactions with the environment, such as statting files, and the environment gets captured in the cache key. In the case of stat, this includes the ctime and mtime, which will be different for every run of CI. We could set all ctimes and mtimes to a fixed timestamp, but that would require different scripts for different operating systems – or a helper utility. We also experimented with caching $GOPATH/pkg/mod, but action/setup-go doesn't make it easy to figure out what GOPATH is, and downloading our dependencies is not enough of a bottleneck to warrant complicating our workflows further. --- .github/workflows/ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 96122a813..852081232 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,5 +29,11 @@ jobs: - uses: actions/checkout@v1 with: fetch-depth: 1 + - uses: actions/cache@v2 + with: + path: ~/.cache/staticcheck + key: staticcheck-${{ github.sha }} + restore-keys: | + staticcheck- - run: "go vet ./..." - run: "$(go env GOPATH)/bin/staticcheck -go 1.13 ./..." From c6b60f9125c8c162016777adc4d8b1d9e65c319f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 19 Jun 2020 07:49:04 +0200 Subject: [PATCH 072/111] unused: remove unnecessary checker type --- unused/unused.go | 33 +++++++++++++-------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 6c49574d3..ae203b13c 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -493,10 +493,6 @@ func serializeObject(pass *analysis.Pass, fset *token.FileSet, obj types.Object) } } -type checker struct { - graph *graph -} - func debugf(f string, v ...interface{}) { if Debug != nil { fmt.Fprintf(Debug, f, v...) 
@@ -517,12 +513,9 @@ func run(pass *analysis.Pass) (interface{}, error) { Directives: dirs, } - c := &checker{ - graph: newGraph(pkg), - } - - c.graph.entry(pkg) - used, unused := c.results() + g := newGraph(pkg) + g.entry(pkg) + used, unused := results(g) if Debug != nil { debugNode := func(n *node) { @@ -545,11 +538,11 @@ func run(pass *analysis.Pass) (interface{}, error) { } debugf("digraph{\n") - debugNode(c.graph.Root) - for _, v := range c.graph.Nodes { + debugNode(g.Root) + for _, v := range g.Nodes { debugNode(v) } - for _, node := range c.graph.TypeNodes { + for _, node := range g.TypeNodes { debugNode(node) } @@ -559,23 +552,23 @@ func run(pass *analysis.Pass) (interface{}, error) { return Result{Used: used, Unused: unused}, nil } -func (c *checker) results() (used, unused []types.Object) { - c.graph.color(c.graph.Root) - for _, node := range c.graph.TypeNodes { +func results(g *graph) (used, unused []types.Object) { + g.color(g.Root) + for _, node := range g.TypeNodes { if node.seen { continue } switch obj := node.obj.(type) { case *types.Struct: for i := 0; i < obj.NumFields(); i++ { - if node, ok := c.graph.nodeMaybe(obj.Field(i)); ok { + if node, ok := g.nodeMaybe(obj.Field(i)); ok { node.quiet = true } } case *types.Interface: for i := 0; i < obj.NumExplicitMethods(); i++ { m := obj.ExplicitMethod(i) - if node, ok := c.graph.nodeMaybe(m); ok { + if node, ok := g.nodeMaybe(m); ok { node.quiet = true } } @@ -584,7 +577,7 @@ func (c *checker) results() (used, unused []types.Object) { // OPT(dh): can we find meaningful initial capacities for the used and unused slices? 
- for _, n := range c.graph.Nodes { + for _, n := range g.Nodes { if obj, ok := n.obj.(types.Object); ok { switch obj := obj.(type) { case *types.Var: @@ -602,7 +595,7 @@ func (c *checker) results() (used, unused []types.Object) { if n.seen { used = append(used, obj) } else if !n.quiet { - if obj.Pkg() != c.graph.pkg.Pkg { + if obj.Pkg() != g.pkg.Pkg { continue } unused = append(unused, obj) From e9bf2c7b0e3c3a391b27acfdc19980c757834734 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 19 Jun 2020 08:01:04 +0200 Subject: [PATCH 073/111] unused: set g.pkg in entry, reduce uses of g.pkg --- unused/unused.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index ae203b13c..13ff4245f 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -513,7 +513,7 @@ func run(pass *analysis.Pass) (interface{}, error) { Directives: dirs, } - g := newGraph(pkg) + g := newGraph() g.entry(pkg) used, unused := results(g) @@ -620,13 +620,12 @@ type graph struct { nodeCounter uint64 } -func newGraph(pkg *pkg) *graph { +func newGraph() *graph { g := &graph{ Nodes: map[interface{}]*node{}, seenFns: map[string]struct{}{}, seenTypes: map[types.Type]struct{}{}, TypeNodes: map[types.Type]*node{}, - pkg: pkg, } g.Root = g.newNode(nil) return g @@ -828,6 +827,7 @@ func (g *graph) seeAndUse(used, by interface{}, kind edgeKind) *node { } func (g *graph) entry(pkg *pkg) { + g.pkg = pkg scopes := map[*types.Scope]*ir.Function{} for _, fn := range pkg.SrcFuncs { if fn.Object() != nil { @@ -929,7 +929,7 @@ func (g *graph) entry(pkg *pkg) { if obj == nil { continue } - path := g.pkg.Fset.File(obj.Pos()).Name() + path := pkg.Fset.File(obj.Pos()).Name() if strings.HasSuffix(path, "_test.go") { if obj.Parent() != nil && obj.Parent().Parent() != nil && obj.Parent().Parent().Parent() == nil { // object's scope is the package, whose @@ -1151,7 +1151,7 @@ func (g *graph) entry(pkg *pkg) { line int } ignores := map[ignoredKey]struct{}{} 
- for _, dir := range g.pkg.Directives { + for _, dir := range pkg.Directives { if dir.Command != "ignore" && dir.Command != "file-ignore" { continue } @@ -1160,7 +1160,7 @@ func (g *graph) entry(pkg *pkg) { } for _, check := range strings.Split(dir.Arguments[0], ",") { if check == "U1000" { - pos := g.pkg.Fset.PositionFor(dir.Node.Pos(), false) + pos := pkg.Fset.PositionFor(dir.Node.Pos(), false) var key ignoredKey switch dir.Command { case "ignore": @@ -1185,7 +1185,7 @@ func (g *graph) entry(pkg *pkg) { // all objects annotated with a //lint:ignore U1000 are considered used for obj := range g.Nodes { if obj, ok := obj.(types.Object); ok { - pos := g.pkg.Fset.PositionFor(obj.Pos(), false) + pos := pkg.Fset.PositionFor(obj.Pos(), false) key1 := ignoredKey{ pos.Filename, pos.Line, From 326afc66c7e7984b4ac78f6782b27b971397c904 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 20 Jun 2020 06:23:05 +0200 Subject: [PATCH 074/111] unused: index seenFns by *ir.Function, not a string representation --- unused/unused.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 13ff4245f..9392c1ffb 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -616,14 +616,14 @@ type graph struct { // context pkg *pkg - seenFns map[string]struct{} + seenFns map[*ir.Function]struct{} nodeCounter uint64 } func newGraph() *graph { g := &graph{ Nodes: map[interface{}]*node{}, - seenFns: map[string]struct{}{}, + seenFns: map[*ir.Function]struct{}{}, seenTypes: map[types.Type]struct{}{}, TypeNodes: map[types.Type]*node{}, } @@ -1252,11 +1252,10 @@ func (g *graph) function(fn *ir.Function) { return } - name := fn.RelString(nil) - if _, ok := g.seenFns[name]; ok { + if _, ok := g.seenFns[fn]; ok { return } - g.seenFns[name] = struct{}{} + g.seenFns[fn] = struct{}{} // (4.1) functions use all their arguments, return parameters and receivers g.signature(fn.Signature, owningObject(fn)) From 
37af0885a7190cbe4065c18e51fa1a22463bcde6 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 20 Jun 2020 06:40:10 +0200 Subject: [PATCH 075/111] analysis/lint: include link to online documentation in output of -explain Closes gh-773 --- analysis/lint/lint.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/analysis/lint/lint.go b/analysis/lint/lint.go index fc256d747..fe3dc6863 100644 --- a/analysis/lint/lint.go +++ b/analysis/lint/lint.go @@ -94,7 +94,7 @@ func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*a if !ok { panic(fmt.Sprintf("missing documentation for check %s", k)) } - vc.Doc = doc.String() + vc.Doc = fmt.Sprintf("%s\nOnline documentation\n https://staticcheck.io/docs/checks#%s", doc.String(), k) if vc.Flags.Usage == nil { fs := flag.NewFlagSet("", flag.PanicOnError) fs.Var(newVersionFlag(), "go", "Target Go version") From a36e9300ad415d2b1946e7eeac3039e104ab61b3 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 27 Jul 2020 01:27:38 +0200 Subject: [PATCH 076/111] lintcmd: add null formatter The null formatter discards all output. It is useful for debugging. 
--- lintcmd/cmd.go | 2 ++ lintcmd/format.go | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/lintcmd/cmd.go b/lintcmd/cmd.go index 6699fd9f7..d80a3ed8b 100644 --- a/lintcmd/cmd.go +++ b/lintcmd/cmd.go @@ -637,6 +637,8 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { f = &stylishFormatter{Diagnostics: os.Stdout, UI: os.Stderr} case "json": f = jsonFormatter{W: os.Stdout} + case "null": + f = nullFormatter{} default: fmt.Fprintf(os.Stderr, "unsupported output format %q\n", theFormatter) exit(2) diff --git a/lintcmd/format.go b/lintcmd/format.go index 7f47e1d30..2d6afded0 100644 --- a/lintcmd/format.go +++ b/lintcmd/format.go @@ -63,6 +63,10 @@ func (o textFormatter) MentionCheckDocumentation(cmd string) { fmt.Fprintf(o.UI, "\nRun '%s -explain ' or visit https://staticcheck.io/docs/checks for documentation on checks.\n", cmd) } +type nullFormatter struct{} + +func (nullFormatter) Format(problem) {} + type jsonFormatter struct { W io.Writer } From 3b5b85696619f20fcdd0def0c457afbf096fe904 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 27 Jul 2020 03:48:54 +0200 Subject: [PATCH 077/111] lintcmd: add -debug.trace flag --- lintcmd/cmd.go | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/lintcmd/cmd.go b/lintcmd/cmd.go index d80a3ed8b..4bda6e62d 100644 --- a/lintcmd/cmd.go +++ b/lintcmd/cmd.go @@ -14,6 +14,7 @@ import ( "regexp" "runtime" "runtime/pprof" + "runtime/trace" "sort" "strconv" "strings" @@ -511,6 +512,7 @@ func FlagSet(name string) *flag.FlagSet { flags.Bool("debug.version", false, "Print detailed version information about this program") flags.Bool("debug.no-compile-errors", false, "Don't print compile errors") flags.String("debug.measure-analyzers", "", "Write analysis measurements to `file`. 
`file` will be opened for appending if it already exists.") + flags.String("debug.trace", "", "Write trace to `file`") checks := list{"inherit"} fail := list{"all"} @@ -550,6 +552,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool) debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool) + traceOut := fs.Lookup("debug.trace").Value.(flag.Getter).Get().(string) var measureAnalyzers func(analysis *analysis.Analyzer, pkg *loader.PackageSpec, d time.Duration) if path := fs.Lookup("debug.measure-analyzers").Value.(flag.Getter).Get().(string); path != "" { @@ -584,6 +587,9 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { runtime.GC() pprof.WriteHeapProfile(f) } + if traceOut != "" { + trace.Stop() + } os.Exit(code) } if cpuProfile != "" { @@ -593,6 +599,13 @@ func ProcessFlagSet(cs []*analysis.Analyzer, fs *flag.FlagSet) { } pprof.StartCPUProfile(f) } + if traceOut != "" { + f, err := os.Create(traceOut) + if err != nil { + log.Fatal(err) + } + trace.Start(f) + } if debugVersion { version.Verbose() From 4d9a72ff003a05c69f1301c1d7e414a885263af9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 30 Jul 2020 22:10:23 +0200 Subject: [PATCH 078/111] pattern: support matching field lists Closes gh-806 --- pattern/match.go | 22 +++++++++++++++++++ .../LintUnnecessaryGuard.go | 9 ++++++++ .../LintUnnecessaryGuard.go.golden | 9 ++++++++ 3 files changed, 40 insertions(+) diff --git a/pattern/match.go b/pattern/match.go index f0fda0619..88c0818fb 100644 --- a/pattern/match.go +++ b/pattern/match.go @@ -240,6 +240,28 @@ func match(m *Matcher, l, r interface{}) (interface{}, bool) { } } + { + ln, ok1 := l.([]*ast.Field) + rn, ok2 := r.([]*ast.Field) + if ok1 || ok2 { + if ok1 && !ok2 { + rn = []*ast.Field{r.(*ast.Field)} + } else if !ok1 && ok2 { + ln = 
[]*ast.Field{l.(*ast.Field)} + } + + if len(ln) != len(rn) { + return nil, false + } + for i, ll := range ln { + if _, ok := match(m, ll, rn[i]); !ok { + return nil, false + } + } + return r, true + } + } + panic(fmt.Sprintf("unsupported comparison: %T and %T", l, r)) } diff --git a/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go b/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go index 662b3be63..18530cd44 100644 --- a/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go +++ b/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go @@ -61,3 +61,12 @@ func fn() { m2["k"] = 1 } } + +// this used to cause a panic in the pattern package +func fn2() { + var obj interface{} + + if _, ok := obj.(map[string]interface{})["items"]; ok { + obj.(map[string]interface{})["version"] = 1 + } +} diff --git a/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden b/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden index 85446b042..1876b3c52 100644 --- a/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden +++ b/simple/testdata/src/CheckUnnecessaryGuard/LintUnnecessaryGuard.go.golden @@ -45,3 +45,12 @@ func fn() { m2["k"] = 1 } } + +// this used to cause a panic in the pattern package +func fn2() { + var obj interface{} + + if _, ok := obj.(map[string]interface{})["items"]; ok { + obj.(map[string]interface{})["version"] = 1 + } +} From 2c5dee2fb55d663d8730567654c2ff5a0c285609 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 30 Jul 2020 22:21:20 +0200 Subject: [PATCH 079/111] Add 2020.1.5 release notes --- doc/2020.1.html | 8 ++++++++ doc/staticcheck.html | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/2020.1.html b/doc/2020.1.html index b908002ad..3df791b92 100644 --- a/doc/2020.1.html +++ b/doc/2020.1.html @@ -11,6 +11,7 @@
  • Staticcheck 2020.1.2 release notes
  • Staticcheck 2020.1.3 release notes
  • Staticcheck 2020.1.4 release notes
  • +
  • Staticcheck 2020.1.5 release notes
  • Introduction to Staticcheck 2020.1

    @@ -221,3 +222,10 @@

    Staticcheck 2020.1.4 release notes

    You can find more information about this in the upstream issue.

    + +

    Staticcheck 2020.1.5 release notes

    + +

    + This release fixes a crash in the pattern matching engine + and a false positive in SA4006. +

    diff --git a/doc/staticcheck.html b/doc/staticcheck.html index 22cfdc7e3..c56a52142 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -22,7 +22,7 @@

    Installation

    If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. - It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1.4. + It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1.5. One way of doing so would be as follows:

    From ab2caa318cd8bff808b7fcdb9b98ac6049f51c0a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 22 Aug 2020 19:53:22 +0200 Subject: [PATCH 080/111] Commit our new logo The engineer, making sure your software is safe and sound. Made by @egonelbre and based on the Go gopher by Renee French, which is licensed under the Creative Commons Attribution 3.0. --- images/logo.svg | 483 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 483 insertions(+) create mode 100644 images/logo.svg diff --git a/images/logo.svg b/images/logo.svg new file mode 100644 index 000000000..d92523702 --- /dev/null +++ b/images/logo.svg @@ -0,0 +1,483 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 81508471876c7902b8ca236ae35f897b1777c65a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 22 Aug 2020 21:10:40 +0200 Subject: [PATCH 081/111] Widen the sign in the logo Unfortunately, the sign was a bit too small to be readable in all contexts. On the flip side, his feet are more protected now. 
--- images/logo.svg | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/images/logo.svg b/images/logo.svg index d92523702..f1755464d 100644 --- a/images/logo.svg +++ b/images/logo.svg @@ -60,7 +60,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -89,7 +89,7 @@ - + @@ -202,9 +202,11 @@ - + + + - + From de0c0b8534e058390f43e48debd8ed32e5c9dec9 Mon Sep 17 00:00:00 2001 From: Valentin Deleplace Date: Mon, 24 Aug 2020 16:38:54 +0200 Subject: [PATCH 082/111] Latest tag 2020.1.5 --- doc/staticcheck.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index c56a52142..230778a20 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -22,12 +22,12 @@

    Installation

    If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. - It is suggested that you explicitly check out the latest release branch instead, which is currently 2020.1.5. + It is suggested that you explicitly check out the latest release tag instead, which is currently 2020.1.5. One way of doing so would be as follows:

    cd $GOPATH/src/honnef.co/go/tools/cmd/staticcheck
    -git checkout 2020.1.4
    +git checkout 2020.1.5
     go get
     go install
     
    From 8040d28b8db8066b19e5b7b89dcb7d59373a203c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Sep 2020 05:55:52 +0200 Subject: [PATCH 083/111] Update README Add our logo and reword the introduction. --- README.md | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index ba3eae718..ba8172a6c 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,20 @@ -# honnef.co/go/tools +
    +

    Staticcheck logo
    + The advanced Go linter +

    +
    -`honnef.co/go/tools/...` is a collection of tools and libraries for -working with Go code, including linters and static analysis, most -prominently staticcheck. +Staticcheck is a state of the art linter for the [Go programming +language](https://go.dev/). Using static analysis, it finds bugs and performance issues, +offers simplifications, and enforces style rules. -**These tools are financially supported by [private and corporate sponsors](http://staticcheck.io/sponsors) to ensure its continued development. -Please consider [becoming a sponsor](https://github.com/users/dominikh/sponsorship) if you or your company relies on the tools.** +**Financial support by [private and corporate sponsors](http://staticcheck.io/sponsors) guarantees the tool's continued development. +Please [become a sponsor](https://github.com/users/dominikh/sponsorship) if you or your company rely on Staticcheck.** ## Documentation -You can find extensive documentation on these tools, in particular staticcheck, on [staticcheck.io](https://staticcheck.io/docs/). - +You can find extensive documentation on Staticcheck on [its website](https://staticcheck.io/docs/). 
## Installation From ac8689e39794d79f6b43ed7ea81e959b3f8887e0 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 17 Sep 2020 22:17:01 +0200 Subject: [PATCH 084/111] SA9004: don't flag constant groups with conflicting types Closes gh-828 --- staticcheck/lint.go | 28 +++++++++++++++---- .../CheckMissingEnumTypesInDeclaration.go | 18 ++++++++++++ ...eckMissingEnumTypesInDeclaration.go.golden | 18 ++++++++++++ 3 files changed, 58 insertions(+), 6 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 481a504bc..34c2e82b3 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3306,6 +3306,18 @@ func CheckSillyRegexp(pass *analysis.Pass) (interface{}, error) { } func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error) { + convertibleTo := func(V, T types.Type) bool { + if types.ConvertibleTo(V, T) { + return true + } + // Go <1.16 returns false for untyped string to string conversion + if V, ok := V.(*types.Basic); ok && V.Kind() == types.UntypedString { + if T, ok := T.Underlying().(*types.Basic); ok && T.Kind() == types.String { + return true + } + } + return false + } fn := func(node ast.Node) { decl := node.(*ast.GenDecl) if !decl.Lparen.IsValid() { @@ -3325,11 +3337,21 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error // first constant doesn't have a type continue groupLoop } + + firstType := pass.TypesInfo.TypeOf(group[0].(*ast.ValueSpec).Values[0]) for i, spec := range group { spec := spec.(*ast.ValueSpec) + if i > 0 && spec.Type != nil { + continue groupLoop + } if len(spec.Names) != 1 || len(spec.Values) != 1 { continue groupLoop } + + if !convertibleTo(pass.TypesInfo.TypeOf(spec.Values[0]), firstType) { + continue groupLoop + } + switch v := spec.Values[0].(type) { case *ast.BasicLit: case *ast.UnaryExpr: @@ -3341,12 +3363,6 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error // time.Microsecond = 1000 * Nanosecond continue groupLoop } - 
if i == 0 { - continue - } - if spec.Type != nil { - continue groupLoop - } } var edits []analysis.TextEdit typ := group[0].(*ast.ValueSpec).Type diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go index 923e0c43b..bc6f5f0ed 100644 --- a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go @@ -66,3 +66,21 @@ const ( c36 int = 2 ) + +const ( + c37 int = 1 + c38 = "2" +) + +const ( + c39 int8 = 1.0 // want `only the first constant in this group has an explicit type` + c40 = 'a' + c41 = 3 +) + +type String string + +const ( + c42 String = "" // want `only the first constant in this group has an explicit type` + c43 = "" +) diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden index f5f3c53ee..371ccd2e5 100644 --- a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden @@ -66,3 +66,21 @@ const ( c36 int = 2 ) + +const ( + c37 int = 1 + c38 = "2" +) + +const ( + c39 int8 = 1.0 // want `only the first constant in this group has an explicit type` + c40 int8 = 'a' + c41 int8 = 3 +) + +type String string + +const ( + c42 String = "" // want `only the first constant in this group has an explicit type` + c43 String = "" +) From 2b63335d5ff1534e205dd0743e1b16c79c175c7e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 17 Sep 2020 22:17:53 +0200 Subject: [PATCH 085/111] lintcmd/runner: don't hang if we have no analyzers to run --- 
lintcmd/runner/runner.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lintcmd/runner/runner.go b/lintcmd/runner/runner.go index 867d192f0..b74cdce7d 100644 --- a/lintcmd/runner/runner.go +++ b/lintcmd/runner/runner.go @@ -978,6 +978,12 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an } } + // Don't hang if there are no analyzers to run; for example + // because we are analyzing a dependency but have no analyzers + // that produce facts. + if len(all) == 0 { + close(queue) + } for item := range queue { b := r.semaphore.AcquireMaybe() if b { From 146f620c9673a52224cc21f8d8ac03025c47a654 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 6 Oct 2020 21:27:09 +0200 Subject: [PATCH 086/111] SA4023: flag impossible comparisons of interface values with untyped nil --- analysis/facts/nilness/nilness.go | 210 +++++++++++++++ analysis/facts/nilness/nilness_test.go | 11 + .../nilness/testdata/src/Nilness/Nilness.go | 90 +++++++ .../testdata/src/Typedness/Typedness.go | 236 +++++++++++++++++ analysis/facts/typedness/typedness.go | 242 ++++++++++++++++++ analysis/facts/typedness/typedness_test.go | 11 + analysis/report/report.go | 21 ++ analysis/report/report_test.go | 19 ++ debug/debug.go | 16 ++ go.mod | 2 +- go/ir/func.go | 2 +- go/types/typeutil/util.go | 2 +- staticcheck/analysis.go | 6 + staticcheck/doc.go | 62 +++++ staticcheck/lint.go | 105 +++++++- staticcheck/lint_test.go | 10 + .../CheckTypedNilInterface.go | 200 +++++++++++++++ .../CheckTypedNilInterface/i26000/26000.go | 34 +++ .../CheckTypedNilInterface/i27815/27815.go | 28 ++ .../CheckTypedNilInterface/i28241/28241.go | 39 +++ .../CheckTypedNilInterface/i31873/31873.go | 26 ++ .../CheckTypedNilInterface/i33965/33965.go | 26 ++ .../CheckTypedNilInterface/i33994/33994.go | 35 +++ .../CheckTypedNilInterface/i35217/35217.go | 24 ++ .../src/CheckTypedNilInterface/real.go | 42 +++ 25 files changed, 1495 insertions(+), 4 deletions(-) create mode 100644 
analysis/facts/nilness/nilness.go create mode 100644 analysis/facts/nilness/nilness_test.go create mode 100644 analysis/facts/nilness/testdata/src/Nilness/Nilness.go create mode 100644 analysis/facts/typedness/testdata/src/Typedness/Typedness.go create mode 100644 analysis/facts/typedness/typedness.go create mode 100644 analysis/facts/typedness/typedness_test.go create mode 100644 analysis/report/report_test.go create mode 100644 debug/debug.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/CheckTypedNilInterface.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i26000/26000.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i27815/27815.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i28241/28241.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i31873/31873.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i33965/33965.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i33994/33994.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/i35217/35217.go create mode 100644 staticcheck/testdata/src/CheckTypedNilInterface/real.go diff --git a/analysis/facts/nilness/nilness.go b/analysis/facts/nilness/nilness.go new file mode 100644 index 000000000..aa2301a09 --- /dev/null +++ b/analysis/facts/nilness/nilness.go @@ -0,0 +1,210 @@ +package nilness + +import ( + "fmt" + "go/token" + "go/types" + "reflect" + + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/types/typeutil" + "honnef.co/go/tools/internal/passes/buildir" + + "golang.org/x/tools/go/analysis" +) + +// neverReturnsNilFact denotes that a function's return value will never +// be nil (typed or untyped). The analysis errs on the side of false +// negatives. 
+type neverReturnsNilFact struct { + Rets uint8 +} + +func (*neverReturnsNilFact) AFact() {} +func (fact *neverReturnsNilFact) String() string { + return fmt.Sprintf("never returns nil: %08b", fact.Rets) +} + +type Result struct { + m map[*types.Func]uint8 +} + +var Analysis = &analysis.Analyzer{ + Name: "nilness", + Doc: "Annotates return values that will never be nil (typed or untyped)", + Run: run, + Requires: []*analysis.Analyzer{buildir.Analyzer}, + FactTypes: []analysis.Fact{(*neverReturnsNilFact)(nil)}, + ResultType: reflect.TypeOf((*Result)(nil)), +} + +// MayReturnNil reports whether the ret's return value of fn might be +// a typed or untyped nil value. The value of ret is zero-based. +// +// The analysis has false positives: MayReturnNil can incorrectly +// report true, but never incorrectly reports false. +func (r *Result) MayReturnNil(fn *types.Func, ret int) bool { + if !typeutil.IsPointerLike(fn.Type().(*types.Signature).Results().At(ret).Type()) { + return false + } + return (r.m[fn] & (1 << ret)) == 0 +} + +func run(pass *analysis.Pass) (interface{}, error) { + seen := map[*ir.Function]struct{}{} + out := &Result{ + m: map[*types.Func]uint8{}, + } + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + impl(pass, fn, seen) + } + + for _, fact := range pass.AllObjectFacts() { + out.m[fact.Object.(*types.Func)] = fact.Fact.(*neverReturnsNilFact).Rets + } + + return out, nil +} + +func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) (out uint8) { + if fn.Signature.Results().Len() > 8 { + return 0 + } + if fn.Object() == nil { + // TODO(dh): support closures + return 0 + } + if fact := new(neverReturnsNilFact); pass.ImportObjectFact(fn.Object(), fact) { + return fact.Rets + } + if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { + return 0 + } + if fn.Blocks == nil { + return 0 + } + if _, ok := seenFns[fn]; ok { + // break recursion + return 0 + } + + seenFns[fn] = struct{}{} + defer 
func() { + for i := 0; i < fn.Signature.Results().Len(); i++ { + if !typeutil.IsPointerLike(fn.Signature.Results().At(i).Type()) { + // we don't need facts to know that non-pointer types + // can't be nil. zeroing out those bits may result in + // all bits being zero, in which case we don't have to + // save any fact. + out &= ^(1 << i) + } + } + if out > 0 { + pass.ExportObjectFact(fn.Object(), &neverReturnsNilFact{out}) + } + }() + + seen := map[ir.Value]struct{}{} + var mightReturnNil func(v ir.Value) bool + mightReturnNil = func(v ir.Value) bool { + if _, ok := seen[v]; ok { + // break cycle + return true + } + if !typeutil.IsPointerLike(v.Type()) { + return false + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ir.MakeInterface: + return mightReturnNil(v.X) + case *ir.Convert: + return mightReturnNil(v.X) + case *ir.Slice: + return mightReturnNil(v.X) + case *ir.Phi: + for _, e := range v.Edges { + if mightReturnNil(e) { + return true + } + } + return false + case *ir.Extract: + switch d := v.Tuple.(type) { + case *ir.Call: + if callee := d.Call.StaticCallee(); callee != nil { + return impl(pass, callee, seenFns)&(1< 8 { + return 0 + } + if fn.Object() == nil { + // TODO(dh): support closures + return 0 + } + if fact := new(alwaysTypedFact); pass.ImportObjectFact(fn.Object(), fact) { + return fact.Rets + } + if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { + return 0 + } + if fn.Blocks == nil { + return 0 + } + if irutil.IsStub(fn) { + return 0 + } + if _, ok := seenFns[fn]; ok { + // break recursion + return 0 + } + + seenFns[fn] = struct{}{} + defer func() { + for i := 0; i < fn.Signature.Results().Len(); i++ { + if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); !ok { + // we don't need facts to know that non-interface + // types can't be untyped nil. zeroing out those bits + // may result in all bits being zero, in which case we + // don't have to save any fact. 
+ out &= ^(1 << i) + } + } + if out > 0 { + pass.ExportObjectFact(fn.Object(), &alwaysTypedFact{out}) + } + }() + + isUntypedNil := func(v ir.Value) bool { + k, ok := v.(*ir.Const) + if !ok { + return false + } + if _, ok := k.Type().Underlying().(*types.Interface); !ok { + return false + } + return k.Value == nil + } + + var do func(v ir.Value, seen map[ir.Value]struct{}) bool + do = func(v ir.Value, seen map[ir.Value]struct{}) bool { + if _, ok := seen[v]; ok { + // break cycle + return false + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ir.Const: + // can't be a typed nil, because then we'd be returning the + // result of MakeInterface. + return false + case *ir.ChangeInterface: + return do(v.X, seen) + case *ir.Extract: + call, ok := v.Tuple.(*ir.Call) + if !ok { + // We only care about extracts of function results. For + // everything else (e.g. channel receives and map + // lookups), we can either not deduce any information, or + // will see a MakeInterface. + return false + } + if callee := call.Call.StaticCallee(); callee != nil { + return impl(pass, callee, seenFns)&(1<interface conversions, which + // don't tell us anything about the nilness. + return false + case *ir.MapLookup, *ir.Index, *ir.Recv, *ir.Parameter, *ir.Load, *ir.Field: + // All other instructions that tell us nothing about the + // typedness of interface values. 
+ return false + default: + panic(fmt.Sprintf("internal error: unhandled type %T", v)) + } + } + + ret := fn.Exit.Control().(*ir.Return) + for i, v := range ret.Results { + if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); ok { + if do(v, map[ir.Value]struct{}{}) { + out |= 1 << i + } + } + } + return out +} diff --git a/analysis/facts/typedness/typedness_test.go b/analysis/facts/typedness/typedness_test.go new file mode 100644 index 000000000..0979ce271 --- /dev/null +++ b/analysis/facts/typedness/typedness_test.go @@ -0,0 +1,11 @@ +package typedness + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" +) + +func TestTypedness(t *testing.T) { + analysistest.Run(t, analysistest.TestData(), Analysis, "Typedness") +} diff --git a/analysis/report/report.go b/analysis/report/report.go index 2985334d3..af0ca337a 100644 --- a/analysis/report/report.go +++ b/analysis/report/report.go @@ -6,6 +6,7 @@ import ( "go/printer" "go/token" "path/filepath" + "strconv" "strings" "honnef.co/go/tools/analysis/facts" @@ -201,3 +202,23 @@ func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { return pos } + +func Ordinal(n int) string { + suffix := "th" + if n < 10 || n > 20 { + switch n % 10 { + case 0: + suffix = "th" + case 1: + suffix = "st" + case 2: + suffix = "nd" + case 3: + suffix = "rd" + default: + suffix = "th" + } + } + + return strconv.Itoa(n) + suffix +} diff --git a/analysis/report/report_test.go b/analysis/report/report_test.go new file mode 100644 index 000000000..3985eae70 --- /dev/null +++ b/analysis/report/report_test.go @@ -0,0 +1,19 @@ +package report + +import "testing" + +func TestOrdinal(t *testing.T) { + tests := []struct { + num int + want string + }{ + {0, "0th"}, {1, "1st"}, {2, "2nd"}, {3, "3rd"}, {4, "4th"}, {5, "5th"}, {6, "6th"}, {7, "7th"}, {8, "8th"}, {9, "9th"}, + {10, "10th"}, {11, "11th"}, {12, "12th"}, {13, "13th"}, {14, "14th"}, {15, "15th"}, {16, "16th"}, {17, "17th"}, {18, 
"18th"}, {19, "19th"}, + {20, "20th"}, {21, "21st"}, {22, "22nd"}, {23, "23rd"}, {24, "24th"}, {25, "25th"}, {26, "26th"}, {27, "27th"}, {28, "28th"}, {29, "29th"}, + } + for _, tt := range tests { + if got := Ordinal(tt.num); got != tt.want { + t.Errorf("Ordinal(%d) = %s, want %s", tt.num, got, tt.want) + } + } +} diff --git a/debug/debug.go b/debug/debug.go new file mode 100644 index 000000000..d9fcfeb24 --- /dev/null +++ b/debug/debug.go @@ -0,0 +1,16 @@ +package debug + +import ( + "fmt" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +type Positioner interface { + Pos() token.Pos +} + +func PrintPosition(pass *analysis.Pass, obj Positioner) { + fmt.Println(pass.Fset.PositionFor(obj.Pos(), false)) +} diff --git a/go.mod b/go.mod index 044b4f28e..e0a172149 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module honnef.co/go/tools -go 1.11 +go 1.14 require ( github.com/BurntSushi/toml v0.3.1 diff --git a/go/ir/func.go b/go/ir/func.go index b99dc6ae1..14ec132bc 100644 --- a/go/ir/func.go +++ b/go/ir/func.go @@ -33,7 +33,7 @@ func (b *BasicBlock) Control() Instruction { return b.Instrs[len(b.Instrs)-1] } -// SIgmaFor returns the sigma node for v coming from pred. +// SigmaFor returns the sigma node for v coming from pred. 
func (b *BasicBlock) SigmaFor(v Value, pred *BasicBlock) *Sigma { for _, instr := range b.Instrs { sigma, ok := instr.(*Sigma) diff --git a/go/types/typeutil/util.go b/go/types/typeutil/util.go index c96c1a7d3..b0aca16bd 100644 --- a/go/types/typeutil/util.go +++ b/go/types/typeutil/util.go @@ -85,7 +85,7 @@ func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == func IsPointerLike(T types.Type) bool { switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer: + case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer, *types.Slice: return true case *types.Basic: return T.Kind() == types.UnsafePointer diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index a860e8af3..e19e9b12e 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -2,6 +2,8 @@ package staticcheck import ( "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/facts/nilness" + "honnef.co/go/tools/analysis/facts/typedness" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/internal/passes/buildir" @@ -187,6 +189,10 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Run: CheckAddressIsNil, Requires: []*analysis.Analyzer{inspect.Analyzer}, }, + "SA4023": { + Run: CheckTypedNilInterface, + Requires: []*analysis.Analyzer{buildir.Analyzer, typedness.Analysis, nilness.Analysis}, + }, "SA5000": { Run: CheckNilMaps, diff --git a/staticcheck/doc.go b/staticcheck/doc.go index cc04d77b7..aedd28348 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -498,6 +498,68 @@ and therefore doSomething()'s return value implements both.`, Since: "2020.1", }, + "SA4023": { + Title: `Impossible comparison of interface value with untyped nil`, + Text: `Under the covers, interfaces are implemented as two elements, a +type T and a value V. V is a concrete value such as an int, +struct or pointer, never an interface itself, and has type T. 
For +instance, if we store the int value 3 in an interface, the +resulting interface value has, schematically, (T=int, V=3). The +value V is also known as the interface's dynamic value, since a +given interface variable might hold different values V (and +corresponding types T) during the execution of the program. + +An interface value is nil only if the V and T are both +unset, (T=nil, V is not set), In particular, a nil interface will +always hold a nil type. If we store a nil pointer of type *int +inside an interface value, the inner type will be *int regardless +of the value of the pointer: (T=*int, V=nil). Such an interface +value will therefore be non-nil even when the pointer value V +inside is nil. + +This situation can be confusing, and arises when a nil value is +stored inside an interface value such as an error return: + + func returnsError() error { + var p *MyError = nil + if bad() { + p = ErrBad + } + return p // Will always return a non-nil error. + } + +If all goes well, the function returns a nil p, so the return +value is an error interface value holding (T=*MyError, V=nil). +This means that if the caller compares the returned error to nil, +it will always look as if there was an error even if nothing bad +happened. To return a proper nil error to the caller, the +function must return an explicit nil: + + func returnsError() error { + if bad() { + return ErrBad + } + return nil + } + +It's a good idea for functions that return errors always to use +the error type in their signature (as we did above) rather than a +concrete type such as *MyError, to help guarantee the error is +created correctly. As an example, os.Open returns an error even +though, if not nil, it's always of concrete type *os.PathError. + +Similar situations to those described here can arise whenever +interfaces are used. Just keep in mind that if any concrete value +has been stored in the interface, the interface will not be nil. 
+For more information, see The Laws of +Reflection (https://golang.org/doc/articles/laws_of_reflection.html). + +This text has been copied from +https://golang.org/doc/faq#nil_error, licensed under the Creative +Commons Attribution 3.0 License.`, + Since: "Unreleased", + }, + "SA5000": { Title: `Assignment to nil map`, Since: "2017.1", diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 34c2e82b3..448cd98c3 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -21,6 +21,8 @@ import ( "honnef.co/go/tools/analysis/code" "honnef.co/go/tools/analysis/edit" "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/facts/nilness" + "honnef.co/go/tools/analysis/facts/typedness" "honnef.co/go/tools/analysis/lint" "honnef.co/go/tools/analysis/report" "honnef.co/go/tools/go/ast/astutil" @@ -153,7 +155,8 @@ var ( "(*sync.Pool).Put": func(call *Call) { arg := call.Args[knowledge.Arg("(*sync.Pool).Put.x")] typ := arg.Value.Value.Type() - if !typeutil.IsPointerLike(typ) { + _, isSlice := typ.Underlying().(*types.Slice) + if !typeutil.IsPointerLike(typ) || isSlice { arg.Invalid("argument should be pointer-like to avoid allocations") } }, @@ -4147,6 +4150,8 @@ func flagSliceLens(pass *analysis.Pass) { continue } + // TODO handle stubs + // we know the argument has to have even length. // now let's try to find its length if n := findSliceLength(arg); n > -1 && n%2 != 0 { @@ -4176,3 +4181,101 @@ func CheckEvenSliceLength(pass *analysis.Pass) (interface{}, error) { return nil, nil } + +func CheckTypedNilInterface(pass *analysis.Pass) (interface{}, error) { + // The comparison 'fn() == nil' can never be true if fn() returns + // an interface value and only returns typed nils. This is usually + // a mistake in the function itself, but all we can say for + // certain is that the comparison is pointless. 
+ // + // Flag results if no untyped nils are being returned, but either + // known typed nils, or typed unknown nilness are being returned. + + irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR) + typedness := pass.ResultOf[typedness.Analysis].(*typedness.Result) + nilness := pass.ResultOf[nilness.Analysis].(*nilness.Result) + for _, fn := range irpkg.SrcFuncs { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + binop, ok := instr.(*ir.BinOp) + if !ok || !(binop.Op == token.EQL || binop.Op == token.NEQ) { + continue + } + if _, ok := binop.X.Type().Underlying().(*types.Interface); !ok { + // TODO support swapped X and Y + continue + } + + k, ok := binop.Y.(*ir.Const) + if !ok || !k.IsNil() { + // if binop.X is an interface, then binop.Y can + // only be a Const if its untyped. A typed nil + // constant would first be passed to + // MakeInterface. + continue + } + + var idx int + var obj *types.Func + switch x := binop.X.(type) { + case *ir.Call: + callee := x.Call.StaticCallee() + if callee == nil { + continue + } + obj, _ = callee.Object().(*types.Func) + idx = 0 + case *ir.Extract: + call, ok := x.Tuple.(*ir.Call) + if !ok { + continue + } + callee := call.Call.StaticCallee() + if callee == nil { + continue + } + obj, _ = callee.Object().(*types.Func) + idx = x.Index + case *ir.MakeInterface: + var qualifier string + switch binop.Op { + case token.EQL: + qualifier = "never" + case token.NEQ: + qualifier = "always" + default: + panic("unreachable") + } + report.Report(pass, binop, fmt.Sprintf("this comparison is %s true", qualifier), + report.Related(x.X, "the lhs of the comparison gets its value from here and has a concrete type")) + continue + } + if obj == nil { + continue + } + + if typedness.MustReturnTyped(obj, idx) && nilness.MayReturnNil(obj, idx) && !code.IsInTest(pass, binop) { + // Don't flag these comparisons in tests. Tests + // may be explicitly enforcing the invariant that + // a value isn't nil. 
+ + var qualifier string + switch binop.Op { + case token.EQL: + qualifier = "never" + case token.NEQ: + qualifier = "always" + default: + panic("unreachable") + } + report.Report(pass, binop, fmt.Sprintf("this comparison is %s true", qualifier), + // TODO support swapped X and Y + report.Related(binop.X, fmt.Sprintf("the lhs of the comparison is the %s return value of this function call", report.Ordinal(idx+1))), + report.Related(obj, fmt.Sprintf("%s never returns a nil interface value", typeutil.FuncName(obj)))) + } + } + } + } + + return nil, nil +} diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index c79aaa0e5..122c2f56e 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -69,6 +69,16 @@ func TestAll(t *testing.T) { "SA4020": {{Dir: "CheckUnreachableTypeCases"}}, "SA4021": {{Dir: "CheckSingleArgAppend"}}, "SA4022": {{Dir: "CheckAddressIsNil"}}, + "SA4023": { + {Dir: "CheckTypedNilInterface"}, + {Dir: "CheckTypedNilInterface/i26000"}, + {Dir: "CheckTypedNilInterface/i27815"}, + {Dir: "CheckTypedNilInterface/i28241"}, + {Dir: "CheckTypedNilInterface/i31873"}, + {Dir: "CheckTypedNilInterface/i33965"}, + {Dir: "CheckTypedNilInterface/i33994"}, + {Dir: "CheckTypedNilInterface/i35217"}, + }, "SA5000": {{Dir: "CheckNilMaps"}}, "SA5001": {{Dir: "CheckEarlyDefer"}}, "SA5002": {{Dir: "CheckInfiniteEmptyLoop"}}, diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/CheckTypedNilInterface.go b/staticcheck/testdata/src/CheckTypedNilInterface/CheckTypedNilInterface.go new file mode 100644 index 000000000..75970c005 --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/CheckTypedNilInterface.go @@ -0,0 +1,200 @@ +package pkg + +import ( + "errors" + "os/exec" +) + +type T struct{ x *int } + +func fn1() *int { return nil } +func fn2() (int, *int, int) { return 0, nil, 0 } + +func fn3() (int, error) { return 0, nil } +func fn4() error { return nil } + +func gen1() interface{} { + // don't flag, returning a concrete 
value + return 0 +} + +func gen2() interface{} { + // don't flag, returning a concrete value + return &T{} +} + +func gen3() interface{} { + // flag, always returns a typed value + m := map[int]*int{} + return m[0] +} + +func gen4() (int, interface{}, *int) { + // flag ret[1], always a typed value + m := map[int]*int{} + return 0, m[0], nil +} + +func gen5() interface{} { + // flag, propagate gen3 + return gen3() +} + +func gen6(b bool) interface{} { + // don't flag, sometimes returns untyped nil + if b { + m := map[int]*int{} + return m[0] + } else { + return nil + } +} + +func gen7() interface{} { + // flag, always returns a typed value + return fn1() +} + +func gen8(x *int) interface{} { + // flag + if x == nil { + return x + } + return x +} + +func gen9() interface{} { + // flag + var x *int + return x +} + +func gen10() interface{} { + // flag + var x *int + if x == nil { + return x + } + return errors.New("") + + // This is a tricky one. we should flag this, because it never + // returns a nil error, but if errors.New could return untyped + // nils, then we shouldn't flag it. we need to consider the + // implementation of the called function. +} + +func gen11() interface{} { + // don't flag, we sometimes return untyped nil + if true { + return nil + } else { + return (*int)(nil) + } +} + +func gen12(b bool) interface{} { + // flag, all branches return typed nils + var x interface{} + if b { + x = (*int)(nil) + } else { + x = (*string)(nil) + } + return x +} + +func gen13() interface{} { + // flag, always returns a typed value + _, x, _ := fn2() + return x +} + +func gen14(ch chan *int) interface{} { + // flag + return <-ch +} + +func gen15() interface{} { + // flag + t := &T{} + return t.x +} + +var g *int = new(int) + +func gen16() interface{} { + // don't flag. returning a global is akin to returning &T{}. 
+ return g +} + +func gen17(x interface{}) interface{} { + // don't flag + if x != nil { + return x + } + return x +} + +func gen18() (int, error) { + // don't flag + _, err := fn3() + if err != nil { + return 0, errors.New("yo") + } + return 0, err +} + +func gen19() (out interface{}) { + // don't flag + if true { + return (*int)(nil) + } + return +} + +func gen20() (out interface{}) { + // don't flag + if true { + return (*int)(nil) + } + return +} + +func gen21() error { + if false { + return (*exec.Error)(nil) + } + return fn4() +} + +func test() { + _ = gen1() == nil + _ = gen2() == nil + _ = gen3() == nil // want `never true` + { + _, r2, r3 := gen4() + _ = r2 == nil // want `never true` + _ = r3 == nil + } + _ = gen5() == nil // want `never true` + _ = gen6(false) == nil + _ = gen7() == nil // want `never true` + _ = gen8(nil) == nil // want `never true` + _ = gen9() == nil // want `never true` + _ = gen10() == nil // want `never true` + _ = gen11() == nil + _ = gen12(true) == nil // want `never true` + _ = gen13() == nil // want `never true` + _ = gen14(nil) == nil // want `never true` + _ = gen15() == nil // want `never true` + // TODO(dh): fix the following case, ideally it doesn't cause a diagnostic + _ = gen16() == nil // want `never true` + _ = gen17(nil) == nil + { + _, r2 := gen18() + _ = r2 == nil + } + _ = gen19() == nil + _ = gen20() == nil + _ = gen21() == nil +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i26000/26000.go b/staticcheck/testdata/src/CheckTypedNilInterface/i26000/26000.go new file mode 100644 index 000000000..a086f8e3c --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/i26000/26000.go @@ -0,0 +1,34 @@ +package main + +import ( + "fmt" + "os" +) + +type CustomError struct { + Err string +} + +func (ce CustomError) Error() string { + return ce.Err +} + +func SomeFunc() (string, *CustomError) { + return "hello", nil +} + +func main() { + // Do something that creates a variable err of type error + _, 
err := os.Open("/") + if err != nil { + panic(err) + } + + // Then replace the err type with *CustomError + val, err := SomeFunc() + if err != nil { // want `this comparison is always true` + panic(err) + } + + fmt.Println("No problem", val) +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i27815/27815.go b/staticcheck/testdata/src/CheckTypedNilInterface/i27815/27815.go new file mode 100644 index 000000000..8418d1fdb --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/i27815/27815.go @@ -0,0 +1,28 @@ +package main + +import ( + "fmt" +) + +type MyError struct { + x string +} + +func (e *MyError) Error() string { + return e.x +} + +func f() *MyError { + return nil +} + +func main() { + var e error + e = f() + // e should be nil ? + if e != nil { // want `this comparison is always true` + fmt.Println("NOT NIL") + } else { + fmt.Println("NIL") + } +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i28241/28241.go b/staticcheck/testdata/src/CheckTypedNilInterface/i28241/28241.go new file mode 100644 index 000000000..f74d593c3 --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/i28241/28241.go @@ -0,0 +1,39 @@ +package main + +import ( + "fmt" + "reflect" +) + +type Nil interface { + String() string +} + +func MakeNil() Nil { + var n *NilStruct + return n +} + +type NilStruct struct { + Data string +} + +func (n *NilStruct) String() string { + return n.Data +} + +func main() { + var n *NilStruct + fmt.Printf("%t %#v %s %t\n", + n == nil, + n, + reflect.ValueOf(n).Kind(), + reflect.ValueOf(n).IsNil()) + n2 := MakeNil() + fmt.Printf("%t %#v %s %t\n", + n2 == nil, // want `this comparison is never true` + n2, + reflect.ValueOf(n2).Kind(), + reflect.ValueOf(n2).IsNil()) + fmt.Println(n2.String()) +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i31873/31873.go b/staticcheck/testdata/src/CheckTypedNilInterface/i31873/31873.go new file mode 100644 index 000000000..89d353838 --- /dev/null +++ 
b/staticcheck/testdata/src/CheckTypedNilInterface/i31873/31873.go @@ -0,0 +1,26 @@ +package main + +import "fmt" + +type S struct{} + +func (s *S) Error() string { + return "error for S" +} + +func structNil() *S { + return nil +} + +func errorNil() error { + return nil +} + +func main() { + err := errorNil() + fmt.Println(err != nil) + err = structNil() + fmt.Println(err != nil) // want `this comparison is always true` + err = errorNil() + fmt.Println(err != nil) +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i33965/33965.go b/staticcheck/testdata/src/CheckTypedNilInterface/i33965/33965.go new file mode 100644 index 000000000..b31d136e8 --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/i33965/33965.go @@ -0,0 +1,26 @@ +package pkg + +import ( + "fmt" + "testing" +) + +type customError struct { +} + +func (p *customError) Error() string { + return "custom error" +} + +func getNilCustomError() *customError { + return nil +} + +func TestWebSocketClient_basic(t *testing.T) { + err1 := getNilCustomError() + fmt.Println(err1 == nil) // ok is true + + err2 := error(nil) + err2 = getNilCustomError() + fmt.Println(err2 == nil) // want `this comparison is never true` +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i33994/33994.go b/staticcheck/testdata/src/CheckTypedNilInterface/i33994/33994.go new file mode 100644 index 000000000..3f3353764 --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/i33994/33994.go @@ -0,0 +1,35 @@ +package main + +import ( + "errors" + "fmt" +) + +func main() { + var err = errors.New("errors msg") + name, err := GetName() + if err != nil { // want `this comparison is always true` + fmt.Println(err) + } else { + fmt.Println(name) + } +} + +type Error struct { + Message string +} + +func (e *Error) Error() string { + if e == nil { + return "Error is nil" + } + return e.Message +} + +func GetName() (string, *Error) { + var err = &Error{ + Message: "error msg", + } + err = nil + 
return "yixinin", err +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/i35217/35217.go b/staticcheck/testdata/src/CheckTypedNilInterface/i35217/35217.go new file mode 100644 index 000000000..c05b7e7ae --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/i35217/35217.go @@ -0,0 +1,24 @@ +package main + +import ( + "errors" + "fmt" +) + +type someError struct { + Msg string +} + +func (e *someError) Error() string { + return "someError: " + e.Msg +} + +func calculate() (int, *someError) { + return 42, nil +} + +func main() { + err := errors.New("ERROR") + num, err := calculate() + fmt.Println(num, err, err == nil) // want `this comparison is never true` +} diff --git a/staticcheck/testdata/src/CheckTypedNilInterface/real.go b/staticcheck/testdata/src/CheckTypedNilInterface/real.go new file mode 100644 index 000000000..a07a60f9a --- /dev/null +++ b/staticcheck/testdata/src/CheckTypedNilInterface/real.go @@ -0,0 +1,42 @@ +package pkg + +import "log" + +type iface interface{ m() } + +type t1 struct{ int } + +func (t *t1) m() { log.Println(t.int) } + +type internalMessage struct{ v *t1 } + +func f(msg chan internalMessage, input int) { + k := &t1{input} + + if input > 2 { + k = nil + } + msg <- internalMessage{k} + +} + +func SyncPublicMethod(input int) iface { + ch := make(chan internalMessage) + go f(ch, input) + answer := <-ch + // Problem: if answer.v == nil then this will created typed nil iface return value + return answer.v +} + +func main() { + for i := 0; i < 10; i++ { + k := SyncPublicMethod(i) + if k == nil { // want `this comparison is never true` + log.Println("never printed") + return + } + + // Will panic. 
+ k.m() + } +} From 9cb6a0adc773f206ed870c696bcfc2ec594115ad Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 10 Oct 2020 04:59:07 +0200 Subject: [PATCH 087/111] lintcmd/runner: don't compute object paths more often than necessary Closes gh-843 --- lintcmd/runner/runner.go | 43 ++++++++++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/lintcmd/runner/runner.go b/lintcmd/runner/runner.go index b74cdce7d..63f4e47bb 100644 --- a/lintcmd/runner/runner.go +++ b/lintcmd/runner/runner.go @@ -139,6 +139,8 @@ import ( "golang.org/x/tools/go/types/objectpath" ) +const sanityCheck = false + type Diagnostic struct { Position token.Position End token.Position @@ -284,6 +286,11 @@ func (act *packageAction) String() string { return fmt.Sprintf("packageAction(%s)", act.Package) } +type objectFact struct { + fact analysis.Fact + path objectpath.Path +} + type objectFactKey struct { Obj types.Object Type reflect.Type @@ -316,7 +323,7 @@ type analyzerAction struct { // a package has been fully analyzed. 
Result interface{} Diagnostics []Diagnostic - ObjectFacts map[objectFactKey]analysis.Fact + ObjectFacts map[objectFactKey]objectFact PackageFacts map[packageFactKey]analysis.Fact Pass *analysis.Pass } @@ -435,7 +442,7 @@ func newAnalyzerAction(an *analysis.Analyzer, cache map[*analysis.Analyzer]*anal a := &analyzerAction{ Analyzer: an, - ObjectFacts: map[objectFactKey]analysis.Fact{}, + ObjectFacts: map[objectFactKey]objectFact{}, PackageFacts: map[packageFactKey]analysis.Fact{}, } cache[an] = a @@ -678,7 +685,7 @@ func pkgPaths(root *types.Package) map[string]*types.Package { return out } -func (r *Runner) loadFacts(root *types.Package, dep *packageAction, objFacts map[objectFactKey]analysis.Fact, pkgFacts map[packageFactKey]analysis.Fact) error { +func (r *Runner) loadFacts(root *types.Package, dep *packageAction, objFacts map[objectFactKey]objectFact, pkgFacts map[packageFactKey]analysis.Fact) error { // Load facts of all imported packages vetx, err := os.Open(dep.vetx) if err != nil { @@ -715,7 +722,7 @@ func (r *Runner) loadFacts(root *types.Package, dep *packageAction, objFacts map objFacts[objectFactKey{ Obj: obj, Type: reflect.TypeOf(gf.Fact), - }] = gf.Fact + }] = objectFact{gf.Fact, objectpath.Path(gf.ObjPath)} } } return nil @@ -767,7 +774,7 @@ type analyzerRunner struct { pkg *loader.Package // object facts of our dependencies; may contain facts of // analyzers other than the current one - depObjFacts map[objectFactKey]analysis.Fact + depObjFacts map[objectFactKey]objectFact // package facts of our dependencies; may contain facts of // analyzers other than the current one depPkgFacts map[packageFactKey]analysis.Fact @@ -846,10 +853,10 @@ func (ar *analyzerRunner) do(act action) error { Type: reflect.TypeOf(fact), } if f, ok := ar.depObjFacts[key]; ok { - reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f.fact).Elem()) return true } else if f, ok := a.ObjectFacts[key]; ok { - 
reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f.fact).Elem()) return true } return false @@ -873,7 +880,8 @@ func (ar *analyzerRunner) do(act action) error { Obj: obj, Type: reflect.TypeOf(fact), } - a.ObjectFacts[key] = fact + path, _ := objectpath.For(obj) + a.ObjectFacts[key] = objectFact{fact, path} }, ExportPackageFact: func(fact analysis.Fact) { key := packageFactKey{ @@ -904,7 +912,7 @@ func (ar *analyzerRunner) do(act action) error { if filterFactType(key.Type) { out = append(out, analysis.ObjectFact{ Object: key.Obj, - Fact: fact, + Fact: fact.fact, }) } } @@ -912,7 +920,7 @@ func (ar *analyzerRunner) do(act action) error { if filterFactType(key.Type) { out = append(out, analysis.ObjectFact{ Object: key.Obj, - Fact: fact, + Fact: fact.fact, }) } } @@ -937,7 +945,7 @@ type analysisResult struct { } func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (analysisResult, error) { - depObjFacts := map[objectFactKey]analysis.Fact{} + depObjFacts := map[objectFactKey]objectFact{} depPkgFacts := map[packageFactKey]analysis.Fact{} for _, dep := range pkgAct.deps { @@ -1014,14 +1022,19 @@ func (r *subrunner) runAnalyzers(pkgAct *packageAction, pkg *loader.Package) (an // OPT(dh): cull objects not reachable via the exported closure gobFacts := make([]gobFact, 0, len(depObjFacts)+len(depPkgFacts)) for key, fact := range depObjFacts { - objPath, err := objectpath.For(key.Obj) - if err != nil { + if fact.path == "" { continue } + if sanityCheck { + p, _ := objectpath.For(key.Obj) + if p != fact.path { + panic(fmt.Sprintf("got different object paths for %v. 
old: %q new: %q", key.Obj, fact.path, p)) + } + } gf := gobFact{ PkgPath: key.Obj.Pkg().Path(), - ObjPath: string(objPath), - Fact: fact, + ObjPath: string(fact.path), + Fact: fact.fact, } gobFacts = append(gobFacts, gf) } From 5dbe911b6be075217c0f749fc7e7e81c7306935d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 10 Oct 2020 22:06:06 +0200 Subject: [PATCH 088/111] SA4023: treat globals specially --- analysis/facts/nilness/nilness.go | 128 +++++++++++------- .../nilness/testdata/src/Nilness/Nilness.go | 33 +++-- staticcheck/lint.go | 3 +- .../CheckTypedNilInterface.go | 11 +- 4 files changed, 114 insertions(+), 61 deletions(-) diff --git a/analysis/facts/nilness/nilness.go b/analysis/facts/nilness/nilness.go index aa2301a09..973f1f507 100644 --- a/analysis/facts/nilness/nilness.go +++ b/analysis/facts/nilness/nilness.go @@ -17,16 +17,16 @@ import ( // be nil (typed or untyped). The analysis errs on the side of false // negatives. type neverReturnsNilFact struct { - Rets uint8 + Rets []neverNilness } func (*neverReturnsNilFact) AFact() {} func (fact *neverReturnsNilFact) String() string { - return fmt.Sprintf("never returns nil: %08b", fact.Rets) + return fmt.Sprintf("never returns nil: %v", fact.Rets) } type Result struct { - m map[*types.Func]uint8 + m map[*types.Func][]neverNilness } var Analysis = &analysis.Analyzer{ @@ -39,21 +39,28 @@ var Analysis = &analysis.Analyzer{ } // MayReturnNil reports whether the ret's return value of fn might be -// a typed or untyped nil value. The value of ret is zero-based. +// a typed or untyped nil value. The value of ret is zero-based. When +// globalOnly is true, the only possible nil values are global +// variables. // // The analysis has false positives: MayReturnNil can incorrectly // report true, but never incorrectly reports false. 
-func (r *Result) MayReturnNil(fn *types.Func, ret int) bool { +func (r *Result) MayReturnNil(fn *types.Func, ret int) (yes bool, globalOnly bool) { if !typeutil.IsPointerLike(fn.Type().(*types.Signature).Results().At(ret).Type()) { - return false + return false, false } - return (r.m[fn] & (1 << ret)) == 0 + if len(r.m[fn]) == 0 { + return true, false + } + + v := r.m[fn][ret] + return v != neverNil, v == onlyGlobal } func run(pass *analysis.Pass) (interface{}, error) { seen := map[*ir.Function]struct{}{} out := &Result{ - m: map[*types.Func]uint8{}, + m: map[*types.Func][]neverNilness{}, } for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { impl(pass, fn, seen) @@ -66,53 +73,66 @@ func run(pass *analysis.Pass) (interface{}, error) { return out, nil } -func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) (out uint8) { - if fn.Signature.Results().Len() > 8 { - return 0 +type neverNilness uint8 + +const ( + neverNil neverNilness = 1 + onlyGlobal neverNilness = 2 + nilly neverNilness = 3 +) + +func (n neverNilness) String() string { + switch n { + case neverNil: + return "never" + case onlyGlobal: + return "global" + case nilly: + return "nil" + default: + return "BUG" } +} + +func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) (out []neverNilness) { if fn.Object() == nil { // TODO(dh): support closures - return 0 + return nil } if fact := new(neverReturnsNilFact); pass.ImportObjectFact(fn.Object(), fact) { return fact.Rets } if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { - return 0 + return nil } if fn.Blocks == nil { - return 0 + return nil } if _, ok := seenFns[fn]; ok { // break recursion - return 0 + return nil } seenFns[fn] = struct{}{} defer func() { - for i := 0; i < fn.Signature.Results().Len(); i++ { - if !typeutil.IsPointerLike(fn.Signature.Results().At(i).Type()) { - // we don't need facts to know that non-pointer types - // can't be nil. 
zeroing out those bits may result in - // all bits being zero, in which case we don't have to - // save any fact. - out &= ^(1 << i) + for i, v := range out { + if typeutil.IsPointerLike(fn.Signature.Results().At(i).Type()) && v != nilly { + pass.ExportObjectFact(fn.Object(), &neverReturnsNilFact{out}) + break } } - if out > 0 { - pass.ExportObjectFact(fn.Object(), &neverReturnsNilFact{out}) - } }() seen := map[ir.Value]struct{}{} - var mightReturnNil func(v ir.Value) bool - mightReturnNil = func(v ir.Value) bool { + + var mightReturnNil func(v ir.Value) neverNilness + mightReturnNil = func(v ir.Value) neverNilness { if _, ok := seen[v]; ok { // break cycle - return true + return nilly } if !typeutil.IsPointerLike(v.Type()) { - return false + return neverNil } seen[v] = struct{}{} switch v := v.(type) { @@ -123,44 +143,52 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ case *ir.Slice: return mightReturnNil(v.X) case *ir.Phi: + ret := neverNil for _, e := range v.Edges { - if mightReturnNil(e) { - return true + if n := mightReturnNil(e); n > ret { + ret = n } } - return false + return ret case *ir.Extract: switch d := v.Tuple.(type) { case *ir.Call: if callee := d.Call.StaticCallee(); callee != nil { - return impl(pass, callee, seenFns)&(1< Date: Sat, 10 Oct 2020 19:06:01 +0200 Subject: [PATCH 089/111] doc: add 2020.1.6 release notes (cherry picked from commit f1158d1d6d6a118974de177b3cc56687ac234733) --- doc/2020.1.html | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/doc/2020.1.html b/doc/2020.1.html index 3df791b92..0aadfd8d8 100644 --- a/doc/2020.1.html +++ b/doc/2020.1.html @@ -12,6 +12,7 @@
  • Staticcheck 2020.1.3 release notes
  • Staticcheck 2020.1.4 release notes
  • Staticcheck 2020.1.5 release notes
  • +
  • Staticcheck 2020.1.6 release notes
  • Introduction to Staticcheck 2020.1

    @@ -229,3 +230,21 @@

    Staticcheck 2020.1.5 release notes

    This release fixes a crash in the pattern matching engine and a false positive in SA4006.

    + +

    Staticcheck 2020.1.6 release notes

    + +

    + This release makes the following fixes and improvements: +

    + +
      +
    • Staticcheck no longer panics when encountering files that have the following comment: // Code generated DO NOT EDIT.
    • +
    • {{ check "SA4016" }} no longer panics when checking bitwise operations that involve dot-imported identifiers.
    • +
    • Fixed the suggested fix offered by {{ check "S1004" }}.
    • +
    • Fixed a false positive involving byte arrays in {{ check "SA5009" }}.
    • +
    • Fixed a false positive involving named byte slice types in {{ check "SA5009" }}.
    • +
    • Added another heuristic to avoid flagging function names in error messages in {{ check "ST1005" }}.
    • +
    • {{ check "SA3000" }} will no longer flag missing calls to os.Exit in TestMain functions if targeting Go 1.15 or newer.
    • +
    + + From 9742087923ba67d71443bf066c6823d088506d53 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 12 Oct 2020 06:19:06 +0200 Subject: [PATCH 090/111] doc: the latest version is 2020.1.6 (cherry picked from commit d12f52a99afdb5e442dd0e318063f985459ddbf7) --- doc/staticcheck.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index 230778a20..81f28f52b 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -22,12 +22,12 @@

    Installation

    If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. - It is suggested that you explicitly check out the latest release tag instead, which is currently 2020.1.5. + It is suggested that you explicitly check out the latest release tag instead, which is currently 2020.1.6. One way of doing so would be as follows:

    cd $GOPATH/src/honnef.co/go/tools/cmd/staticcheck
    -git checkout 2020.1.5
    +git checkout 2020.1.6
     go get
     go install
     
    From 3ab5b7ad5849456ca4c682a3fea0586ff948130a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 12 Oct 2020 08:40:16 +0200 Subject: [PATCH 091/111] unused: remove superfluous nil check --- unused/unused.go | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 9392c1ffb..797697719 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1105,12 +1105,10 @@ func (g *graph) entry(pkg *pkg) { } g.function(m) case *ir.Type: - if m.Object() != nil { - g.see(m.Object()) - if m.Object().Exported() { - // (1.1) packages use exported named types - g.use(m.Object(), nil, edgeExportedType) - } + g.see(m.Object()) + if m.Object().Exported() { + // (1.1) packages use exported named types + g.use(m.Object(), nil, edgeExportedType) } g.typ(m.Type(), nil) default: From 542e8c572ba90537f28d6272df560105e64d5e9c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 13 Oct 2020 01:02:42 +0200 Subject: [PATCH 092/111] staticcheck: run CheckTestMain tests with explicit Go version Updates gh-846 --- staticcheck/lint_test.go | 10 +++++----- .../CheckTestMainExit-1.go | 0 .../CheckTestMainExit-2.go | 0 .../CheckTestMainExit-3.go | 0 .../CheckTestMainExit-4.go | 0 .../CheckTestMainExit-5.go | 0 6 files changed, 5 insertions(+), 5 deletions(-) rename staticcheck/testdata/src/{CheckTestMainExit-1 => CheckTestMainExit-1_go14}/CheckTestMainExit-1.go (100%) rename staticcheck/testdata/src/{CheckTestMainExit-2 => CheckTestMainExit-2_go14}/CheckTestMainExit-2.go (100%) rename staticcheck/testdata/src/{CheckTestMainExit-3 => CheckTestMainExit-3_go14}/CheckTestMainExit-3.go (100%) rename staticcheck/testdata/src/{CheckTestMainExit-4 => CheckTestMainExit-4_go14}/CheckTestMainExit-4.go (100%) rename staticcheck/testdata/src/{CheckTestMainExit-5 => CheckTestMainExit-5_go14}/CheckTestMainExit-5.go (100%) diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index 122c2f56e..b351a9a14 100644 --- 
a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -41,11 +41,11 @@ func TestAll(t *testing.T) { "SA2002": {{Dir: "CheckConcurrentTesting"}}, "SA2003": {{Dir: "CheckDeferLock"}}, "SA3000": { - {Dir: "CheckTestMainExit-1"}, - {Dir: "CheckTestMainExit-2"}, - {Dir: "CheckTestMainExit-3"}, - {Dir: "CheckTestMainExit-4"}, - {Dir: "CheckTestMainExit-5"}, + {Dir: "CheckTestMainExit-1_go14", Version: "1.4"}, + {Dir: "CheckTestMainExit-2_go14", Version: "1.4"}, + {Dir: "CheckTestMainExit-3_go14", Version: "1.4"}, + {Dir: "CheckTestMainExit-4_go14", Version: "1.4"}, + {Dir: "CheckTestMainExit-5_go14", Version: "1.4"}, {Dir: "CheckTestMainExit-1_go115", Version: "1.15"}, }, "SA3001": {{Dir: "CheckBenchmarkN"}}, diff --git a/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go b/staticcheck/testdata/src/CheckTestMainExit-1_go14/CheckTestMainExit-1.go similarity index 100% rename from staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go rename to staticcheck/testdata/src/CheckTestMainExit-1_go14/CheckTestMainExit-1.go diff --git a/staticcheck/testdata/src/CheckTestMainExit-2/CheckTestMainExit-2.go b/staticcheck/testdata/src/CheckTestMainExit-2_go14/CheckTestMainExit-2.go similarity index 100% rename from staticcheck/testdata/src/CheckTestMainExit-2/CheckTestMainExit-2.go rename to staticcheck/testdata/src/CheckTestMainExit-2_go14/CheckTestMainExit-2.go diff --git a/staticcheck/testdata/src/CheckTestMainExit-3/CheckTestMainExit-3.go b/staticcheck/testdata/src/CheckTestMainExit-3_go14/CheckTestMainExit-3.go similarity index 100% rename from staticcheck/testdata/src/CheckTestMainExit-3/CheckTestMainExit-3.go rename to staticcheck/testdata/src/CheckTestMainExit-3_go14/CheckTestMainExit-3.go diff --git a/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go b/staticcheck/testdata/src/CheckTestMainExit-4_go14/CheckTestMainExit-4.go similarity index 100% rename from 
staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go rename to staticcheck/testdata/src/CheckTestMainExit-4_go14/CheckTestMainExit-4.go diff --git a/staticcheck/testdata/src/CheckTestMainExit-5/CheckTestMainExit-5.go b/staticcheck/testdata/src/CheckTestMainExit-5_go14/CheckTestMainExit-5.go similarity index 100% rename from staticcheck/testdata/src/CheckTestMainExit-5/CheckTestMainExit-5.go rename to staticcheck/testdata/src/CheckTestMainExit-5_go14/CheckTestMainExit-5.go From 8ea76608cc772cf10723cda3c7b57dd104e4ef95 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 13 Oct 2020 01:07:25 +0200 Subject: [PATCH 093/111] CI: target Go 1.14 as the lowest supported version, also run with Go 1.15 --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 852081232..e93260999 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -7,7 +7,7 @@ jobs: strategy: matrix: os: ["windows-latest", "ubuntu-latest", "macOS-latest"] - go: ["1.13.x", "1.14.x"] + go: ["1.14.x", "1.15.x"] runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v1 @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/setup-go@v1 with: - go-version: "1.13.x" + go-version: "1.14.x" - run: "GO111MODULE=on go get honnef.co/go/tools/cmd/staticcheck" - uses: actions/checkout@v1 with: @@ -36,4 +36,4 @@ jobs: restore-keys: | staticcheck- - run: "go vet ./..." - - run: "$(go env GOPATH)/bin/staticcheck -go 1.13 ./..." + - run: "$(go env GOPATH)/bin/staticcheck -go 1.14 ./..." From b5d04209c44b041579df5cc420df89dac1dd2a00 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 28 Oct 2020 03:38:19 +0100 Subject: [PATCH 094/111] cmd/go-module-query: delete No useful tool materialized here. Lets remove the prototype. 
--- cmd/go-module-query/main.go | 189 --------------------------- cmd/go-module-query/staticcheck.conf | 2 - 2 files changed, 191 deletions(-) delete mode 100644 cmd/go-module-query/main.go delete mode 100644 cmd/go-module-query/staticcheck.conf diff --git a/cmd/go-module-query/main.go b/cmd/go-module-query/main.go deleted file mode 100644 index aa59ba09e..000000000 --- a/cmd/go-module-query/main.go +++ /dev/null @@ -1,189 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "io" - "io/ioutil" - "log" - "net/http" - "os" - "path" - "path/filepath" - "strings" - "sync" - "sync/atomic" - "time" - - "github.com/google/renameio" - "github.com/rogpeppe/go-internal/modfile" - "golang.org/x/mod/module" -) - -/* -Q: which versions of our module are being used -A: find the latest version of every Go module, find the dependency on our module - -Q: what modules have stopped using our module -A: find every module where a version [0..N) uses us, but version N doesn't. -*/ - -func Fetch(since time.Time) ([]module.Version, time.Time, error) { - var out []module.Version - for { - out2, since2, err := fetch(since, out) - if err != nil { - return nil, since, err - } - if len(out) == len(out2) { - break - } - out = out2 - since = since2 - } - return out, since, nil -} - -func fetch(since time.Time, out []module.Version) ([]module.Version, time.Time, error) { - // +1µs because of bug in index.golang.org that returns results - // >=since instead of >since - ts := since.Add(1 * time.Microsecond) - u := `https://index.golang.org/index?since=` + ts.Format(time.RFC3339Nano) - resp, err := http.Get(u) - if err != nil { - return nil, since, err - } - defer resp.Body.Close() - dec := json.NewDecoder(resp.Body) - - var entry struct { - module.Version - Timestamp time.Time - } - for { - if err := dec.Decode(&entry); err != nil { - if err == io.EOF { - break - } - return out, since, err - } - - out = append(out, entry.Version) - since = entry.Timestamp - } 
- - return out, since, nil -} - -func main() { - cache, err := os.UserCacheDir() - if err != nil { - log.Fatal(err) - } - - var since time.Time - b, err := ioutil.ReadFile(filepath.Join(cache, "go-module-query", "last")) - if err == nil { - t, err := time.Parse(time.RFC3339Nano, string(b)) - if err != nil { - log.Fatal(err) - } - since = t - log.Println("Resuming at", since) - } else if !os.IsNotExist(err) { - log.Fatal(err) - } - - out, since, err := Fetch(since) - if err != nil { - log.Fatal(err) - } - - sem := make(chan struct{}, 8) - var wg sync.WaitGroup - var errs uint64 - for _, v := range out { - mpath, _ := module.EscapePath(v.Path) - p := filepath.Join(cache, "go-module-query", mpath, "@v", v.Version+".mod") - // XXX is this atomic? - if err := os.MkdirAll(filepath.Join(cache, "go-module-query", mpath, "@v"), 0777); err != nil { - log.Println(err) - continue - } - if _, err := os.Stat(p); os.IsNotExist(err) { - fmt.Println("Fetching", v) - sem <- struct{}{} - wg.Add(1) - go func(p string, v module.Version) { - defer wg.Done() - defer func() { <-sem }() - resp, err := http.Get("https://proxy.golang.org/" + path.Join(mpath, "@v", v.Version+".mod")) - if err != nil { - atomic.AddUint64(&errs, 1) - log.Println(err) - return - } - defer resp.Body.Close() - // XXX handle response code - pf, err := renameio.TempFile("", p) - if err != nil { - atomic.AddUint64(&errs, 1) - log.Println(err) - return - } - defer pf.Cleanup() - if _, err := io.Copy(pf, resp.Body); err != nil { - atomic.AddUint64(&errs, 1) - log.Println(err) - return - } - if err := pf.CloseAtomicallyReplace(); err != nil { - atomic.AddUint64(&errs, 1) - log.Println("Couldn't store go.mod:", err) - } - }(p, v) - } - } - - wg.Wait() - - if errs > 0 { - log.Println("Couldn't download all go.mod, not storing timestamp") - return - } - - if err := renameio.WriteFile(filepath.Join(cache, "go-module-query", "last"), []byte(since.Format(time.RFC3339Nano)), 0666); err != nil { - 
log.Println("Couldn't store timestamp:", err) - } -} - -func printGraph() { - cache, err := os.UserCacheDir() - if err != nil { - log.Fatal(err) - } - filepath.Walk(filepath.Join(cache, "go-module-query"), func(path string, info os.FileInfo, err error) error { - if err != nil { - return nil - } - if strings.HasSuffix(path, ".mod") { - name := filepath.Base(path) - name = name[:len(name)-4] - b, err := ioutil.ReadFile(path) - if err != nil { - log.Println(err) - return nil - } - f, err := modfile.Parse(path, b, nil) - if err != nil { - log.Println(err) - return nil - } - f.Module.Mod.Version = name - for _, dep := range f.Require { - fmt.Printf("%s@%s %s@%s\n", f.Module.Mod.Path, f.Module.Mod.Version, dep.Mod.Path, dep.Mod.Version) - } - } - return nil - }) -} diff --git a/cmd/go-module-query/staticcheck.conf b/cmd/go-module-query/staticcheck.conf deleted file mode 100644 index 16403869c..000000000 --- a/cmd/go-module-query/staticcheck.conf +++ /dev/null @@ -1,2 +0,0 @@ -# this package is WIP, unused code will occur. 
-checks = ["inherit", "-U1000"] From dc546986a51bad12b9de769571a7bc1b6b98da28 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 28 Oct 2020 03:42:39 +0100 Subject: [PATCH 095/111] Delete temporary files that Emacs created Closes gh-861 --- .../#CheckUntrappableSignal.go.golden# | 83 ------------------- .../.#CheckUntrappableSignal.go.golden | 1 - 2 files changed, 84 deletions(-) delete mode 100644 staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# delete mode 120000 staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# b/staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# deleted file mode 100644 index aff90a10c..000000000 --- a/staticcheck/testdata/src/CheckUntrappableSignal/#CheckUntrappableSignal.go.golden# +++ /dev/null @@ -1,83 +0,0 @@ --- remove syscall.SIGKILL from list of arguments -- -package main - -import ( - "os" - "os/signal" - "syscall" -) - -func fn() { - c := make(chan os.Signal, 1) - signal.Notify(c, os.Interrupt) - signal.Ignore() // want `cannot be trapped` - signal.Ignore(os.Kill) // want `cannot be trapped` - signal.Notify(c, os.Kill) // want `cannot be trapped` - signal.Reset(os.Kill) // want `cannot be trapped` - signal.Ignore() // want `cannot be trapped` - signal.Notify(c) // want `cannot be trapped` - signal.Reset() // want `cannot be trapped` -} - --- remove os.Kill from list of arguments -- -package main - -import ( - "os" - "os/signal" - "syscall" -) - -func fn() { - c := make(chan os.Signal, 1) - signal.Notify(c, os.Interrupt) - signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` - signal.Ignore() // want `cannot be trapped` - signal.Notify(c) // want `cannot be trapped` - signal.Reset() // want `cannot be trapped` - signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` - signal.Notify(c, syscall.SIGKILL) // want `cannot 
be trapped` - signal.Reset(syscall.SIGKILL) // want `cannot be trapped` -} - --- use syscall.SIGTERM instead of syscall.SIGKILL -- -package main - -import ( - "os" - "os/signal" - "syscall" -) - -func fn() { - c := make(chan os.Signal, 1) - signal.Notify(c, os.Interrupt) - signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` - signal.Ignore(os.Kill) // want `cannot be trapped` - signal.Notify(c, os.Kill) // want `cannot be trapped` - signal.Reset(os.Kill) // want `cannot be trapped` - signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` - signal.Notify(c, syscall.SIGTERM) // want `cannot be trapped` - signal.Reset(syscall.SIGTERM) // want `cannot be trapped` -} - --- use syscall.SIGTERM instead of os.Kill -- -package main - -import ( - "os" - "os/signal" - "syscall" -) - -func fn() { - c := make(chan os.Signal, 1) - signal.Notify(c, os.Interrupt) - signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` - signal.Ignore(syscall.SIGTERM) // want `cannot be trapped` - signal.Notify(c, syscall.SIGTERM) // want `cannot be trapped` - signal.Reset(syscall.SIGTERM) // want `cannot be trapped` - signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` - signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` - signal.Reset(syscall.SIGKILL) // want `cannot be trapped` -} diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden b/staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden deleted file mode 120000 index ebff53d0d..000000000 --- a/staticcheck/testdata/src/CheckUntrappableSignal/.#CheckUntrappableSignal.go.golden +++ /dev/null @@ -1 +0,0 @@ -dominikh@nixos.1490:1586915592 \ No newline at end of file From a6ed285e57c7d3a0fb2922170942f39c5cd6eadb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 10 Nov 2020 00:22:51 +0100 Subject: [PATCH 096/111] go/ir: improve analysis of functions that don't return This change adds support for functions that abort control flow, 
but use a mixture of exiting the process and panicking. In the past, we would only detect functions that either always exit or always panic. That is, we now correctly detect that the following function affects control flow: func fn(b bool) { if b { panic("") } else { syscall.Exit(1) } } This change requires the introduction of a new intrinsic, ir:noreturnWasPanic, which we use to encode the runtime behavior of functions that may either panic or exit. As part of these changes we happen to fix a bug: given two functions A and B, where A panics unconditionally and B calls A, but recovers from the panic, we would previously incorrectly mark B as always panicking also, ignoring the fact that it recovered from the panic. This change was triggered by os.Exit in Go 1.16 conditionally panicking or exiting. Instead of special-casing os.Exit, I decided to improve handling of such functions, so that user-written code may benefit from the improved analysis. Closes gh-872 --- go/ir/builder.go | 2 +- go/ir/exits.go | 213 ++++++++++-------- go/ir/ssa.go | 38 ++-- internal/passes/buildir/buildir.go | 30 +-- .../src/CheckMaybeNil/CheckMaybeNil.go | 27 ++- 5 files changed, 185 insertions(+), 125 deletions(-) diff --git a/go/ir/builder.go b/go/ir/builder.go index 407a10f81..24ff69988 100644 --- a/go/ir/builder.go +++ b/go/ir/builder.go @@ -2261,7 +2261,7 @@ func (b *builder) buildFunction(fn *Function) { // However, they aren't stubs, so buildExits ends up getting // called on them, so that's where we handle those special // cases. 
- fn.WillExit = true + fn.NoReturn = AlwaysExits } if body == nil { diff --git a/go/ir/exits.go b/go/ir/exits.go index 10cda7bb6..2166f74d6 100644 --- a/go/ir/exits.go +++ b/go/ir/exits.go @@ -10,13 +10,13 @@ func (b *builder) buildExits(fn *Function) { case "runtime": switch obj.Name() { case "exit": - fn.WillExit = true + fn.NoReturn = AlwaysExits return case "throw": - fn.WillExit = true + fn.NoReturn = AlwaysExits return case "Goexit": - fn.WillUnwind = true + fn.NoReturn = AlwaysUnwinds return } case "github.com/sirupsen/logrus": @@ -31,7 +31,7 @@ func (b *builder) buildExits(fn *Function) { // process, and that's what the vast majority of people // will use it for. We'll happily accept some false // negatives to avoid a lot of false positives. - fn.WillExit = true + fn.NoReturn = AlwaysExits return case "(*github.com/sirupsen/logrus.Logger).Panic", "(*github.com/sirupsen/logrus.Logger).Panicf", @@ -40,7 +40,7 @@ func (b *builder) buildExits(fn *Function) { // These methods will always panic, but that's not // statically known from the code alone, because they // take a detour through the generic Log methods. - fn.WillUnwind = true + fn.NoReturn = AlwaysUnwinds return case "(*github.com/sirupsen/logrus.Entry).Panicf", "(*github.com/sirupsen/logrus.Entry).Panicln": @@ -48,12 +48,12 @@ func (b *builder) buildExits(fn *Function) { // Entry.Panic has an explicit panic, but Panicf and // Panicln do not, relying fully on the generic Log // method. - fn.WillUnwind = true + fn.NoReturn = AlwaysUnwinds return case "(*github.com/sirupsen/logrus.Logger).Log", "(*github.com/sirupsen/logrus.Logger).Logf", "(*github.com/sirupsen/logrus.Logger).Logln": - // TODO(dh): we cannot handle these case. Whether they + // TODO(dh): we cannot handle these cases. Whether they // exit or unwind depends on the level, which is set // via the first argument. We don't currently support // call-site-specific exit information. 
@@ -61,8 +61,6 @@ func (b *builder) buildExits(fn *Function) { } } - buildDomTree(fn) - isRecoverCall := func(instr Instruction) bool { if instr, ok := instr.(*Call); ok { if builtin, ok := instr.Call.Value.(*Builtin); ok { @@ -74,66 +72,54 @@ func (b *builder) buildExits(fn *Function) { return false } - // All panics branch to the exit block, which means that if every - // possible path through the function panics, then all - // predecessors of the exit block must panic. - willPanic := true - for _, pred := range fn.Exit.Preds { - if _, ok := pred.Control().(*Panic); !ok { - willPanic = false - } - } - if willPanic { - recovers := false - recoverLoop: - for _, u := range fn.Blocks { - for _, instr := range u.Instrs { - if instr, ok := instr.(*Defer); ok { - call := instr.Call.StaticCallee() - if call == nil { - // not a static call, so we can't be sure the - // deferred call isn't calling recover - recovers = true - break recoverLoop - } - if len(call.Blocks) == 0 { - // external function, we don't know what's - // happening inside it - // - // TODO(dh): this includes functions from - // imported packages, due to how go/analysis - // works. We could introduce another fact, - // like we've done for exiting and unwinding, - // but it doesn't seem worth it. Virtually all - // uses of recover will be in closures. - recovers = true - break recoverLoop - } - for _, y := range call.Blocks { - for _, instr2 := range y.Instrs { - if isRecoverCall(instr2) { - recovers = true - break recoverLoop - } - } - } - } - } - } - if !recovers { - fn.WillUnwind = true - return - } - } - - // TODO(dh): don't check that any specific call dominates the exit - // block. instead, check that all calls combined cover every - // possible path through the function. 
+ both := NewBlockSet(len(fn.Blocks)) exits := NewBlockSet(len(fn.Blocks)) unwinds := NewBlockSet(len(fn.Blocks)) + recovers := false for _, u := range fn.Blocks { for _, instr := range u.Instrs { - if instr, ok := instr.(CallInstruction); ok { + instrSwitch: + switch instr := instr.(type) { + case *Defer: + if recovers { + // avoid doing extra work, we already know that this function calls recover + continue + } + call := instr.Call.StaticCallee() + if call == nil { + // not a static call, so we can't be sure the + // deferred call isn't calling recover + recovers = true + break + } + if call.Package() == fn.Package() { + b.buildFunction(call) + } + if len(call.Blocks) == 0 { + // external function, we don't know what's + // happening inside it + // + // TODO(dh): this includes functions from + // imported packages, due to how go/analysis + // works. We could introduce another fact, + // like we've done for exiting and unwinding. + recovers = true + break + } + for _, y := range call.Blocks { + for _, instr2 := range y.Instrs { + if isRecoverCall(instr2) { + recovers = true + break instrSwitch + } + } + } + + case *Panic: + both.Add(u) + unwinds.Add(u) + + case CallInstruction: switch instr.(type) { case *Defer, *Call: default: @@ -162,19 +148,15 @@ func (b *builder) buildExits(fn *Function) { if call.Package() == fn.Package() { b.buildFunction(call) } - dom := u.Dominates(fn.Exit) - if call.WillExit { - if dom { - fn.WillExit = true - return - } + switch call.NoReturn { + case AlwaysExits: + both.Add(u) exits.Add(u) - } else if call.WillUnwind { - if dom { - fn.WillUnwind = true - return - } + case AlwaysUnwinds: + both.Add(u) unwinds.Add(u) + case NeverReturns: + both.Add(u) } } } @@ -202,24 +184,38 @@ func (b *builder) buildExits(fn *Function) { } return false } - - if exits.Num() > 0 { - if !findPath(fn.Blocks[0], exits) { - fn.WillExit = true - return + findPathEntry := func(root *BasicBlock, bl *BlockSet) bool { + if bl.Num() == 0 { + return true } - } - if 
unwinds.Num() > 0 { seen.Clear() - if !findPath(fn.Blocks[0], unwinds) { - fn.WillUnwind = true - return + return findPath(root, bl) + } + + if !findPathEntry(fn.Blocks[0], exits) { + fn.NoReturn = AlwaysExits + } else if !recovers { + // Only consider unwinding and "never returns" if we don't + // call recover. If we do call recover, then panics don't + // bubble up the stack. + + // TODO(dh): the position of the defer matters. If we + // unconditionally terminate before we defer a recover, then + // the recover is ineffective. + + if !findPathEntry(fn.Blocks[0], unwinds) { + fn.NoReturn = AlwaysUnwinds + } else if !findPathEntry(fn.Blocks[0], both) { + fn.NoReturn = NeverReturns } } } func (b *builder) addUnreachables(fn *Function) { + var unreachable *BasicBlock + for _, bb := range fn.Blocks { + instrLoop: for i, instr := range bb.Instrs { if instr, ok := instr.(*Call); ok { var call *Function @@ -236,7 +232,8 @@ func (b *builder) addUnreachables(fn *Function) { // make sure we have information on all functions in this package b.buildFunction(call) } - if call.WillExit { + switch call.NoReturn { + case AlwaysExits: // This call will cause the process to terminate. // Remove remaining instructions in the block and // replace any control flow with Unreachable. @@ -248,8 +245,9 @@ func (b *builder) addUnreachables(fn *Function) { bb.Instrs = bb.Instrs[:i+1] bb.emit(new(Unreachable), instr.Source()) addEdge(bb, fn.Exit) - break - } else if call.WillUnwind { + break instrLoop + + case AlwaysUnwinds: // This call will cause the goroutine to terminate // and defers to run (i.e. a panic or // runtime.Goexit). Remove remaining instructions @@ -263,7 +261,42 @@ func (b *builder) addUnreachables(fn *Function) { bb.Instrs = bb.Instrs[:i+1] bb.emit(new(Jump), instr.Source()) addEdge(bb, fn.Exit) - break + break instrLoop + + case NeverReturns: + // This call will either cause the goroutine to + // terminate, or the process to terminate. 
Remove + // remaining instructions in the block and replace + // any control flow with a conditional jump to + // either the exit block, or Unreachable. + for _, succ := range bb.Succs { + succ.removePred(bb) + } + bb.Succs = bb.Succs[:0] + + bb.Instrs = bb.Instrs[:i+1] + var c Call + c.Call.Value = &Builtin{ + name: "ir:noreturnWasPanic", + sig: types.NewSignature(nil, + types.NewTuple(), + types.NewTuple(anonVar(types.Typ[types.Bool])), + false, + ), + } + c.setType(types.Typ[types.Bool]) + + if unreachable == nil { + unreachable = fn.newBasicBlock("unreachable") + unreachable.emit(&Unreachable{}, nil) + addEdge(unreachable, fn.Exit) + } + + bb.emit(&c, instr.Source()) + bb.emit(&If{Cond: &c}, instr.Source()) + addEdge(bb, fn.Exit) + addEdge(bb, unreachable) + break instrLoop } } } diff --git a/go/ir/ssa.go b/go/ir/ssa.go index fc8e84114..2fea3e587 100644 --- a/go/ir/ssa.go +++ b/go/ir/ssa.go @@ -349,23 +349,31 @@ type Function struct { method *types.Selection // info about provenance of synthetic methods Signature *types.Signature - Synthetic Synthetic - parent *Function // enclosing function if anon; nil if global - Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) - Prog *Program // enclosing program - Params []*Parameter // function parameters; for methods, includes receiver - FreeVars []*FreeVar // free variables whose values must be supplied by closure - Locals []*Alloc // local variables of this function - Blocks []*BasicBlock // basic blocks of the function; nil => external - Exit *BasicBlock // The function's exit block - AnonFuncs []*Function // anonymous functions directly beneath this one - referrers []Instruction // referring instructions (iff Parent() != nil) - WillExit bool // Calling this function will always terminate the process - WillUnwind bool // Calling this function will always unwind (it will call runtime.Goexit or panic) + Synthetic Synthetic + parent *Function // enclosing function if anon; nil if global 
+ Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) + Prog *Program // enclosing program + Params []*Parameter // function parameters; for methods, includes receiver + FreeVars []*FreeVar // free variables whose values must be supplied by closure + Locals []*Alloc // local variables of this function + Blocks []*BasicBlock // basic blocks of the function; nil => external + Exit *BasicBlock // The function's exit block + AnonFuncs []*Function // anonymous functions directly beneath this one + referrers []Instruction // referring instructions (iff Parent() != nil) + NoReturn NoReturn // Calling this function will always terminate control flow. *functionBody } +type NoReturn uint8 + +const ( + Returns NoReturn = iota + AlwaysExits + AlwaysUnwinds + NeverReturns +) + type functionBody struct { // The following fields are set transiently during building, // then cleared. @@ -518,6 +526,10 @@ type Global struct { // // (For use in indirection wrappers.) // func ir:wrapnilchk(ptr *T, recvType, methodName string) *T // +// // noreturnWasPanic returns true if the previously called +// // function panicked, false if it exited the process. +// func ir:noreturnWasPanic() bool +// // Object() returns a *types.Builtin for built-ins defined by the spec, // nil for others. 
// diff --git a/internal/passes/buildir/buildir.go b/internal/passes/buildir/buildir.go index 645e216a9..51dfaef53 100644 --- a/internal/passes/buildir/buildir.go +++ b/internal/passes/buildir/buildir.go @@ -20,21 +20,18 @@ import ( "golang.org/x/tools/go/analysis" ) -type willExit struct{} -type willUnwind struct{} - -func (*willExit) AFact() {} -func (*willUnwind) AFact() {} +type noReturn struct { + Kind ir.NoReturn +} -func (*willExit) String() string { return "will exit" } -func (*willUnwind) String() string { return "will unwind" } +func (*noReturn) AFact() {} var Analyzer = &analysis.Analyzer{ Name: "buildir", Doc: "build IR for later passes", Run: run, ResultType: reflect.TypeOf(new(IR)), - FactTypes: []analysis.Fact{new(willExit), new(willUnwind)}, + FactTypes: []analysis.Fact{new(noReturn)}, } // IR provides intermediate representation for all the @@ -72,13 +69,9 @@ func run(pass *analysis.Pass) (interface{}, error) { irpkg := prog.CreatePackage(p, nil, nil, true) for _, fn := range irpkg.Functions { if ast.IsExported(fn.Name()) { - var exit willExit - var unwind willUnwind - if pass.ImportObjectFact(fn.Object(), &exit) { - fn.WillExit = true - } - if pass.ImportObjectFact(fn.Object(), &unwind) { - fn.WillUnwind = true + var noRet noReturn + if pass.ImportObjectFact(fn.Object(), &noRet) { + fn.NoReturn = noRet.Kind } } } @@ -105,11 +98,8 @@ func run(pass *analysis.Pass) (interface{}, error) { } for _, fn := range irpkg.Functions { addAnons(fn) - if fn.WillExit { - pass.ExportObjectFact(fn.Object(), new(willExit)) - } - if fn.WillUnwind { - pass.ExportObjectFact(fn.Object(), new(willUnwind)) + if fn.NoReturn > 0 { + pass.ExportObjectFact(fn.Object(), &noReturn{fn.NoReturn}) } } diff --git a/staticcheck/testdata/src/CheckMaybeNil/CheckMaybeNil.go b/staticcheck/testdata/src/CheckMaybeNil/CheckMaybeNil.go index c44320874..b5fc1cb16 100644 --- a/staticcheck/testdata/src/CheckMaybeNil/CheckMaybeNil.go +++ 
b/staticcheck/testdata/src/CheckMaybeNil/CheckMaybeNil.go @@ -1,6 +1,9 @@ package pkg -import "os" +import ( + "os" + "syscall" +) func fn1(x *int) { _ = *x // want `possible nil pointer dereference` @@ -106,3 +109,25 @@ func fn11(x *int) { } _ = *x // want `possible nil pointer dereference` } + +func doPanic() { panic("") } +func doExit() { syscall.Exit(1) } + +func fn12(arg bool) { + if arg { + doPanic() + } else { + doExit() + } +} + +func fn13(arg bool) { + fn12(arg) +} + +func fn14(x *int) { + if x == nil { + fn13(true) + } + _ = *x +} From 4342b0b981d840719f78d42faf12755176bc1343 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 15 Nov 2020 09:11:26 +0100 Subject: [PATCH 097/111] staticcheck: don't use AST walking to find node corresponding to IR call instruction Before c14c261fd1fd7dec2e23b44bbf8c7b49cbd4e239, the only way to map from an IR instruction to an AST node was by using position information and walking the AST, usually by using astutil.PathEnclosingInterval. Back then, the position information stored with instructions was designed to map uniquely to AST nodes. For example, in 'fn1().fn2().fn3()`, the call of fn3() would have its position on the dot immediately before fn3. Since the change, however, The start position will be at the beginning of the sequence of expressions. Using that position to find the correct call is wrong and will yield the wrong node. For defer and go statements, we might even accidentally look at the surrounding code. In the best case, this would lead to incorrect positions in our output. In the worst case, it would lead to out of bounds crashes due to a mismatch in argument counts. Note that many IR instructions may map to the same AST node, for example because of method calls of embedded methods. This, however, shouldn't be a problem here, as the AST node should always correspond to the one we're interested in. 
Closes gh-873 --- staticcheck/lint.go | 21 +++++++++++++------ .../testdata/src/CheckRegexps/CheckRegexps.go | 4 ++++ .../CheckSyncPoolValue/CheckSyncPoolValue.go | 14 +++++++++++++ .../checkStdlibUsageRegexpFindAll.go | 4 ++++ 4 files changed, 37 insertions(+), 6 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index e190051ba..332ba6e4c 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3014,14 +3014,23 @@ func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, e Parent: site.Parent(), } r(call) - path, _ := goastutil.PathEnclosingInterval(code.File(pass, site), site.Pos(), site.Pos()) + var astcall *ast.CallExpr - for _, el := range path { - if expr, ok := el.(*ast.CallExpr); ok { - astcall = expr - break - } + switch source := site.Source().(type) { + case *ast.CallExpr: + astcall = source + case *ast.DeferStmt: + astcall = source.Call + case *ast.GoStmt: + astcall = source.Call + case nil: + // TODO(dh): I am not sure this can actually happen. If it + // can't, we should remove this case, and also stop + // checking for astcall == nil in the code that follows. 
+ default: + panic(fmt.Sprintf("unhandled case %T", source)) } + for idx, arg := range call.Args { for _, e := range arg.invalids { if astcall != nil { diff --git a/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go b/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go index e5338d0c3..4719c6301 100644 --- a/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go +++ b/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go @@ -40,3 +40,7 @@ func (T) init() {} // this will become a synthetic init function, that we don't want to // ignore var _ = regexp.MustCompile("(") // want `error parsing regexp` + +func fn2() { + regexp.MustCompile("foo(").FindAll(nil, 0) // want `error parsing regexp` +} diff --git a/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go b/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go index 44d8d20b7..afff47379 100644 --- a/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go +++ b/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go @@ -36,3 +36,17 @@ func fn() { var basic int p.Put(basic) // want `argument should be pointer-like` } + +func fn2() { + // https://github.com/dominikh/go-tools/issues/873 + var pool sync.Pool + func() { + defer pool.Put([]byte{}) // want `argument should be pointer-like` + }() +} + +func fn3() { + var pool sync.Pool + defer pool.Put([]byte{}) // want `argument should be pointer-like` + go pool.Put([]byte{}) // want `argument should be pointer-like` +} diff --git a/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go b/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go index 86056e577..cfb7fdac3 100644 --- a/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go +++ b/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go @@ -6,3 +6,7 @@ func fn() { var r *regexp.Regexp _ = r.FindAll(nil, 0) 
//want `calling a FindAll method with n == 0 will return no results` } + +func fn2() { + regexp.MustCompile("foo(").FindAll(nil, 0) // want `calling a FindAll` +} From b453a5fa9d66c93694354593db1bed0f87388499 Mon Sep 17 00:00:00 2001 From: pkositsyn Date: Sat, 14 Nov 2020 01:48:42 +0300 Subject: [PATCH 098/111] simple: fix negate for GEQ from LEQ to LSS Closes: gh-875 [via git-merge-pr] Closes gh-874 --- simple/lint.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simple/lint.go b/simple/lint.go index 8bff7b08d..ea7248675 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -470,7 +470,7 @@ func negate(expr ast.Expr) ast.Expr { case token.LEQ: out.Op = token.GTR case token.GEQ: - out.Op = token.LEQ + out.Op = token.LSS } return &out case *ast.Ident, *ast.CallExpr, *ast.IndexExpr: From d0bdcc714807a26596953fa2dbf7cab4932b508f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 15 Nov 2020 09:28:59 +0100 Subject: [PATCH 099/111] simple: add test case for correct negation of >= --- simple/testdata/src/if-return/if-return.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/simple/testdata/src/if-return/if-return.go b/simple/testdata/src/if-return/if-return.go index f8cf98f8d..6502c3bca 100644 --- a/simple/testdata/src/if-return/if-return.go +++ b/simple/testdata/src/if-return/if-return.go @@ -93,3 +93,10 @@ func fn13(a, b int) bool { } return true } + +func fn14(a, b int) bool { + if a >= b { // want `should use 'return a < b' instead of 'if a >= b` + return false + } + return true +} From 6054ba0e94b60a7700a3eeb9e041319e172ceb9f Mon Sep 17 00:00:00 2001 From: Sourya Vatsyayan Date: Tue, 3 Nov 2020 02:22:25 +0530 Subject: [PATCH 100/111] staticcheck: fix incorrect range on suggested edit Signed-off-by: sourya Closes: gh-867 [via git-merge-pr] Closes gh-866 --- staticcheck/lint.go | 3 +++ .../CheckMissingEnumTypesInDeclaration.go | 2 +- .../CheckMissingEnumTypesInDeclaration.go.golden | 2 +- 3 files changed, 5 insertions(+), 2 
deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 332ba6e4c..394f203c4 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3381,6 +3381,9 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error for _, spec := range group[1:] { nspec := *spec.(*ast.ValueSpec) nspec.Type = typ + // The position of `spec` node excludes comments (if any). + // However, on generating the source back from the node, the comments are included. Setting `Comment` to nil ensures deduplication of comments. + nspec.Comment = nil edits = append(edits, edit.ReplaceWithNode(pass.Fset, spec, &nspec)) } report.Report(pass, group[0], "only the first constant in this group has an explicit type", report.Fixes(edit.Fix("add type to all constants in group", edits...))) diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go index bc6f5f0ed..a393be80a 100644 --- a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go @@ -11,7 +11,7 @@ const ( const ( c6 int = 1 // want `only the first constant in this group has an explicit type` - c7 = 2 + c7 = 2 // comment for testing https://github.com/dominikh/go-tools/issues/866 c8 = 3 ) diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden index 371ccd2e5..3ed508f3b 100644 --- a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go.golden @@ -11,7 +11,7 @@ 
const ( const ( c6 int = 1 // want `only the first constant in this group has an explicit type` - c7 int = 2 + c7 int = 2 // comment for testing https://github.com/dominikh/go-tools/issues/866 c8 int = 3 ) From e068dc2034e7ef39930d3bdaae516754596e5050 Mon Sep 17 00:00:00 2001 From: Ainar Garipov Date: Wed, 21 Oct 2020 22:52:37 +0300 Subject: [PATCH 101/111] stylecheck: don't flag identifiers with no letters in ST1003 Closes gh-858 Closes: gh-859 [via git-merge-pr] --- stylecheck/names.go | 6 +++++- stylecheck/testdata/src/CheckNames/CheckNames.go | 2 ++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/stylecheck/names.go b/stylecheck/names.go index 594bdf1f4..f038d0632 100644 --- a/stylecheck/names.go +++ b/stylecheck/names.go @@ -31,12 +31,16 @@ func CheckNames(pass *analysis.Pass) (interface{}, error) { // licensed under the BSD 3-clause license. allCaps := func(s string) bool { + hasUppercaseLetters := false for _, r := range s { + if !hasUppercaseLetters && r >= 'A' && r <= 'Z' { + hasUppercaseLetters = true + } if !((r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '_') { return false } } - return true + return hasUppercaseLetters } check := func(id *ast.Ident, thing string, initialisms map[string]bool) { diff --git a/stylecheck/testdata/src/CheckNames/CheckNames.go b/stylecheck/testdata/src/CheckNames/CheckNames.go index a61b5d4a0..d4ffb591b 100644 --- a/stylecheck/testdata/src/CheckNames/CheckNames.go +++ b/stylecheck/testdata/src/CheckNames/CheckNames.go @@ -17,6 +17,8 @@ var Foo_BAR int // want `var Foo_BAR should be FooBAR` var foo_bar int // want `foo_bar should be fooBar` var kFoobar int // not a check we inherited from golint. more false positives than true ones. 
+var _1000 int // issue 858 + func fn(x []int) { var ( a_b = 1 // want `var a_b should be aB` From 56b7c78ddcd8b1d79be0507087b2b4ea5c74169f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 24 Nov 2020 08:33:30 +0100 Subject: [PATCH 102/111] lintcmd: improve message when patterns matched no packages Updates gh-722 --- lintcmd/cmd.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lintcmd/cmd.go b/lintcmd/cmd.go index 4bda6e62d..603372b91 100644 --- a/lintcmd/cmd.go +++ b/lintcmd/cmd.go @@ -292,7 +292,9 @@ func (l *linter) Lint(cfg *packages.Config, patterns []string) (problems []probl if len(results) == 0 && err == nil { // TODO(dh): emulate Go's behavior more closely once we have // access to go list's Match field. - fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", patterns) + for _, pattern := range patterns { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } } analyzerNames := make([]string, len(l.Checkers)) From bde4814064e47542ef9fe3582a9e5914930ff103 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 25 Nov 2020 03:20:28 +0100 Subject: [PATCH 103/111] SA9006: add missing "Since" field to documentation --- staticcheck/doc.go | 1 + 1 file changed, 1 insertion(+) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index aedd28348..0bc14eb39 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -970,5 +970,6 @@ positives in somewhat exotic but valid bit twiddling tricks: v = v << 32 return i-v }`, + Since: "Unreleased", }, } From 0767310fbbbb033e38429f2061261c6b7dbe7552 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 8 Dec 2020 02:03:37 +0100 Subject: [PATCH 104/111] go/ir: rebuild fake exits after optimizing blocks Block optimization can remove blocks, which means we need to recompute fake exits to be able to build a post-dominator tree. 
Before this change, building the IR form of the following program caused a panic: package pkg import ( "syscall" ) func fn() { if true { syscall.Exit(1) } else { _ = 0 } var err error if err != nil { return } for { } } Closes gh-882 --- go/ir/func.go | 1 + 1 file changed, 1 insertion(+) diff --git a/go/ir/func.go b/go/ir/func.go index 14ec132bc..dca883d4b 100644 --- a/go/ir/func.go +++ b/go/ir/func.go @@ -557,6 +557,7 @@ func (f *Function) finishBody() { f.Locals = f.Locals[:j] optimizeBlocks(f) + buildFakeExits(f) buildReferrers(f) buildDomTree(f) buildPostDomTree(f) From fd953f579ecb81254b82a79951702a5e211dd314 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 13 Dec 2020 17:17:42 +0100 Subject: [PATCH 105/111] lintcmd/version: add support for having two versions We are switching to using two versioning schemes: our original one and a proper Semantic Versioning compatible one. See #777 for the motivation. --- lintcmd/version/version.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/lintcmd/version/version.go b/lintcmd/version/version.go index a12f70fb4..cb12e2121 100644 --- a/lintcmd/version/version.go +++ b/lintcmd/version/version.go @@ -8,29 +8,30 @@ import ( ) const Version = "devel" +const MachineVersion = "devel" // version returns a version descriptor and reports whether the // version is a known release. 
-func version() (string, bool) { +func version() (human, machine string, known bool) { if Version != "devel" { - return Version, true + return Version, MachineVersion, true } v, ok := buildInfoVersion() if ok { - return v, false + return v, "", false } - return "devel", false + return "devel", "", false } func Print() { - v, release := version() + human, machine, release := version() if release { - fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), v) - } else if v == "devel" { + fmt.Printf("%s %s (%s)\n", filepath.Base(os.Args[0]), human, machine) + } else if human == "devel" { fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0])) } else { - fmt.Printf("%s (devel, %s)\n", filepath.Base(os.Args[0]), v) + fmt.Printf("%s (devel, %s)\n", filepath.Base(os.Args[0]), human) } } From 93fa3e0cacb5fb41d78dcf4c73ddcb93c465b873 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Dec 2020 08:11:52 +0100 Subject: [PATCH 106/111] staticcheck: update Since field of checks that are new in 2020.2 --- staticcheck/doc.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 0bc14eb39..582b0ae18 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -557,7 +557,7 @@ Reflection (https://golang.org/doc/articles/laws_of_reflection.html). This text has been copied from https://golang.org/doc/faq#nil_error, licensed under the Creative Commons Attribution 3.0 License.`, - Since: "Unreleased", + Since: "2020.2", }, "SA5000": { @@ -729,7 +729,7 @@ popular package.`, Often, these functions treat elements in a slice as pairs. 
For example, strings.NewReplacer takes pairs of old and new strings, and calling it with an odd number of elements would be an error.`, - Since: "Unreleased", + Since: "2020.2", }, "SA6000": { @@ -970,6 +970,6 @@ positives in somewhat exotic but valid bit twiddling tricks: v = v << 32 return i-v }`, - Since: "Unreleased", + Since: "2020.2", }, } From 900aaa96e309e18854b69137fcdfa848528f6a64 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Dec 2020 08:55:31 +0100 Subject: [PATCH 107/111] knowledge: update list of deprecated objects for Go 1.16 --- knowledge/deprecated.go | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/knowledge/deprecated.go b/knowledge/deprecated.go index ffed387c9..365abaf88 100644 --- a/knowledge/deprecated.go +++ b/knowledge/deprecated.go @@ -69,6 +69,13 @@ var StdlibDeprecations = map[string]Deprecation{ "net/http.CloseNotifier": {11, 7}, "net/http.ProtocolError": {8, 8}, "(crypto/x509.CertificateRequest).Attributes": {5, 3}, + + // These functions have no direct alternative, but they are insecure and should no longer be used. + "crypto/x509.IsEncryptedPEMBlock": {16, 0}, + "crypto/x509.DecryptPEMBlock": {16, 0}, + "crypto/x509.EncryptPEMBlock": {16, 0}, + "crypto/dsa": {16, 0}, + // This function has no alternative, but also no purpose. 
"(*crypto/rc4.Cipher).Reset": {12, 0}, "(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7}, @@ -79,7 +86,12 @@ var StdlibDeprecations = map[string]Deprecation{ "crypto/tls.VersionSSL30": {13, 0}, "(crypto/tls.Config).NameToCertificate": {14, 14}, "(*crypto/tls.Config).BuildNameToCertificate": {14, 14}, - "image/jpeg.Reader": {4, 0}, + "(crypto/tls.Config).SessionTicketKey": {16, 5}, + // No alternative, no use + "(crypto/tls.ConnectionState).NegotiatedProtocolIsMutual": {16, 0}, + // No alternative, but insecure + "(crypto/tls.ConnectionState).TLSUnique": {16, 0}, + "image/jpeg.Reader": {4, 0}, // All of these have been deprecated in favour of external libraries "syscall.AttachLsf": {7, 0}, @@ -116,4 +128,7 @@ var StdlibDeprecations = map[string]Deprecation{ "syscall.InterfaceAnnounceMessage": {7, 0}, "syscall.InterfaceMulticastAddrMessage": {7, 0}, "syscall.FormatMessage": {5, 0}, + + // Not marked as deprecated with a recognizable header, but deprecated nonetheless. + "io/ioutil": {16, 16}, } From c13dc1b8b68765fbd26395f96f7e3abfd2c0cdbf Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Dec 2020 10:11:54 +0100 Subject: [PATCH 108/111] staticcheck: more tailored deprecation diagnostics Print different diagnostics depending on the precise combination of "deprecated since" and "alternative available since". 
--- knowledge/deprecated.go | 148 +++++++++--------- staticcheck/lint.go | 52 ++++-- staticcheck/lint_test.go | 7 +- .../CheckDeprecated_go13/CheckDeprecated.go | 14 ++ .../CheckDeprecated_go14/CheckDeprecated.go | 3 - .../CheckDeprecated_go18/CheckDeprecated.go | 4 +- 6 files changed, 142 insertions(+), 86 deletions(-) create mode 100644 staticcheck/testdata/src/CheckDeprecated_go13/CheckDeprecated.go diff --git a/knowledge/deprecated.go b/knowledge/deprecated.go index 365abaf88..439ab603d 100644 --- a/knowledge/deprecated.go +++ b/knowledge/deprecated.go @@ -1,5 +1,10 @@ package knowledge +const ( + DeprecatedNeverUse = -1 + DeprecatedUseNoLonger = -2 +) + type Deprecation struct { DeprecatedSince int AlternativeAvailableSince int @@ -8,90 +13,91 @@ type Deprecation struct { var StdlibDeprecations = map[string]Deprecation{ // FIXME(dh): AllowBinary isn't being detected as deprecated // because the comment has a newline right after "Deprecated:" - "go/build.AllowBinary": {7, 7}, - "(archive/zip.FileHeader).CompressedSize": {1, 1}, - "(archive/zip.FileHeader).UncompressedSize": {1, 1}, - "(archive/zip.FileHeader).ModifiedTime": {10, 10}, - "(archive/zip.FileHeader).ModifiedDate": {10, 10}, - "(*archive/zip.FileHeader).ModTime": {10, 10}, - "(*archive/zip.FileHeader).SetModTime": {10, 10}, - "(go/doc.Package).Bugs": {1, 1}, - "os.SEEK_SET": {7, 7}, - "os.SEEK_CUR": {7, 7}, - "os.SEEK_END": {7, 7}, - "(net.Dialer).Cancel": {7, 7}, - "runtime.CPUProfile": {9, 0}, - "compress/flate.ReadError": {6, 6}, - "compress/flate.WriteError": {6, 6}, - "path/filepath.HasPrefix": {0, 0}, - "(net/http.Transport).Dial": {7, 7}, - "(*net/http.Transport).CancelRequest": {6, 5}, - "net/http.ErrWriteAfterFlush": {7, 0}, - "net/http.ErrHeaderTooLong": {8, 0}, - "net/http.ErrShortBody": {8, 0}, - "net/http.ErrMissingContentLength": {8, 0}, - "net/http/httputil.ErrPersistEOF": {0, 0}, - "net/http/httputil.ErrClosed": {0, 0}, - "net/http/httputil.ErrPipeline": {0, 0}, - 
"net/http/httputil.ServerConn": {0, 0}, - "net/http/httputil.NewServerConn": {0, 0}, - "net/http/httputil.ClientConn": {0, 0}, - "net/http/httputil.NewClientConn": {0, 0}, - "net/http/httputil.NewProxyClientConn": {0, 0}, - "(net/http.Request).Cancel": {7, 7}, - "(text/template/parse.PipeNode).Line": {1, 1}, - "(text/template/parse.ActionNode).Line": {1, 1}, - "(text/template/parse.BranchNode).Line": {1, 1}, - "(text/template/parse.TemplateNode).Line": {1, 1}, - "database/sql/driver.ColumnConverter": {9, 9}, - "database/sql/driver.Execer": {8, 8}, - "database/sql/driver.Queryer": {8, 8}, - "(database/sql/driver.Conn).Begin": {8, 8}, - "(database/sql/driver.Stmt).Exec": {8, 8}, - "(database/sql/driver.Stmt).Query": {8, 8}, - "syscall.StringByteSlice": {1, 1}, - "syscall.StringBytePtr": {1, 1}, - "syscall.StringSlicePtr": {1, 1}, - "syscall.StringToUTF16": {1, 1}, - "syscall.StringToUTF16Ptr": {1, 1}, - "(*regexp.Regexp).Copy": {12, 12}, - "(archive/tar.Header).Xattrs": {10, 10}, - "archive/tar.TypeRegA": {11, 1}, - "go/types.NewInterface": {11, 11}, - "(*go/types.Interface).Embedded": {11, 11}, - "go/importer.For": {12, 12}, - "encoding/json.InvalidUTF8Error": {2, 2}, - "encoding/json.UnmarshalFieldError": {2, 2}, - "encoding/csv.ErrTrailingComma": {2, 2}, - "(encoding/csv.Reader).TrailingComma": {2, 2}, - "(net.Dialer).DualStack": {12, 12}, - "net/http.ErrUnexpectedTrailer": {12, 12}, - "net/http.CloseNotifier": {11, 7}, - "net/http.ProtocolError": {8, 8}, + "go/build.AllowBinary": {7, 7}, + "(archive/zip.FileHeader).CompressedSize": {1, 1}, + "(archive/zip.FileHeader).UncompressedSize": {1, 1}, + "(archive/zip.FileHeader).ModifiedTime": {10, 10}, + "(archive/zip.FileHeader).ModifiedDate": {10, 10}, + "(*archive/zip.FileHeader).ModTime": {10, 10}, + "(*archive/zip.FileHeader).SetModTime": {10, 10}, + "(go/doc.Package).Bugs": {1, 1}, + "os.SEEK_SET": {7, 7}, + "os.SEEK_CUR": {7, 7}, + "os.SEEK_END": {7, 7}, + "(net.Dialer).Cancel": {7, 7}, + "runtime.CPUProfile": 
{9, 0}, + "compress/flate.ReadError": {6, DeprecatedUseNoLonger}, + "compress/flate.WriteError": {6, DeprecatedUseNoLonger}, + "path/filepath.HasPrefix": {0, DeprecatedNeverUse}, + "(net/http.Transport).Dial": {7, 7}, + "(*net/http.Transport).CancelRequest": {6, 5}, + "net/http.ErrWriteAfterFlush": {7, DeprecatedUseNoLonger}, + "net/http.ErrHeaderTooLong": {8, DeprecatedUseNoLonger}, + "net/http.ErrShortBody": {8, DeprecatedUseNoLonger}, + "net/http.ErrMissingContentLength": {8, DeprecatedUseNoLonger}, + "net/http/httputil.ErrPersistEOF": {0, DeprecatedUseNoLonger}, + "net/http/httputil.ErrClosed": {0, DeprecatedUseNoLonger}, + "net/http/httputil.ErrPipeline": {0, DeprecatedUseNoLonger}, + "net/http/httputil.ServerConn": {0, 0}, + "net/http/httputil.NewServerConn": {0, 0}, + "net/http/httputil.ClientConn": {0, 0}, + "net/http/httputil.NewClientConn": {0, 0}, + "net/http/httputil.NewProxyClientConn": {0, 0}, + "(net/http.Request).Cancel": {7, 7}, + "(text/template/parse.PipeNode).Line": {1, DeprecatedUseNoLonger}, + "(text/template/parse.ActionNode).Line": {1, DeprecatedUseNoLonger}, + "(text/template/parse.BranchNode).Line": {1, DeprecatedUseNoLonger}, + "(text/template/parse.TemplateNode).Line": {1, DeprecatedUseNoLonger}, + "database/sql/driver.ColumnConverter": {9, 9}, + "database/sql/driver.Execer": {8, 8}, + "database/sql/driver.Queryer": {8, 8}, + "(database/sql/driver.Conn).Begin": {8, 8}, + "(database/sql/driver.Stmt).Exec": {8, 8}, + "(database/sql/driver.Stmt).Query": {8, 8}, + "syscall.StringByteSlice": {1, 1}, + "syscall.StringBytePtr": {1, 1}, + "syscall.StringSlicePtr": {1, 1}, + "syscall.StringToUTF16": {1, 1}, + "syscall.StringToUTF16Ptr": {1, 1}, + "(*regexp.Regexp).Copy": {12, DeprecatedUseNoLonger}, + "(archive/tar.Header).Xattrs": {10, 10}, + "archive/tar.TypeRegA": {11, 1}, + "go/types.NewInterface": {11, 11}, + "(*go/types.Interface).Embedded": {11, 11}, + "go/importer.For": {12, 12}, + "encoding/json.InvalidUTF8Error": {2, 
DeprecatedUseNoLonger}, + "encoding/json.UnmarshalFieldError": {2, DeprecatedUseNoLonger}, + "encoding/csv.ErrTrailingComma": {2, DeprecatedUseNoLonger}, + "(encoding/csv.Reader).TrailingComma": {2, DeprecatedUseNoLonger}, + "(net.Dialer).DualStack": {12, 12}, + "net/http.ErrUnexpectedTrailer": {12, DeprecatedUseNoLonger}, + "net/http.CloseNotifier": {11, 7}, + // This is hairy. The notice says "Not all errors in the http package related to protocol errors are of type ProtocolError", but doesn't that imply that + "net/http.ProtocolError": {8, DeprecatedUseNoLonger}, "(crypto/x509.CertificateRequest).Attributes": {5, 3}, // These functions have no direct alternative, but they are insecure and should no longer be used. - "crypto/x509.IsEncryptedPEMBlock": {16, 0}, - "crypto/x509.DecryptPEMBlock": {16, 0}, - "crypto/x509.EncryptPEMBlock": {16, 0}, - "crypto/dsa": {16, 0}, + "crypto/x509.IsEncryptedPEMBlock": {16, DeprecatedNeverUse}, + "crypto/x509.DecryptPEMBlock": {16, DeprecatedNeverUse}, + "crypto/x509.EncryptPEMBlock": {16, DeprecatedNeverUse}, + "crypto/dsa": {16, DeprecatedNeverUse}, // This function has no alternative, but also no purpose. 
- "(*crypto/rc4.Cipher).Reset": {12, 0}, + "(*crypto/rc4.Cipher).Reset": {12, DeprecatedNeverUse}, "(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7}, "image.ZP": {13, 0}, "image.ZR": {13, 0}, "(*debug/gosym.LineTable).LineToPC": {2, 2}, "(*debug/gosym.LineTable).PCToLine": {2, 2}, - "crypto/tls.VersionSSL30": {13, 0}, - "(crypto/tls.Config).NameToCertificate": {14, 14}, - "(*crypto/tls.Config).BuildNameToCertificate": {14, 14}, + "crypto/tls.VersionSSL30": {13, DeprecatedNeverUse}, + "(crypto/tls.Config).NameToCertificate": {14, DeprecatedUseNoLonger}, + "(*crypto/tls.Config).BuildNameToCertificate": {14, DeprecatedUseNoLonger}, "(crypto/tls.Config).SessionTicketKey": {16, 5}, // No alternative, no use - "(crypto/tls.ConnectionState).NegotiatedProtocolIsMutual": {16, 0}, + "(crypto/tls.ConnectionState).NegotiatedProtocolIsMutual": {16, DeprecatedNeverUse}, // No alternative, but insecure - "(crypto/tls.ConnectionState).TLSUnique": {16, 0}, - "image/jpeg.Reader": {4, 0}, + "(crypto/tls.ConnectionState).TLSUnique": {16, DeprecatedNeverUse}, + "image/jpeg.Reader": {4, DeprecatedNeverUse}, // All of these have been deprecated in favour of external libraries "syscall.AttachLsf": {7, 0}, diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 394f203c4..fd43bd790 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -2929,14 +2929,37 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { return true } if depr, ok := deprs.Objects[obj]; ok { - // Look for the first available alternative, not the first - // version something was deprecated in. If a function was - // deprecated in Go 1.6, an alternative has been available - // already in 1.0, and we're targeting 1.2, it still - // makes sense to use the alternative from 1.0, to be - // future-proof. 
- minVersion := knowledge.StdlibDeprecations[code.SelectorName(pass, sel)].AlternativeAvailableSince - if !code.IsGoVersion(pass, minVersion) { + std, ok := knowledge.StdlibDeprecations[code.SelectorName(pass, sel)] + if ok { + switch std.AlternativeAvailableSince { + case knowledge.DeprecatedNeverUse: + // This should never be used, regardless of the + // targeted Go version. Examples include insecure + // cryptography or inherently broken APIs. + // + // We always want to flag these. + case knowledge.DeprecatedUseNoLonger: + // This should no longer be used. Using it with + // older Go versions might still make sense. + if !code.IsGoVersion(pass, std.DeprecatedSince) { + return true + } + default: + if std.AlternativeAvailableSince < 0 { + panic(fmt.Sprintf("unhandled case %d", std.AlternativeAvailableSince)) + } + // Look for the first available alternative, not the first + // version something was deprecated in. If a function was + // deprecated in Go 1.6, an alternative has been available + // already in 1.0, and we're targeting 1.2, it still + // makes sense to use the alternative from 1.0, to be + // future-proof. 
+ if !code.IsGoVersion(pass, std.AlternativeAvailableSince) { + return true + } + } + } + if ok && !code.IsGoVersion(pass, std.AlternativeAvailableSince) { return true } @@ -2947,7 +2970,18 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { return true } } - report.Report(pass, sel, fmt.Sprintf("%s is deprecated: %s", report.Render(pass, sel), depr.Msg)) + + if ok { + if std.AlternativeAvailableSince == knowledge.DeprecatedNeverUse { + report.Report(pass, sel, fmt.Sprintf("%s has been deprecated since Go 1.%d because it shouldn't be used: %s", report.Render(pass, sel), std.DeprecatedSince, depr.Msg)) + } else if std.AlternativeAvailableSince == std.DeprecatedSince || std.AlternativeAvailableSince == knowledge.DeprecatedUseNoLonger { + report.Report(pass, sel, fmt.Sprintf("%s has been deprecated since Go 1.%d: %s", report.Render(pass, sel), std.DeprecatedSince, depr.Msg)) + } else { + report.Report(pass, sel, fmt.Sprintf("%s has been deprecated since Go 1.%d and an alternative has been available since Go 1.%d: %s", report.Render(pass, sel), std.DeprecatedSince, std.AlternativeAvailableSince, depr.Msg)) + } + } else { + report.Report(pass, sel, fmt.Sprintf("%s is deprecated: %s", report.Render(pass, sel), depr.Msg)) + } return true } return true diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index b351a9a14..0aefdb04b 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -26,7 +26,12 @@ func TestAll(t *testing.T) { "SA1016": {{Dir: "CheckUntrappableSignal"}}, "SA1017": {{Dir: "CheckUnbufferedSignalChan"}}, "SA1018": {{Dir: "CheckStringsReplaceZero"}}, - "SA1019": {{Dir: "CheckDeprecated"}, {Dir: "CheckDeprecated_go14", Version: "1.4"}, {Dir: "CheckDeprecated_go18", Version: "1.8"}}, + "SA1019": { + {Dir: "CheckDeprecated"}, + {Dir: "CheckDeprecated_go13", Version: "1.3"}, + {Dir: "CheckDeprecated_go14", Version: "1.4"}, + {Dir: "CheckDeprecated_go18", Version: "1.8"}, + }, "SA1020": {{Dir: "CheckListenAddress"}}, 
"SA1021": {{Dir: "CheckBytesEqualIP"}}, "SA1023": {{Dir: "CheckWriterBufferModified"}}, diff --git a/staticcheck/testdata/src/CheckDeprecated_go13/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go13/CheckDeprecated.go new file mode 100644 index 000000000..4558ef1f9 --- /dev/null +++ b/staticcheck/testdata/src/CheckDeprecated_go13/CheckDeprecated.go @@ -0,0 +1,14 @@ +package pkg + +import ( + "crypto/x509" + "net/http/httputil" + "path/filepath" +) + +func fn() { + filepath.HasPrefix("", "") // want `filepath.HasPrefix has been deprecated since Go 1.0 because it shouldn't be used:` + _ = httputil.ErrPersistEOF // want `httputil.ErrPersistEOF has been deprecated since Go 1.0:` + _ = httputil.ServerConn{} // want `httputil.ServerConn has been deprecated since Go 1.0:` + _ = x509.CertificateRequest{}.Attributes // want `x509.CertificateRequest{}.Attributes has been deprecated since Go 1.5 and an alternative has been available since Go 1.3:` +} diff --git a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go index 21aa784e2..e8473dd8b 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go @@ -15,9 +15,6 @@ func fn1(err error) { _ = r.Cancel // want `If a Request's Cancel field and context are both` _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` _ = os.SEEK_SET - if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` - println() - } var _ flate.ReadError var tr *http.Transport diff --git a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go index e67a9b1f7..f5dedb9da 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go @@ -21,10 +21,10 @@ func fn1(err error) { var 
_ flate.ReadError // want `No longer returned` var tr *http.Transport - tr.CancelRequest(nil) // want `CancelRequest is deprecated` + tr.CancelRequest(nil) // want `CancelRequest has been deprecated` var conn driver.Conn - conn.Begin() // want `Begin is deprecated` + conn.Begin() // want `Begin has been deprecated` } // Deprecated: Don't use this. From 911c7885a6d3cb88d8246c03de1ef070b49ccc7e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Dec 2020 16:55:34 +0100 Subject: [PATCH 109/111] Tweaks to our benchmarking script --- _benchmarks/bench.sh | 33 +++++++++++++-------------------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/_benchmarks/bench.sh b/_benchmarks/bench.sh index 5f3c9c024..89d8dc6c8 100755 --- a/_benchmarks/bench.sh +++ b/_benchmarks/bench.sh @@ -3,20 +3,21 @@ set -e declare -A PKGS=( ["strconv"]="strconv" + ["net/http"]="net/http" + ["image/color"]="image/color" ["std"]="std" ["k8s"]="k8s.io/kubernetes/pkg/..." ) -MIN_CORES=1 -MAX_CORES=16 -MIN_GOGC=10 +MIN_CORES=32 +MAX_CORES=32 +INCR_CORES=2 +MIN_GOGC=100 MAX_GOGC=100 -SAMPLES=5 +SAMPLES=10 WIPE_CACHE=1 -FORMAT=csv +FORMAT=bench BIN=$(realpath ./silent-staticcheck.sh) -SMT=1 - runBenchmark() { local pkg="$1" @@ -29,14 +30,7 @@ runBenchmark() { rm -rf ~/.cache/staticcheck fi - local procs - if [ $SMT -ne 0 ]; then - procs=$((cores*2)) - else - procs=$cores - fi - - local out=$(GOGC=$gc env time -f "%e %M" taskset -c 0-$((procs-1)) $BIN $pkg 2>&1) + local out=$(GOGC=$gc GOMAXPROCS=$cores env time -f "%e %M" $BIN $pkg 2>&1) local t=$(echo "$out" | cut -f1 -d" ") local m=$(echo "$out" | cut -f2 -d" ") local ns=$(printf "%s 1000000000 * p" $t | dc) @@ -44,25 +38,24 @@ runBenchmark() { case $FORMAT in bench) - printf "BenchmarkStaticcheck-%s-GOGC%d-wiped%d-%d 1 %.0f ns/op %.0f B/op\n" "$label" "$gc" "$wipe" "$procs" "$ns" "$b" + printf "BenchmarkStaticcheck-%s-GOGC%d-wiped%d-%d 1 %.0f ns/op %.0f B/op\n" "$label" "$gc" "$wipe" "$cores" "$ns" "$b" ;; csv) - printf 
"%s,%d,%d,%d,%.0f,%.0f\n" "$label" "$gc" "$procs" "$wipe" "$ns" "$b" + printf "%s,%d,%d,%d,%.0f,%.0f\n" "$label" "$gc" "$cores" "$wipe" "$ns" "$b" ;; esac } -go build ../cmd/staticcheck export GO111MODULE=off if [ "$FORMAT" = "csv" ]; then - printf "packages,gogc,procs,wipe-cache,time,memory\n" + printf "packages,gogc,gomaxprocs,wipe-cache,time,memory\n" fi for label in "${!PKGS[@]}"; do pkg=${PKGS[$label]} for gc in $(seq $MIN_GOGC 10 $MAX_GOGC); do - for cores in $(seq $MIN_CORES $MAX_CORES); do + for cores in $(seq $MIN_CORES $INCR_CORES $MAX_CORES); do for i in $(seq 1 $SAMPLES); do runBenchmark "$pkg" "$label" "$gc" "$cores" 1 runBenchmark "$pkg" "$label" "$gc" "$cores" 0 From ead01d5c4d42534f47be66499c157a33ed05ee98 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Dec 2020 16:56:18 +0100 Subject: [PATCH 110/111] doc: add 2020.2 release notes --- doc/2020.2.html | 562 +++++++++++++++++++++++++++++++++++++++++++ doc/staticcheck.html | 4 +- 2 files changed, 564 insertions(+), 2 deletions(-) create mode 100644 doc/2020.2.html diff --git a/doc/2020.2.html b/doc/2020.2.html new file mode 100644 index 000000000..72955dae0 --- /dev/null +++ b/doc/2020.2.html @@ -0,0 +1,562 @@ + + +

    Performance improvements

    + +

    + The primary focus of this release is a major improvement in performance, significantly reducing memory usage while also reducing runtimes. +

    + +
    + Benchmarks comparing the previous and current releases of Staticcheck + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Uncached, GOMAXPROCS=1
    Package2020.1.62020.2DeltaStats
    image/color 2.41s ±19% 2.00s ±14% -17.08%p=0.000, n=10+10
    k8s.io/kubernetes/pkg/... 276s ± 1% 219s ± 1% -20.62%p=0.000, n=10+10
    net/http 6.18s ± 1% 5.61s ± 5% -9.21%p=0.000, n=8+10
    std 49.5s ± 1% 42.5s ± 1% -14.04%p=0.000, n=9+10
    strconv 2.49s ± 9% 2.19s ±12% -12.08%p=0.001, n=10+10
    image/color 167MB ±26% 146MB ±19% -12.62%p=0.043, n=10+10
    k8s.io/kubernetes/pkg/... 2.14GB ± 1% 0.45GB ±13% -79.09%p=0.000, n=10+10
    net/http 216MB ± 6% 166MB ±18% -23.11%p=0.000, n=10+10
    std 972MB ± 3% 284MB ± 9% -70.82%p=0.000, n=8+10
    strconv 155MB ±21% 139MB ±29% ~p=0.063, n=10+10
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Cached, GOMAXPROCS=1
    Package2020.1.62020.2DeltaStats
    image/color 160ms ± 0% 107ms ± 7% -33.13%p=0.000, n=8+10
    k8s.io/kubernetes/pkg/... 12.7s ± 1% 6.9s ± 1% -45.26%p=0.000, n=9+10
    net/http 370ms ± 0% 230ms ± 0% -37.84%p=0.000, n=8+8
    std 2.52s ± 1% 1.31s ± 1% -48.13%p=0.000, n=10+9
    strconv 164ms ± 4% 110ms ± 0% -32.93%p=0.000, n=10+10
    image/color 38.6MB ± 4% 20.8MB ± 1% -45.96%p=0.000, n=9+10
    k8s.io/kubernetes/pkg/... 863MB ± 4% 283MB ± 2% -67.28%p=0.000, n=10+10
    net/http 70.5MB ± 5% 25.8MB ± 2% -63.48%p=0.000, n=10+9
    std 243MB ±16% 73MB ± 8% -70.00%p=0.000, n=10+10
    strconv 37.2MB ± 2% 21.3MB ± 1% -42.76%p=0.000, n=9+10
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Uncached, GOMAXPROCS=32
    Package2020.1.62020.2DeltaStats
    image/color 1.19s ±21% 1.06s ±12% ~p=0.115, n=10+8
    k8s.io/kubernetes/pkg/... 27.0s ± 2% 22.4s ± 2% -16.96%p=0.000, n=10+10
    net/http 2.24s ±11% 2.23s ±10% ~p=0.870, n=10+10
    std 7.14s ± 5% 5.10s ± 9% -28.56%p=0.000, n=10+9
    strconv 1.24s ±26% 1.18s ±21% ~p=0.753, n=10+10
    image/color 143MB ± 7% 141MB ± 6% ~p=0.515, n=8+10
    k8s.io/kubernetes/pkg/... 5.77GB ± 6% 2.76GB ± 4% -52.25%p=0.000, n=10+10
    net/http 284MB ±10% 226MB ±14% -20.38%p=0.000, n=10+10
    std 1.74GB ±10% 1.15GB ±14% -34.11%p=0.000, n=10+10
    strconv 148MB ±18% 144MB ±16% ~p=0.579, n=10+10
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    Cached, GOMAXPROCS=32
    Package2020.1.62020.2DeltaStats
    image/color 96.0ms ± 6% 80.0ms ± 0% -16.67%p=0.000, n=10+9
    k8s.io/kubernetes/pkg/... 4.64s ± 1% 3.88s ± 0% -16.22%p=0.000, n=9+8
    net/http 216ms ± 3% 167ms ± 4% -22.69%p=0.000, n=10+10
    std 1.09s ± 2% 0.96s ± 2% -12.20%p=0.000, n=10+10
    strconv 100ms ± 0% 87ms ± 8% -13.00%p=0.000, n=9+10
    image/color 46.4MB ± 3% 24.1MB ± 5% -48.08%p=0.000, n=8+10
    k8s.io/kubernetes/pkg/... 1.38GB ± 9% 0.27GB ± 1% -80.29%p=0.000, n=10+10
    net/http 80.7MB ±12% 31.4MB ± 2% -61.16%p=0.000, n=10+8
    std 363MB ±12% 75MB ± 7% -79.30%p=0.000, n=10+10
    strconv 48.5MB ± 6% 24.4MB ± 3% -49.72%p=0.000, n=10+10
    +
    + +

    + See commit 5cfc85b70e7b778eb76fd7338e538d7c9af21e4e + for details on how these improvements have been achieved. +

    + +

    + Furthermore, Staticcheck 2020.2 will skip very large packages (currently packages that are 50 MiB or larger), + under the assumption that these packages contain bundled assets and aren't worth analyzing. + This might further reduce Staticcheck's memory usage in your projects. +

    + +

    Changes to the detection of unused code

    + +

    Removal of whole-program mode and changes to the handling of exported identifiers

    +

    + The aforementioned performance improvements necessitate some changes to the U1000 check (also known as unused). +

    + +

    + The most visible change is the removal of the whole program mode. + This mode, which analyzed an entire program and reported unused code even if it was exported, + did not work well with the kind of caching that we use in Staticcheck. + Even in previous versions, it didn't always work correctly and may have caused flaky results, + depending on the state of the cache and the order of staticcheck invocations. +

    + +

    + The whole-program mode may be revived in the future as a standalone tool, + with the understanding that this mode of operation is inherently more expensive than staticcheck. + In the meantime, if you depend on this functionality and can tolerate its bugs, you should continue using Staticcheck 2020.1. +

    + +

    + As part of improving the correctness of U1000, changes were made to the normal mode as well. + In particular, all exported package-level identifiers will be considered used from now on, + even if these identifiers are declared in package main or in tests, and even if they are otherwise unused. + Exported identifiers in package main can be used in ways invisible to us, for example via the plugin build mode. + For tests, we would run into the same kind of issues as we did with the whole program mode. +

    + +

    Improvements

    + +

    + The //lint:ignore directive now works more intelligently with the U1000 check. + In previous versions, the directive would only suppress the output of a diagnostic. For example, given the following code +

    + +
    package pkg
    +
    +//lint:ignore U1000 This is fine.
    +func fn1() { fn2() }
    +
    +func fn2() {}
    + +

    + Staticcheck would emit the following output: +

    + +
    foo.go:6:6: func fn2 is unused (U1000)
    + +

    + as it would only suppress the diagnostic for fn1. +

    + +

    + Beginning with this release, the directive instead actively marks the identifier as used, + which means that any transitively used code will also be considered used, and no diagnostic will be reported for fn2. + Similarly, the //lint:file-ignore directive will consider everything in a file used, which may transitively mark code in other files used, too. +

    + +

    UI improvements

    + +We've made some minor improvements to the output and behavior of the staticcheck command: + +
      +
    • the command now prints instructions on how to look up documentation for checks
    • +
    • output of the -explain flag includes a link to the online documentation
    • + +
    • a warning is emitted when a package pattern matches no packages
    • +
    • unmatched ignore directives cause staticcheck to exit with a non-zero status code
    • +
    + +

    Changes to versioning scheme

    + +

    + Staticcheck releases have two version numbers: one meant for human consumption and one meant for consumption by machines, via Go modules. + For example, the previous release was both 2020.1.6 (for humans) and v0.0.1-2020.1.6 (for machines). +

    + +

    + In previous releases, we've tried to include the human version in the machine version, by using the v0.0.1-<human version> scheme. + However, this scheme had various drawbacks. + For this and future releases we've switched to a more standard scheme for machine versions: v0.<minor>.<patch>. + Minor will increase by one for every feature release of Staticcheck, + and patch will increase by one for every bugfix release of Staticcheck, + resetting to zero on feature releases. +

    + +

    + For example, this release is both 2020.2 and v0.1.0. + A hypothetical 2020.2.1 would be v0.1.1, and 2021.1 will be v0.2.0. + This new versioning scheme fixes various issues when trying to use Staticcheck as a Go module. + It will also allow us to make true pre-releases in the future. +

    + +

    + Documentation on the website, as well as the output of staticcheck -version, will include both version numbers, to make it easier to associate the two. +

    + +

    + For detailed information on how we arrived at this decision, see the discussion on issue 777. +

    + +

    Checks

    +

    New checks

    + +

    + The following new checks have been added: +

    + +
      +
    • {{ check "SA4023" }} flags impossible comparisons of interface values with untyped nils
    • +
    • {{ check "SA5012" }} flags function calls with slice arguments that aren't the right length
    • +
    • {{ check "SA9006" }} flags dubious bit shifts of fixed size integers
    • +
    + +

    Changed checks

    + +

    + Several checks have been improved: +

    + +
      +
    • {{ check "S1030" }} no longer recommends replacing m[string(buf.Bytes())] with m[buf.String()], as the former gets optimized by the compiler
    • +
    • {{ check "S1008" }} no longer incorrectly suggests that the negation of >= is <=
    • +
    • {{ check "S1029" }} and {{ check "SA6003" }} now also check custom types with underlying type string
    • +
    • {{ check "SA1019" }} now recognizes deprecation notices that aren't in the last paragraph of a comment
    • +
    • {{ check "SA1019" }} now emits more precise diagnostics for deprecated code in the standard library
    • +
    • {{ check "SA4006" }} no longer flags assignments where the value is a typed nil
    • +
    • {{ check "SA5011" }} is now able to detect more functions that never return, thus reducing the number of false positives
    • +
    • {{ check "SA9004" }} no longer assumes that constants belong to the same group when they have different types
    • +
    • Automatic fixes for {{ check "SA9004" }} inside gopls no longer incorrectly duplicate comments
    • +
    • {{ check "ST1003" }} no longer complains about ALL_CAPS in variable names that don't contain any letters
    • +
    • Incorrect position information in various checks has been fixed
    • +
    • Crashes in various checks have been fixed
    • +
    diff --git a/doc/staticcheck.html b/doc/staticcheck.html index 81f28f52b..15d217fa6 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -22,12 +22,12 @@

    Installation

    If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. - It is suggested that you explicitly check out the latest release tag instead, which is currently 2020.1.6. + It is suggested that you explicitly check out the latest release tag instead, which is currently 2020.2. One way of doing so would be as follows:

    cd $GOPATH/src/honnef.co/go/tools/cmd/staticcheck
    -git checkout 2020.1.6
    +git checkout 2020.2
     go get
     go install
     
    From c5b83c7d52ec0e1e12b8e776612955ef710247fb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 14 Dec 2020 17:01:15 +0100 Subject: [PATCH 111/111] Version 2020.2 (v0.1.0) --- lintcmd/version/version.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lintcmd/version/version.go b/lintcmd/version/version.go index cb12e2121..0acde9ea0 100644 --- a/lintcmd/version/version.go +++ b/lintcmd/version/version.go @@ -7,8 +7,8 @@ import ( "runtime" ) -const Version = "devel" -const MachineVersion = "devel" +const Version = "2020.2" +const MachineVersion = "v0.1.0" // version returns a version descriptor and reports whether the // version is a known release.