From 17562b81b08373b3b11be787d2cf278102bce55d Mon Sep 17 00:00:00 2001 From: Alexey Surikov Date: Tue, 20 Nov 2018 10:36:51 +0100 Subject: [PATCH 001/254] staticcheck: new check for inefficient string comparisons with strings.ToLower/strings.ToUpper Closes gh-368 Closes gh-366 --- staticcheck/lint.go | 47 +++++++++++++++++++ .../CheckToLowerToUpperComparison.go | 43 +++++++++++++++++ 2 files changed, 90 insertions(+) create mode 100644 staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 2cd04ae80..be0f8b682 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -286,6 +286,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules)}, {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes}, // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp}, + {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison}, {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop}, {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode}, @@ -2816,3 +2817,49 @@ func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { } } } + +func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { + fn := func(node ast.Node) bool { + binExpr, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + + var negative bool + switch binExpr.Op { + case token.EQL: + negative = false + case token.NEQ: + negative = true + default: + return true + } + + const ( + lo = "strings.ToLower" + up = "strings.ToUpper" + ) + var call string + + if IsCallToAST(j, binExpr.X, lo) && IsCallToAST(j, binExpr.Y, lo) { + call = lo + } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { + call = up + } else { + return true + } + + bang := "" + if negative { + bang = "!" 
+ } + + j.Errorf(binExpr, "%s(a) %s %s(b) is better written as %sstrings.EqualFold(a, b)", call, binExpr.Op, call, bang) + + return true + } + + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go new file mode 100644 index 000000000..c86fbaf76 --- /dev/null +++ b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go @@ -0,0 +1,43 @@ +package pkg + +import "strings" + +func fn() { + const ( + s1 = "foo" + s2 = "bar" + ) + + if strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "strings.ToLower(a) == strings.ToLower(b) is better written as strings.EqualFold(a, b)" + panic("") + } + + if strings.ToUpper(s1) == strings.ToUpper(s2) { // MATCH "strings.ToUpper(a) == strings.ToUpper(b) is better written as strings.EqualFold(a, b)" + panic("") + } + + if strings.ToLower(s1) != strings.ToLower(s2) { // MATCH "strings.ToLower(a) != strings.ToLower(b) is better written as !strings.EqualFold(a, b)" + panic("") + } + + switch strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "strings.ToLower(a) == strings.ToLower(b) is better written as strings.EqualFold(a, b)" + case true, false: + panic("") + } + + if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // MATCH "strings.ToLower(a) == strings.ToLower(b) is better written as strings.EqualFold(a, b)" { + panic("") + } + + if strings.ToLower(s1) > strings.ToLower(s2) { + panic("") + } + + if strings.ToLower(s1) < strings.ToLower(s2) { + panic("") + } + + if strings.ToLower(s1) == strings.ToUpper(s2) { + panic("") + } +} From fe93b0e3b36bbb31b17360949cb67af25639dd0b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 2 Jan 2019 08:49:36 +0100 Subject: [PATCH 002/254] staticcheck: wording change for SA6005 --- staticcheck/lint.go | 5 ++--- .../CheckToLowerToUpperComparison.go | 10 +++++----- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index be0f8b682..4f90bc460 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -2839,8 +2839,8 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { lo = "strings.ToLower" up = "strings.ToUpper" ) - var call string + var call string if IsCallToAST(j, binExpr.X, lo) && IsCallToAST(j, binExpr.Y, lo) { call = lo } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { @@ -2854,8 +2854,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { bang = "!" 
} - j.Errorf(binExpr, "%s(a) %s %s(b) is better written as %sstrings.EqualFold(a, b)", call, binExpr.Op, call, bang) - + j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) return true } diff --git a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go index c86fbaf76..4a42ee616 100644 --- a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go +++ b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go @@ -8,24 +8,24 @@ func fn() { s2 = "bar" ) - if strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "strings.ToLower(a) == strings.ToLower(b) is better written as strings.EqualFold(a, b)" + if strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" panic("") } - if strings.ToUpper(s1) == strings.ToUpper(s2) { // MATCH "strings.ToUpper(a) == strings.ToUpper(b) is better written as strings.EqualFold(a, b)" + if strings.ToUpper(s1) == strings.ToUpper(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToUpper(a) == strings.ToUpper(b)" panic("") } - if strings.ToLower(s1) != strings.ToLower(s2) { // MATCH "strings.ToLower(a) != strings.ToLower(b) is better written as !strings.EqualFold(a, b)" + if strings.ToLower(s1) != strings.ToLower(s2) { // MATCH "should use !strings.EqualFold(a, b) instead of strings.ToLower(a) != strings.ToLower(b)" panic("") } - switch strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "strings.ToLower(a) == strings.ToLower(b) is better written as strings.EqualFold(a, b)" + switch strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" case true, false: panic("") } - if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // MATCH "strings.ToLower(a) == strings.ToLower(b) is better written as strings.EqualFold(a, b)" { + if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" { panic("") } From 721bcf71a761f8b117e2967036f156750a15c242 Mon Sep 17 00:00:00 2001 From: Alexey Surikov Date: Fri, 7 Sep 2018 20:03:12 +0200 Subject: [PATCH 003/254] staticcheck: detect unreachable cases in type switches Closes gh-351 --- staticcheck/lint.go | 78 +++++++++++ .../CheckUnreachableTypeCases.go | 123 ++++++++++++++++++ 2 files changed, 201 insertions(+) create mode 100644 staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 4f90bc460..e2138df1c 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -272,6 +272,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions}, {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment}, {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints}, + {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases}, {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps}, {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer}, @@ -2862,3 +2863,80 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { ast.Inspect(f, fn) } } + +func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { + // 
Check if T subsumes V in a type switch: + // T and V are structurally identical interfaces; + // interface T is a subset of V; + // T is an interface implemented by concrete type V. + subsumes := func(T, V types.Type) bool { + tIface, ok := T.Underlying().(*types.Interface) + if !ok { + return false + } + + return types.Implements(V, tIface) + } + + subsumesAny := func(Ts, Vs []types.Type) (types.Type, types.Type, bool) { + for _, T := range Ts { + for _, V := range Vs { + if subsumes(T, V) { + return T, V, true + } + } + } + + return nil, nil, false + } + + fn := func(node ast.Node) bool { + tsStmt, ok := node.(*ast.TypeSwitchStmt) + if !ok { + return true + } + + type ccAndTypes struct { + cc *ast.CaseClause + types []types.Type + } + + // All asserted types in the order of case clauses. + ccs := make([]ccAndTypes, 0, len(tsStmt.Body.List)) + for _, stmt := range tsStmt.Body.List { + cc, _ := stmt.(*ast.CaseClause) + + // Exclude the 'default' case. + if len(cc.List) == 0 { + continue + } + + Ts := make([]types.Type, len(cc.List)) + for i, expr := range cc.List { + Ts[i] = TypeOf(j, expr) + } + + ccs = append(ccs, ccAndTypes{cc: cc, types: Ts}) + } + + if len(ccs) <= 1 { + // Zero or one case clauses, nothing to check. + return true + } + + // Check if case clauses following cc have types that are subsumed by cc. + for i, cc := range ccs[:len(ccs)-1] { + for _, next := range ccs[i+1:] { + if T, V, yes := subsumesAny(cc.types, next.types); yes { + j.Errorf(next.cc, "unreachable case clause: %s will always match before %s", T.String(), V.String()) + } + } + } + + return true + } + + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go b/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go new file mode 100644 index 000000000..d08a18392 --- /dev/null +++ b/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go @@ -0,0 +1,123 @@ +package pkg + +import "io" + +type T struct{} + +func (T) Read(b []byte) (int, error) { return 0, nil } +func (T) something() string { return "non-exported method" } + +type V error +type U error + +func fn1() { + var ( + v interface{} + err error + ) + + switch v.(type) { + case io.Reader: + println("io.Reader") + case io.ReadCloser: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + println("io.ReadCloser") + } + + switch v.(type) { + case io.Reader: + println("io.Reader") + case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + println("T") + } + + switch v.(type) { + case io.Reader: + println("io.Reader") + case io.ReadCloser: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + println("io.ReadCloser") + case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + println("T") + } + + switch v.(type) { + case io.Reader: + println("io.Reader") + case io.ReadCloser, T: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + println("io.ReadCloser or T") + } + + switch v.(type) { + case io.ReadCloser, io.Reader: + println("io.ReadCloser or io.Reader") + case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + println("T") + } + + switch v.(type) { + default: + println("something else") + case io.Reader: + println("io.Reader") + case T: // MATCH "unreachable 
case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + println("T") + } + + switch err.(type) { + case V: + println("V") + case U: // MATCH "unreachable case clause: CheckUnreachableTypeCases.V will always match before CheckUnreachableTypeCases.U" + println("U") + } + + switch err.(type) { + case U: + println("U") + case V: // MATCH "unreachable case clause: CheckUnreachableTypeCases.U will always match before CheckUnreachableTypeCases.V" + println("V") + } +} + +func fn3() { + var ( + v interface{} + err error + ) + + switch v.(type) { + case T: + println("T") + case io.Reader: + println("io.Reader") + } + + switch v.(type) { + case io.ReadCloser: + println("io.ReadCloser") + case T: + println("T") + } + + switch v.(type) { + case io.ReadCloser: + println("io.ReadCloser") + case io.Reader: + println("io.Reader") + } + + switch v.(type) { + case T: + println("T") + } + + switch err.(type) { + case V, U: + println("V or U") + case io.Reader: + println("io.Reader") + } + + switch v.(type) { + default: + println("something") + } +} From 62634cd177c42fca9801bc841364b335f4f26bec Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 2 Jan 2019 10:45:00 +0100 Subject: [PATCH 004/254] staticcheck: rework comment in CheckUnreachableTypeCases --- staticcheck/lint.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index e2138df1c..9ec540214 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -2865,10 +2865,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { } func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { - // Check if T subsumes V in a type switch: - // T and V are structurally identical interfaces; - // interface T is a subset of V; - // T is an interface implemented by concrete type V. + // Check if T subsumes V in a type switch. T subsumes V if T is an interface and T's method set is a subset of V's method set. subsumes := func(T, V types.Type) bool { tIface, ok := T.Underlying().(*types.Interface) if !ok { From 58d974c26431f82c96ad8cbe763b6ae0b1179b1e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 3 Jan 2019 05:04:47 +0100 Subject: [PATCH 005/254] staticcheck: don't flag `for false {}` Closes gh-367 --- staticcheck/lint.go | 17 ++++++++++++----- .../CheckInfiniteEmptyLoop.go | 18 ++++++++++++++++++ 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 9ec540214..c96cf073a 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -654,14 +654,21 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { // is dynamic and the loop might terminate. Similarly for // channel receives. - if loop.Cond != nil && hasSideEffects(loop.Cond) { - return true - } - - j.Errorf(loop, "this loop will spin, using 100%% CPU") if loop.Cond != nil { + if hasSideEffects(loop.Cond) { + return true + } + if ident, ok := loop.Cond.(*ast.Ident); ok { + if k, ok := ObjectOf(j, ident).(*types.Const); ok { + if !constant.BoolVal(k.Val()) { + // don't flag `for false {}` loops. They're a debug aid. 
+ return true + } + } + } j.Errorf(loop, "loop condition never changes or has a race condition") } + j.Errorf(loop, "this loop will spin, using 100%% CPU") return true } diff --git a/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go b/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go index efa3e235d..72cc1c26e 100644 --- a/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go +++ b/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go @@ -15,6 +15,24 @@ func fn() { for true { // MATCH "loop condition never changes" } + + x := true + for x { // MATCH "loop condition never changes" + } + + x = false + for x { // MATCH "loop condition never changes" + } + + for false { + } + + false := true + for false { // MATCH "loop condition never changes" + } } // MATCH:16 "this loop will spin" +// MATCH:20 "this loop will spin" +// MATCH:24 "this loop will spin" +// MATCH:31 "this loop will spin" From b3e1e8beccde17f6e368e2dea0e79ef3370c7fa3 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 3 Jan 2019 05:14:19 +0100 Subject: [PATCH 006/254] staticcheck: don't flag identical lhs/rhs for custom float types Closes gh-363 --- staticcheck/lint.go | 2 +- .../CheckLhsRhsIdentical/CheckLhsRhsIdentical.go | 14 +++++++++++--- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index c96cf073a..28485f15b 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -872,7 +872,7 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { } switch op.Op { case token.EQL, token.NEQ: - if basic, ok := TypeOf(j, op.X).(*types.Basic); ok { + if basic, ok := TypeOf(j, op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN return true diff --git a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go index 76ac1d015..e914080a2 100644 --- a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go +++ b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go @@ -1,6 +1,8 @@ package pkg -func fn(a int, s []int, f float64) { +type Float float64 + +func fn(a int, s []int, f1 float64, f2 Float) { if 1 == 1 { // MATCH /identical expressions/ println() } @@ -19,10 +21,16 @@ func fn(a int, s []int, f float64) { if (1 + 2 + 3) == (1 + 2 + 3) { // MATCH /identical expressions/ println() } - if f == f { + if f1 == f1 { + println() + } + if f1 != f1 { + println() + } + if f1 > f1 { // MATCH /identical expressions/ println() } - if f != f { + if f2 == f2 { println() } } From 5ad80af7e056178dae7b1fa359d53afd3be2cf46 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 3 Jan 2019 06:01:50 +0100 Subject: [PATCH 007/254] simple: catch unnecessary nil check around type assertion Closes gh-371 --- simple/lint.go | 67 ++++++++++++++++++- .../src/LintAssertNotNil/LintAssertNotNil.go | 27 +++++++- 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/simple/lint.go b/simple/lint.go index 25de813b8..d93017b14 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -1380,7 +1380,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } return true } - fn := func(node ast.Node) bool { + fn1 := func(node ast.Node) bool { ifstmt, ok := node.(*ast.IfStmt) if !ok { return true @@ -1412,6 +1412,71 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { j.Errorf(ifstmt, "when %s is 
true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) return true } + fn2 := func(node ast.Node) bool { + // Check that outer ifstmt is an 'if x != nil {}' + ifstmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + if ifstmt.Init != nil { + return true + } + if ifstmt.Else != nil { + return true + } + if len(ifstmt.Body.List) != 1 { + return true + } + binop, ok := ifstmt.Cond.(*ast.BinaryExpr) + if !ok { + return true + } + if binop.Op != token.NEQ { + return true + } + lhs, ok := binop.X.(*ast.Ident) + if !ok { + return true + } + if !IsNil(j, binop.Y) { + return true + } + + // Check that inner ifstmt is an `if _, ok := x.(T); ok {}` + ifstmt, ok = ifstmt.Body.List[0].(*ast.IfStmt) + if !ok { + return true + } + assign, ok := ifstmt.Init.(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { + return true + } + assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) + if !ok { + return true + } + assertIdent, ok := assert.X.(*ast.Ident) + if !ok { + return true + } + if lhs.Obj != assertIdent.Obj { + return true + } + assignIdent, ok := assign.Lhs[1].(*ast.Ident) + if !ok { + return true + } + if !isOKCheck(assignIdent, ifstmt.Cond) { + return true + } + j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) + return true + } + fn := func(node ast.Node) bool { + b1 := fn1(node) + b2 := fn2(node) + return b1 || b2 + } for _, f := range j.Program.Files { ast.Inspect(f, fn) } diff --git a/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go b/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go index 049202638..f4b6b50c0 100644 --- a/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go +++ b/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go @@ -1,6 +1,6 @@ package pkg -func fn(i interface{}) { +func fn(i interface{}, x interface{}) { if _, ok := i.(string); ok && i != nil { // MATCH "when ok is true, i can't be nil" } if _, ok := i.(string); i != nil && ok { // MATCH "when ok is true, i can't be nil" @@ -11,4 +11,29 @@ func fn(i interface{}) { } if _, ok := i.(string); i == nil && ok { } + if i != nil { + if _, ok := i.(string); ok { // MATCH "when ok is true, i can't be nil" + } + } + if i != nil { + if _, ok := i.(string); ok { + } + println(i) + } + if i == nil { + if _, ok := i.(string); ok { + } + } + if i != nil { + if _, ok := i.(string); !ok { + } + } + if x != nil { + if _, ok := i.(string); ok { + } + } + if i != nil { + if _, ok := x.(string); ok { + } + } } From 1964500673eb8e0b491f2acb604c4bd087061b0c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 3 Sep 2018 10:10:12 +0200 Subject: [PATCH 008/254] simple: flag unnecessary guard around map deletion Closes gh-312 --- lint/lintdsl/lintdsl.go | 25 +++++-- simple/lint.go | 66 +++++++++++++++++++ .../LintGuardedDelete/LintGuardedDelete.go | 34 ++++++++++ 3 files changed, 118 insertions(+), 7 deletions(-) create mode 100644 simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 1bf567d9d..5a3b1d89d 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -233,15 +233,26 @@ func IsGoVersion(j *lint.Job, minor int) bool { } func CallNameAST(j *lint.Job, call *ast.CallExpr) string { - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return "" - } - fn, ok := j.NodePackage(call).TypesInfo.ObjectOf(sel.Sel).(*types.Func) - if !ok { + switch fun := call.Fun.(type) { + case *ast.SelectorExpr: + fn, ok := 
ObjectOf(j, fun.Sel).(*types.Func) + if !ok { + return "" + } + return fn.FullName() + case *ast.Ident: + obj := ObjectOf(j, fun) + switch obj := obj.(type) { + case *types.Func: + return obj.FullName() + case *types.Builtin: + return obj.Name() + default: + return "" + } + default: return "" } - return fn.FullName() } func IsCallToAST(j *lint.Job, node ast.Node, name string) bool { diff --git a/simple/lint.go b/simple/lint.go index d93017b14..cc04e62bb 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -62,6 +62,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions}, {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange}, {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers}, + {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete}, } } @@ -1797,3 +1798,68 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { ast.Inspect(f, fnFuncs) } } + +func (c *Checker) LintGuardedDelete(j *lint.Job) { + isCommaOkMapIndex := func(stmt ast.Stmt) (b *ast.Ident, m ast.Expr, key ast.Expr, ok bool) { + // Has to be of the form `_, = [] + + assign, ok := stmt.(*ast.AssignStmt) + if !ok { + return nil, nil, nil, false + } + if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { + return nil, nil, nil, false + } + if !IsBlank(assign.Lhs[0]) { + return nil, nil, nil, false + } + ident, ok := assign.Lhs[1].(*ast.Ident) + if !ok { + return nil, nil, nil, false + } + index, ok := assign.Rhs[0].(*ast.IndexExpr) + if !ok { + return nil, nil, nil, false + } + if _, ok := TypeOf(j, index.X).(*types.Map); !ok { + return nil, nil, nil, false + } + key = index.Index + return ident, index.X, key, true + } + fn := func(node ast.Node) bool { + stmt, ok := node.(*ast.IfStmt) + if !ok { + return true + } + if len(stmt.Body.List) != 1 { + return true + } + expr, ok := stmt.Body.List[0].(*ast.ExprStmt) + if !ok { + return true + } + call, ok := expr.X.(*ast.CallExpr) + if !ok { + return true + } + if !IsCallToAST(j, call, "delete") { + return true + } + b, m, key, ok := isCommaOkMapIndex(stmt.Init) + if !ok { + return true + } + if cond, ok := stmt.Cond.(*ast.Ident); !ok || ObjectOf(j, cond) != ObjectOf(j, b) { + return true + } + if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { + return true + } + j.Errorf(stmt, "unnecessary guard around call to delete") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go new file mode 100644 index 000000000..726de772c --- /dev/null +++ b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go @@ -0,0 +1,34 @@ +// Package pkg ... 
+package pkg + +func fn(m map[int]int) { + if _, ok := m[0]; ok { // MATCH "unnecessary guard" + delete(m, 0) + } + if _, ok := m[0]; !ok { + delete(m, 0) + } + if _, ok := m[0]; ok { + println("deleting") + delete(m, 0) + } + if v, ok := m[0]; ok && v > 0 { + delete(m, 0) + } + + var key int + if _, ok := m[key]; ok { // MATCH "unnecessary guard" + delete(m, key) + } + if _, ok := m[key]; ok { + delete(m, 0) + } + + var ok bool + if _, ok = m[key]; ok { + delete(m, 0) + } + if ok { + println("deleted") + } +} From aa95f1c27f2f674700a40e72b53d90d63f0e2f19 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 3 Sep 2018 04:27:28 +0200 Subject: [PATCH 009/254] stylecheck: flag Yoda conditions Closes gh-330 --- stylecheck/lint.go | 25 +++++++++++++++++++ .../CheckYodaConditions.go | 16 ++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 27e972050..f5b7fa0d7 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -48,6 +48,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes}, {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder}, {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical}, + {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions}, } } @@ -616,3 +617,27 @@ func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { ast.Inspect(f, fn) } } + +func (c *Checker) CheckYodaConditions(j *lint.Job) { + fn := func(node ast.Node) bool { + cond, ok := node.(*ast.BinaryExpr) + if !ok { + return true + } + if cond.Op != token.EQL && cond.Op != token.NEQ { + return true + } + if _, ok := cond.X.(*ast.BasicLit); !ok { + return true + } + if _, ok := cond.Y.(*ast.BasicLit); ok { + // Don't flag lit == lit conditions, just in case + return true + } + j.Errorf(cond, "don't use Yoda conditions") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go new file mode 100644 index 000000000..273794656 --- /dev/null +++ b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go @@ -0,0 +1,16 @@ +// Package pkg ... 
+package pkg + +func fn(x string, y int) { + if "" == x { // MATCH "Yoda" + } + if 0 == y { // MATCH "Yoda" + } + if 0 > y { + } + if "" == "" { + } + + if "" == "" || 0 == y { // MATCH "Yoda" + } +} From 51b3beccf3bdd50144ef605756da100d0ab43868 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 3 Sep 2018 06:54:45 +0200 Subject: [PATCH 010/254] staticcheck: flag more pointless comparisons in SA4003 Updates gh-325 --- lint/lintdsl/lintdsl.go | 8 ++ staticcheck/lint.go | 85 ++++++++++++++++--- .../CheckExtremeComparison.go | 41 +++++++++ .../CheckUnsignedComparison.go | 10 --- 4 files changed, 120 insertions(+), 24 deletions(-) create mode 100644 staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go delete mode 100644 staticcheck/testdata/src/CheckUnsignedComparison/CheckUnsignedComparison.go diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 5a3b1d89d..56e6ae24d 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -332,3 +332,11 @@ func GroupSpecs(j *lint.Job, specs []ast.Spec) [][]ast.Spec { return groups } + +func IsObject(obj types.Object, name string) bool { + var path string + if pkg := obj.Pkg(); pkg != nil { + path = pkg.Path() + "." + } + return path+obj.Name() == name +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 28485f15b..659cba830 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -257,7 +257,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical}, {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy}, {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison}, - {ID: "SA4003", FilterGenerated: false, Fn: c.CheckUnsignedComparison}, + {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison}, {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop}, {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues}, {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition}, @@ -1391,7 +1391,15 @@ func (c *Checker) CheckNilMaps(j *lint.Job) { } } -func (c *Checker) CheckUnsignedComparison(j *lint.Job) { +func (c *Checker) CheckExtremeComparison(j *lint.Job) { + isobj := func(expr ast.Expr, name string) bool { + sel, ok := expr.(*ast.SelectorExpr) + if !ok { + return false + } + return IsObject(ObjectOf(j, sel.Sel), name) + } + fn := func(node ast.Node) bool { expr, ok := node.(*ast.BinaryExpr) if !ok { @@ -1402,19 +1410,68 @@ func (c *Checker) CheckUnsignedComparison(j *lint.Job) { if !ok { return true } - if (basic.Info() & types.IsUnsigned) == 0 { - return true - } - lit, ok := expr.Y.(*ast.BasicLit) - if !ok || lit.Value != "0" { - return true - } - switch expr.Op { - case token.GEQ: - j.Errorf(expr, "unsigned values are always >= 0") - case token.LSS: - j.Errorf(expr, "unsigned values are never < 0") + + var max string + var min string + + switch basic.Kind() { + case types.Uint8: + max = "math.MaxUint8" + case types.Uint16: + max = "math.MaxUint16" + case types.Uint32: + max = "math.MaxUint32" + case types.Uint64: + max = "math.MaxUint64" + case types.Uint: + max = "math.MaxUint64" + + case types.Int8: + min = "math.MinInt8" + max = "math.MaxInt8" + case types.Int16: + min = "math.MinInt16" + max = "math.MaxInt16" + case types.Int32: + min = "math.MinInt32" + max = "math.MaxInt32" + case types.Int64: + min = "math.MinInt64" + max = "math.MaxInt64" + case types.Int: + min = "math.MinInt64" + max = "math.MaxInt64" + } + + if (expr.Op == token.GTR || expr.Op == token.GEQ) && 
isobj(expr.Y, max) || + (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.X, max) { + j.Errorf(expr, "no value of type %s is greater than %s", basic, max) + } + if expr.Op == token.LEQ && isobj(expr.Y, max) || + expr.Op == token.GEQ && isobj(expr.X, max) { + j.Errorf(expr, "every value of type %s is <= %s", basic, max) + } + + if (basic.Info() & types.IsUnsigned) != 0 { + if (expr.Op == token.LSS || expr.Op == token.LEQ) && IsIntLiteral(expr.Y, "0") || + (expr.Op == token.GTR || expr.Op == token.GEQ) && IsIntLiteral(expr.X, "0") { + j.Errorf(expr, "no value of type %s is less than 0", basic) + } + if expr.Op == token.GEQ && IsIntLiteral(expr.Y, "0") || + expr.Op == token.LEQ && IsIntLiteral(expr.X, "0") { + j.Errorf(expr, "every value of type %s is >= 0", basic) + } + } else { + if (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.Y, min) || + (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.X, min) { + j.Errorf(expr, "no value of type %s is less than %s", basic, min) + } + if expr.Op == token.GEQ && isobj(expr.Y, min) || + expr.Op == token.LEQ && isobj(expr.X, min) { + j.Errorf(expr, "every value of type %s is >= %s", basic, min) + } } + return true } for _, f := range j.Program.Files { diff --git a/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go b/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go new file mode 100644 index 000000000..a37521f2f --- /dev/null +++ b/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go @@ -0,0 +1,41 @@ +package pkg + +import "math" + +func fn() { + var ( + u8 uint8 + u16 uint16 + u uint + + i8 int8 + i16 int16 + i int + ) + + _ = u8 > math.MaxUint8 // MATCH "no value of type uint8 is greater than math.MaxUint8" + _ = u8 >= math.MaxUint8 // MATCH "no value of type uint8 is greater than math.MaxUint8" + _ = u8 >= 0 // MATCH "every value of type uint8 is >= 0" + _ = u8 <= math.MaxUint8 // MATCH "every value of type uint8 is <= math.MaxUint8" + _ = u8 > 0 + _ = u8 >= 1 + _ = u8 < math.MaxUint8 + + _ = u16 > math.MaxUint8 + _ = u16 > math.MaxUint16 // MATCH "no value of type uint16 is greater than math.MaxUint16" + _ = u16 <= math.MaxUint8 + _ = u16 <= math.MaxUint16 // MATCH "every value of type uint16 is <= math.MaxUint16" + + _ = u > math.MaxUint32 + _ = u > math.MaxUint64 // MATCH "no value of type uint is greater than math.MaxUint64" + + _ = i8 > math.MaxInt8 // MATCH "no value of type int8 is greater than math.MaxInt8" + _ = i16 > math.MaxInt8 + _ = i16 > math.MaxInt16 // MATCH "no value of type int16 is greater than math.MaxInt16" + _ = i > math.MaxInt32 + _ = i > math.MaxInt64 // MATCH "no value of type int is greater than math.MaxInt64" + _ = i8 < 0 + _ = i8 <= math.MinInt8 // MATCH "no value of type int8 is less than math.MinInt8" + _ = i8 < math.MinInt8 // MATCH "no value of type int8 is less than math.MinInt8" + _ = i8 >= math.MinInt8 // MATCH "every value of type int8 is >= math.MinInt8" +} diff --git a/staticcheck/testdata/src/CheckUnsignedComparison/CheckUnsignedComparison.go b/staticcheck/testdata/src/CheckUnsignedComparison/CheckUnsignedComparison.go deleted file mode 100644 index 0390c271c..000000000 --- a/staticcheck/testdata/src/CheckUnsignedComparison/CheckUnsignedComparison.go +++ /dev/null @@ -1,10 +0,0 @@ -package pkg - -func fn(x uint32) { - if x >= 0 { // MATCH /unsigned values are always >= 0/ - println() - } - if x < 0 { // MATCH /unsigned values are never < 0/ - println() - } -} From c51f1e51d5a3f3c686a9e8229818d34e8fe4e547 Mon Sep 
17 00:00:00 2001 From: Dominik Honnef Date: Fri, 4 Jan 2019 07:41:18 +0100 Subject: [PATCH 011/254] stylecheck: don't lint aliased types Closes gh-379 --- stylecheck/lint.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/stylecheck/lint.go b/stylecheck/lint.go index f5b7fa0d7..ee7efa450 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -254,7 +254,7 @@ func (c *Checker) CheckUnexportedReturn(j *lint.Job) { func (c *Checker) CheckReceiverNames(j *lint.Job) { for _, pkg := range j.Program.InitialPackages { for _, m := range pkg.SSA.Members { - if T, ok := m.Object().(*types.TypeName); ok { + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { ms := typeutil.IntuitiveMethodSet(T.Type(), nil) for _, sel := range ms { fn := sel.Obj().(*types.Func) @@ -281,7 +281,7 @@ func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { names := map[string]int{} var firstFn *types.Func - if T, ok := m.Object().(*types.TypeName); ok { + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { ms := typeutil.IntuitiveMethodSet(T.Type(), nil) for _, sel := range ms { fn := sel.Obj().(*types.Func) From 14a06965f532b87246e8f15fbb1fd5b92d4f163b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 3 Jan 2019 08:47:05 +0100 Subject: [PATCH 012/254] simple: flag unnecessarily complex type switches Closes gh-126 --- simple/lint.go | 76 +++++++++++++++++++ .../LintSimplifyTypeSwitch.go | 33 ++++++++ 2 files changed, 109 insertions(+) create mode 100644 simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go diff --git a/simple/lint.go b/simple/lint.go index cc04e62bb..55429a41f 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -63,6 +63,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange}, {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers}, {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete}, + {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch}, } } @@ -1863,3 +1864,78 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { ast.Inspect(f, fn) } } + +func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { + fn := func(node ast.Node) bool { + stmt, ok := node.(*ast.TypeSwitchStmt) + if !ok { + return true + } + if stmt.Init != nil { + // bailing out for now, can't anticipate how type switches with initializers are being used + return true + } + expr, ok := stmt.Assign.(*ast.ExprStmt) + if !ok { + // the user is in fact assigning the result + return true + } + assert := expr.X.(*ast.TypeAssertExpr) + ident, ok := assert.X.(*ast.Ident) + if !ok { + return true + } + x := ObjectOf(j, ident) + var allOffenders []ast.Node + for _, clause := range stmt.Body.List { + clause := clause.(*ast.CaseClause) + if len(clause.List) != 1 { + continue + } + hasUnrelatedAssertion := false + var offenders []ast.Node + ast.Inspect(clause, func(node ast.Node) bool { + assert2, ok := node.(*ast.TypeAssertExpr) + if !ok { + return true + } + ident, ok := assert2.X.(*ast.Ident) + if !ok { + hasUnrelatedAssertion = true + return false + } + if ObjectOf(j, ident) != x { + hasUnrelatedAssertion = true + return false + } + + if !types.Identical(TypeOf(j, clause.List[0]), TypeOf(j, assert2.Type)) { + hasUnrelatedAssertion = true + return false + } + offenders = append(offenders, assert2) + return true + }) + if !hasUnrelatedAssertion { + // don't flag cases that have other type assertions + // unrelated to the one in the case clause. 
often + // times, this is done for symmetry, when two + // different values have to be asserted to the same + // type. + allOffenders = append(allOffenders, offenders...) + } + } + if len(allOffenders) != 0 { + at := "" + for _, offender := range allOffenders { + pos := j.Program.DisplayPosition(offender.Pos()) + at += "\n\t" + pos.String() + } + j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go b/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go new file mode 100644 index 000000000..275a388bb --- /dev/null +++ b/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go @@ -0,0 +1,33 @@ +package pkg + +import "fmt" + +func gen() interface{} { return nil } + +func fn(x, y interface{}) { + switch z := x.(type) { + case int: + _ = z + fmt.Println(x.(int)) + } + switch x.(type) { + case int: + fmt.Println(x.(int), y.(int)) + } + switch x.(type) { // MATCH "assigning the result of this type assertion" + case int: + fmt.Println(x.(int)) + } + switch x.(type) { + case int: + fmt.Println(x.(string)) + } + switch x.(type) { + case int: + fmt.Println(y.(int)) + } + switch (gen()).(type) { + case int: + fmt.Println((gen()).(int)) + } +} From 1297f1d20ec0294602b7b74d730071c7504d0b0f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 3 Jan 2019 17:33:18 +0100 Subject: [PATCH 013/254] simple: extend S1017 to match more unnecessary guards Closes gh-359 --- simple/lint.go | 193 +++++++++++++++++-------------- simple/testdata/src/trim/trim.go | 12 ++ 2 files changed, 120 insertions(+), 85 deletions(-) diff --git a/simple/lint.go b/simple/lint.go index 55429a41f..fb7e90ce0 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -1087,22 +1087,26 @@ func (c *Checker) LintTrim(j *lint.Job) { if !ok { return true } - call, ok := condCall.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - if IsIdent(call.X, "strings") { + switch { + case IsCallToAST(j, condCall, "strings.HasPrefix"): + pkg = "strings" + fun = "HasPrefix" + case IsCallToAST(j, condCall, "strings.HasSuffix"): pkg = "strings" - } else if IsIdent(call.X, "bytes") { + fun = "HasSuffix" + case IsCallToAST(j, condCall, "strings.Contains"): + pkg = "strings" + fun = "Contains" + case IsCallToAST(j, condCall, "bytes.HasPrefix"): pkg = "bytes" - } else { - return true - } - if IsIdent(call.Sel, "HasPrefix") { fun = "HasPrefix" - } else if IsIdent(call.Sel, "HasSuffix") { + case IsCallToAST(j, condCall, "bytes.HasSuffix"): + pkg = "bytes" fun = "HasSuffix" - } else { + case IsCallToAST(j, condCall, "bytes.Contains"): + pkg = "bytes" + fun = "Contains" + default: return true } @@ -1119,102 +1123,121 @@ func (c *Checker) LintTrim(j *lint.Job) { if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) { return true } - slice, ok := assign.Rhs[0].(*ast.SliceExpr) - if !ok { - return true - } - if slice.Slice3 { - return true - } - if !sameNonDynamic(slice.X, condCall.Args[0]) { - return true - } - var index ast.Expr - switch fun { - case "HasPrefix": - // TODO(dh) We could detect a High that is len(s), but another - // rule will already flag that, anyway. 
- if slice.High != nil { + + switch rhs := assign.Rhs[0].(type) { + case *ast.CallExpr: + if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) { return true } - index = slice.Low - case "HasSuffix": - if slice.Low != nil { - n, ok := ExprToInt(j, slice.Low) - if !ok || n != 0 { - return true - } + if IsCallToAST(j, condCall, "strings.HasPrefix") && IsCallToAST(j, rhs, "strings.TrimPrefix") || + IsCallToAST(j, condCall, "strings.HasSuffix") && IsCallToAST(j, rhs, "strings.TrimSuffix") || + IsCallToAST(j, condCall, "strings.Contains") && IsCallToAST(j, rhs, "strings.Replace") || + IsCallToAST(j, condCall, "bytes.HasPrefix") && IsCallToAST(j, rhs, "bytes.TrimPrefix") || + IsCallToAST(j, condCall, "bytes.HasSuffix") && IsCallToAST(j, rhs, "bytes.TrimSuffix") || + IsCallToAST(j, condCall, "bytes.Contains") && IsCallToAST(j, rhs, "bytes.Replace") { + j.Errorf(ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(j, rhs)) } - index = slice.High - } - - switch index := index.(type) { - case *ast.CallExpr: - if fun != "HasPrefix" { + return true + case *ast.SliceExpr: + slice := rhs + if !ok { return true } - if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { + if slice.Slice3 { return true } - if len(index.Args) != 1 { + if !sameNonDynamic(slice.X, condCall.Args[0]) { return true } - id3 := index.Args[Arg("len.v")] - switch oid3 := condCall.Args[1].(type) { - case *ast.BasicLit: + var index ast.Expr + switch fun { + case "HasPrefix": + // TODO(dh) We could detect a High that is len(s), but another + // rule will already flag that, anyway. + if slice.High != nil { + return true + } + index = slice.Low + case "HasSuffix": + if slice.Low != nil { + n, ok := ExprToInt(j, slice.Low) + if !ok || n != 0 { + return true + } + } + index = slice.High + } + + switch index := index.(type) { + case *ast.CallExpr: + if fun != "HasPrefix" { + return true + } + if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { + return true + } + if len(index.Args) != 1 { + return true + } + id3 := index.Args[Arg("len.v")] + switch oid3 := condCall.Args[1].(type) { + case *ast.BasicLit: + if pkg != "strings" { + return false + } + lit, ok := id3.(*ast.BasicLit) + if !ok { + return true + } + s1, ok1 := ExprToString(j, lit) + s2, ok2 := ExprToString(j, condCall.Args[1]) + if !ok1 || !ok2 || s1 != s2 { + return true + } + default: + if !sameNonDynamic(id3, oid3) { + return true + } + } + case *ast.BasicLit, *ast.Ident: + if fun != "HasPrefix" { + return true + } if pkg != "strings" { - return false + return true } - lit, ok := id3.(*ast.BasicLit) - if !ok { + string, ok1 := ExprToString(j, condCall.Args[1]) + int, ok2 := ExprToInt(j, slice.Low) + if !ok1 || !ok2 || int != int64(len(string)) { return true } - s1, ok1 := ExprToString(j, lit) - s2, ok2 := ExprToString(j, condCall.Args[1]) - if !ok1 || !ok2 || s1 != s2 { + case *ast.BinaryExpr: + if fun != "HasSuffix" { return true } - default: - if !sameNonDynamic(id3, oid3) { + if index.Op != token.SUB { return true } - } - case *ast.BasicLit, *ast.Ident: - if fun != "HasPrefix" { - return true - } - if pkg != "strings" { - return true - } - string, ok1 := ExprToString(j, condCall.Args[1]) - int, ok2 := ExprToInt(j, slice.Low) - if !ok1 || !ok2 || int != int64(len(string)) { - return true - } - case *ast.BinaryExpr: - if fun != "HasSuffix" { - return true - } - if index.Op != token.SUB { + if !isLenOnIdent(index.X, condCall.Args[0]) || + !isLenOnIdent(index.Y, 
condCall.Args[1]) { + return true + } + default: return true } - if !isLenOnIdent(index.X, condCall.Args[0]) || - !isLenOnIdent(index.Y, condCall.Args[1]) { - return true + + var replacement string + switch fun { + case "HasPrefix": + replacement = "TrimPrefix" + case "HasSuffix": + replacement = "TrimSuffix" } + j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) + return true default: return true } - - var replacement string - switch fun { - case "HasPrefix": - replacement = "TrimPrefix" - case "HasSuffix": - replacement = "TrimSuffix" - } - j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) - return true } for _, f := range j.Program.Files { ast.Inspect(f, fn) diff --git a/simple/testdata/src/trim/trim.go b/simple/testdata/src/trim/trim.go index edc2a92a3..ccab6c496 100644 --- a/simple/testdata/src/trim/trim.go +++ b/simple/testdata/src/trim/trim.go @@ -21,6 +21,18 @@ func fn() { id1 = id1[len(s1):] } + if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + id1 = strings.TrimPrefix(id1, s1) + } + + if strings.HasPrefix(id1, s1) { + id1 = strings.TrimPrefix(id1, s2) + } + + if strings.Contains(id1, s1) { // MATCH /should replace.*with.*strings.Replace/ + id1 = strings.Replace(id1, s1, "something", 123) + } + if strings.HasSuffix(id1, s2) { // MATCH /should replace.*with.*strings.TrimSuffix/ id1 = id1[:len(id1)-len(s2)] } From 6f3d766489dfcf82b32a7e4f8b5dfaf41150915a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 3 Jan 2019 17:50:50 +0100 Subject: [PATCH 014/254] staticcheck: also flag Fprintf in SA1006 Closes gh-255 --- arg/arg.go | 1 + staticcheck/lint.go | 15 ++++++++++----- .../src/CheckUnsafePrintf/CheckUnsafePrintf.go | 3 +++ 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/arg/arg.go b/arg/arg.go index d9e42dbea..6f3aafc4e 100644 --- a/arg/arg.go +++ b/arg/arg.go @@ -12,6 +12,7 @@ var args = map[string]int{ "encoding/binary.Write.data": 2, "errors.New.text": 0, "fmt.Printf.format": 0, + "fmt.Fprintf.format": 1, "fmt.Sprintf.a[0]": 1, "fmt.Sprintf.format": 0, "len.v": 0, diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 659cba830..6b7cc1a33 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -964,19 +964,24 @@ func (c *Checker) CheckUnsafePrintf(j *lint.Job) { if !ok { return true } - if !IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + var arg int + if IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + arg = Arg("fmt.Printf.format") + } else if IsCallToAnyAST(j, call, "fmt.Fprintf") { + arg = Arg("fmt.Fprintf.format") + } else { return true } - if len(call.Args) != 1 { + if len(call.Args) != arg+1 { return true } - switch call.Args[Arg("fmt.Printf.format")].(type) { + switch call.Args[arg].(type) { case *ast.CallExpr, *ast.Ident: default: return true } - j.Errorf(call.Args[Arg("fmt.Printf.format")], - "printf-style function with dynamic first argument and no further arguments should use print-style function instead") + j.Errorf(call.Args[arg], + "printf-style function with dynamic format string and no further arguments should use print-style function instead") return true } for _, f := range j.Program.Files { diff --git a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go index bf428c01d..a573f072c 100644 --- a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go +++ 
b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go @@ -3,6 +3,7 @@ package pkg import ( "fmt" "log" + "os" ) func fn() { @@ -13,6 +14,8 @@ func fn() { log.Printf(fn2()) // MATCH /should use print-style function/ fmt.Printf(s) // MATCH /should use print-style function/ fmt.Printf(s, "") + fmt.Fprintf(os.Stdout, s) // MATCH /should use print-style function/ + fmt.Fprintf(os.Stdout, s, "") fmt.Printf(fn2(), "") fmt.Printf("") From f72fdd6b2db3c3bc4dc60dbf588f8ada46aa3801 Mon Sep 17 00:00:00 2001 From: helloPiers Date: Sun, 6 Jan 2019 15:37:57 +0000 Subject: [PATCH 015/254] lint/lintutil: correctly parse position information in compile errors Closes: gh-382 [via git-merge-pr] --- lint/lintutil/util.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index ba9f0fdcb..efc04f917 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -319,9 +319,9 @@ func parsePos(pos string) token.Position { if parts == nil { panic(fmt.Sprintf("internal error: malformed position %q", pos)) } - file := parts[0] - line, _ := strconv.Atoi(parts[1]) - col, _ := strconv.Atoi(parts[2]) + file := parts[1] + line, _ := strconv.Atoi(parts[2]) + col, _ := strconv.Atoi(parts[3]) return token.Position{ Filename: file, Line: line, From 3f1c8253044aabf792852a2a093728a4e46d8dec Mon Sep 17 00:00:00 2001 From: helloPiers Date: Sun, 6 Jan 2019 15:46:54 +0000 Subject: [PATCH 016/254] lint/lintutil/format: avoid panic in Stylish if first Position to report is empty Pointer tw was not initialised before use, if the first call to Format was with an empty Position, leading to panic. This change forces the Filename to be something ("-"), to avoid that. Closes: gh-383 [via git-merge-pr] --- lint/lintutil/format/format.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lint/lintutil/format/format.go b/lint/lintutil/format/format.go index d41ab4b67..23aa132de 100644 --- a/lint/lintutil/format/format.go +++ b/lint/lintutil/format/format.go @@ -102,6 +102,10 @@ type Stylish struct { } func (o *Stylish) Format(p lint.Problem) { + if p.Position.Filename == "" { + p.Position.Filename = "-" + } + if p.Position.Filename != o.prevFile { if o.prevFile != "" { o.tw.Flush() From 453727d7f9d690ad78b22bfbb36d90106f3318ca Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 9 Jan 2019 16:15:35 +0100 Subject: [PATCH 017/254] Improve README Point to our tagged releases; mention the minimum Go version --- README.md | 36 +++++++++++++++++++++++++++++------- cmd/errcheck-ng/README.md | 13 +------------ cmd/keyify/README.md | 4 +--- cmd/rdeps/README.md | 4 +--- cmd/staticcheck/README.md | 2 +- cmd/structlayout/README.md | 6 +----- 6 files changed, 34 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index a740208ef..0feea1eff 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # honnef.co/go/tools `honnef.co/go/tools/...` is a collection of tools and libraries for -working with Go code, including linters and static analysis. +working with Go code, including linters and static analysis, most +prominently staticcheck. **These tools are supported by [patrons on Patreon](https://www.patreon.com/dominikh) and @@ -11,12 +12,29 @@ consider purchasing ## Installation -To install or update all tools run -``` -go get -u honnef.co/go/tools/... -``` +### Releases -Alternatively, see individual tool pages below. +It is recommended that you run released versions of the tools. 
These +releases can be found as git tags (e.g. `2019.1`) as well as prebuilt +binaries in the [releases tab](https://github.com/dominikh/go-tools/releases). + +The easiest way of using the releases from source is to use a Go +package manager such as Godep or Go modules. Alternatively you can use +a combination of `git clone -b` and `go get` to check out the +appropriate tag and download its dependencies. + + +### Master + +You can also run the master branch instead of a release. Note that +while the master branch is usually stable, it may still contain new +checks or backwards incompatible changes that break your build. By +using the master branch you agree to become a beta tester. + +To use the master branch, a simple `go get -u +honnef.co/go/tools/cmd....` suffices. You can also install a subset of +the commands, for example only staticcheck with `go get -u +honnef.co/go/tools/cmd/staticcheck`. ## Tools @@ -42,9 +60,13 @@ Their main purpose is to aid the implementation of the tools. If you decide to use these libraries, please vendor them and expect regular backwards-incompatible changes. +## System requirements + +We support the last two versions of Go. + ## Documentation -You can find more documentation on +You can find extensive documentation on [staticcheck.io](https://staticcheck.io). ## Sponsors diff --git a/cmd/errcheck-ng/README.md b/cmd/errcheck-ng/README.md index 29b95b210..3693e32a9 100644 --- a/cmd/errcheck-ng/README.md +++ b/cmd/errcheck-ng/README.md @@ -2,15 +2,4 @@ _errcheck-ng_ is the next generation of errcheck. -## Installation - - go get honnef.co/go/tools/cmd/errcheck-ng - -## Usage - -TODO - -## Purpose - -TODO - +It is a prototype and not fit for use. diff --git a/cmd/keyify/README.md b/cmd/keyify/README.md index 6a2fa64ec..bae4fd62e 100644 --- a/cmd/keyify/README.md +++ b/cmd/keyify/README.md @@ -3,9 +3,7 @@ ones (`T{A: 1, B: 2, C: 3}`) ## Installation -Keyify requires Go 1.6 or later. - - go get honnef.co/go/tools/cmd/keyify +See [the main README](https://github.com/dominikh/go-tools#installation) for installation instructions. ## Usage diff --git a/cmd/rdeps/README.md b/cmd/rdeps/README.md index a6b071822..b470bd1ae 100644 --- a/cmd/rdeps/README.md +++ b/cmd/rdeps/README.md @@ -3,9 +3,7 @@ packages. # Installation -``` -go get honnef.co/go/tools/cmd/rdeps -``` +See [the main README](https://github.com/dominikh/go-tools#installation) for installation instructions. # Usage diff --git a/cmd/staticcheck/README.md b/cmd/staticcheck/README.md index 127d8edf3..4d14577fd 100644 --- a/cmd/staticcheck/README.md +++ b/cmd/staticcheck/README.md @@ -6,7 +6,7 @@ point out dead code, and more. ## Installation - go get honnef.co/go/tools/cmd/staticcheck +See [the main README](https://github.com/dominikh/go-tools#installation) for installation instructions. ## Documentation diff --git a/cmd/structlayout/README.md b/cmd/structlayout/README.md index 5a2b06514..5667ed461 100644 --- a/cmd/structlayout/README.md +++ b/cmd/structlayout/README.md @@ -26,11 +26,7 @@ go get github.com/ajstarks/svgo/structlayout-svg ## Installation -``` -go get honnef.co/go/tools/cmd/structlayout -go get honnef.co/go/tools/cmd/structlayout-pretty -go get honnef.co/go/tools/cmd/structlayout-optimize -``` +See [the main README](https://github.com/dominikh/go-tools#installation) for installation instructions. 
## Examples From 25ec6875c40dc2a86cdf593677b45207aed3f7a5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 9 Jan 2019 16:18:28 +0100 Subject: [PATCH 018/254] Remove outdated link to paid support --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 0feea1eff..90defe66c 100644 --- a/README.md +++ b/README.md @@ -7,8 +7,7 @@ prominently staticcheck. **These tools are supported by [patrons on Patreon](https://www.patreon.com/dominikh) and [sponsors](#sponsors). If you use these tools at your company, -consider purchasing -[commercial support](https://staticcheck.io/pricing).** +consider supporting open source by [becoming a sponsor!](mailto:dominik@honnef.co?subject=Staticcheck%20sponsorship)** ## Installation From 573657f03166f41f120d5855d2844d8dd43aa0e5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 9 Jan 2019 16:33:26 +0100 Subject: [PATCH 019/254] Ignore unused objects in code generated by Go 1.10 cgo Closes gh-384 --- lint/generated.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lint/generated.go b/lint/generated.go index d407223e7..58b23f68f 100644 --- a/lint/generated.go +++ b/lint/generated.go @@ -7,6 +7,8 @@ import ( ) var ( + // used by cgo before Go 1.11 + oldCgo = []byte("// Created by cgo - DO NOT EDIT") prefix = []byte("// Code generated ") suffix = []byte(" DO NOT EDIT.") nl = []byte("\n") @@ -25,6 +27,9 @@ func isGenerated(r io.Reader) bool { if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) { return true } + if bytes.Equal(s, oldCgo) { + return true + } if err == io.EOF { break } From 5bcec433c8ea69ebef8f2aabab7185230b924e0f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 9 Jan 2019 16:43:34 +0100 Subject: [PATCH 020/254] lint/lintutil: parse position information as seen in Go 1.10 cgo Closes gh-387 --- lint/lintutil/util.go | 2 +- lint/lintutil/util_test.go | 45 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 lint/lintutil/util_test.go diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index efc04f917..1142aa04d 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -309,7 +309,7 @@ func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, erro return problems, nil } -var posRe = regexp.MustCompile(`^(.+?):(\d+):(\d+)?$`) +var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) func parsePos(pos string) token.Position { if pos == "-" || pos == "" { diff --git a/lint/lintutil/util_test.go b/lint/lintutil/util_test.go new file mode 100644 index 000000000..b348ba1bf --- /dev/null +++ b/lint/lintutil/util_test.go @@ -0,0 +1,45 @@ +package lintutil + +import ( + "go/token" + "testing" +) + +func TestParsePos(t *testing.T) { + var tests = []struct { + in string + out token.Position + }{ + { + "/tmp/gopackages280076669/go-build/net/cgo_linux.cgo1.go:1", + token.Position{ + Filename: "/tmp/gopackages280076669/go-build/net/cgo_linux.cgo1.go", + Line: 1, + Column: 0, + }, + }, + { + "/tmp/gopackages280076669/go-build/net/cgo_linux.cgo1.go:1:", + token.Position{ + Filename: "/tmp/gopackages280076669/go-build/net/cgo_linux.cgo1.go", + Line: 1, + Column: 0, + }, + }, + { + "/tmp/gopackages280076669/go-build/net/cgo_linux.cgo1.go:23:43", + token.Position{ + Filename: "/tmp/gopackages280076669/go-build/net/cgo_linux.cgo1.go", + Line: 23, + Column: 43, + }, + }, + } + + for _, tt := range tests { + res := parsePos(tt.in) + if res 
!= tt.out { + t.Errorf("failed to parse %q correctly", tt.in) + } + } +} From 072eb0b8b7d5d695a3de63ec8daaf887d3147845 Mon Sep 17 00:00:00 2001 From: Reilly Watson Date: Tue, 22 Jan 2019 17:22:22 -0500 Subject: [PATCH 021/254] simple: fix false positives on guarded delete check with else-statements Closes: gh-398 [via git-merge-pr] --- simple/lint.go | 3 +++ simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/simple/lint.go b/simple/lint.go index fb7e90ce0..1d96713f6 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -1859,6 +1859,9 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if len(stmt.Body.List) != 1 { return true } + if stmt.Else != nil { + return true + } expr, ok := stmt.Body.List[0].(*ast.ExprStmt) if !ok { return true diff --git a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go index 726de772c..2a4f34332 100644 --- a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go +++ b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go @@ -23,6 +23,11 @@ func fn(m map[int]int) { if _, ok := m[key]; ok { delete(m, 0) } + if _, ok := m[key]; ok { + delete(m, key) + } else { + println("not deleted") + } var ok bool if _, ok = m[key]; ok { From 3f36ca0168d8e4dfc42bee4d67ed3c99ae3817ed Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 23 Jan 2019 17:27:03 +0100 Subject: [PATCH 022/254] staticcheck: flag attempts at marshaling unexported fields Updates gh-300 --- arg/arg.go | 10 +- lint/lintdsl/lintdsl.go | 28 ++++ staticcheck/lint.go | 45 +++++++ .../src/CheckNoopMarshal/CheckNoopMarshal.go | 121 ++++++++++++++++++ 4 files changed, 203 insertions(+), 1 deletion(-) create mode 100644 staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go diff --git a/arg/arg.go b/arg/arg.go index 6f3aafc4e..1e7f30db4 100644 --- a/arg/arg.go +++ b/arg/arg.go @@ -1,6 +1,10 @@ package arg var args = map[string]int{ + "(*encoding/json.Decoder).Decode.v": 0, + "(*encoding/json.Encoder).Encode.v": 0, + "(*encoding/xml.Decoder).Decode.v": 0, + "(*encoding/xml.Encoder).Encode.v": 0, "(*sync.Pool).Put.x": 0, "(*text/template.Template).Parse.text": 0, "(io.Seeker).Seek.offset": 0, @@ -11,10 +15,12 @@ var args = map[string]int{ "bytes.Equal.b": 1, "encoding/binary.Write.data": 2, "errors.New.text": 0, - "fmt.Printf.format": 0, "fmt.Fprintf.format": 1, + "fmt.Printf.format": 0, "fmt.Sprintf.a[0]": 1, "fmt.Sprintf.format": 0, + "json.Marshal.v": 0, + "json.Unmarshal.v": 1, "len.v": 0, "make.size[0]": 1, "make.size[1]": 2, @@ -29,6 +35,8 @@ var args = map[string]int{ "sort.Sort.data": 0, "time.Parse.layout": 0, "time.Sleep.d": 0, + "xml.Marshal.v": 0, + "xml.Unmarshal.v": 1, } func Arg(name string) int { diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 56e6ae24d..31c160939 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -340,3 +340,31 @@ func IsObject(obj types.Object, name string) bool { } return path+obj.Name() == name } + +func HasExportedFieldsR(T *types.Struct) bool { + return hasExportedFieldsR(T, nil) +} + +func hasExportedFieldsR(T *types.Struct, seen map[types.Type]bool) bool { + if seen == nil { + seen = map[types.Type]bool{} + } + if seen[T] { + return false + } + seen[T] = true + for i := 0; i < T.NumFields(); i++ { + field := T.Field(i) + if field.Anonymous() { + s, ok := Dereference(field.Type()).Underlying().(*types.Struct) + if ok && hasExportedFieldsR(s, seen) { + return true + } + } else { + if 
ast.IsExported(field.Name()) { + return true + } + } + } + return false +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 6b7cc1a33..64821f4b5 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -204,8 +204,51 @@ var ( "regexp.MatchReader": loopedRegexp("regexp.MatchReader"), "regexp.MatchString": loopedRegexp("regexp.MatchString"), } + + checkNoopMarshal = map[string]CallCheck{ + // TODO(dh): should we really flag XML? Even an empty struct + // produces a non-zero amount of data, namely its type name. + // Let's see if we encounter any false positives. + // + // Also, should we flag gob? + "encoding/json.Marshal": checkNoopMarshalImpl(Arg("json.Marshal.v"), "MarshalJSON", "MarshalText"), + "encoding/xml.Marshal": checkNoopMarshalImpl(Arg("xml.Marshal.v"), "MarshalXML", "MarshalText"), + "(*encoding/json.Encoder).Encode": checkNoopMarshalImpl(Arg("(*encoding/json.Encoder).Encode.v"), "MarshalJSON", "MarshalText"), + "(*encoding/xml.Encoder).Encode": checkNoopMarshalImpl(Arg("(*encoding/xml.Encoder).Encode.v"), "MarshalXML", "MarshalText"), + + "encoding/json.Unmarshal": checkNoopMarshalImpl(Arg("json.Unmarshal.v"), "UnmarshalJSON", "UnmarshalText"), + "encoding/xml.Unmarshal": checkNoopMarshalImpl(Arg("xml.Unmarshal.v"), "UnmarshalXML", "UnmarshalText"), + "(*encoding/json.Decoder).Decode": checkNoopMarshalImpl(Arg("(*encoding/json.Decoder).Decode.v"), "UnmarshalJSON", "UnmarshalText"), + "(*encoding/xml.Decoder).Decode": checkNoopMarshalImpl(Arg("(*encoding/xml.Decoder).Decode.v"), "UnmarshalXML", "UnmarshalText"), + } ) +func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { + return func(call *Call) { + arg := call.Args[argN] + T := arg.Value.Value.Type() + Ts, ok := Dereference(T).Underlying().(*types.Struct) + if !ok { + return + } + if Ts.NumFields() == 0 { + return + } + if !HasExportedFieldsR(Ts) { + // OPT(dh): we could use a method set cache here + ms := types.NewMethodSet(T) + // TODO(dh): we're not checking the signature, which can cause false negatives. + // This isn't a huge problem, however, since vet complains about incorrect signatures. + for _, meth := range meths { + if ms.Lookup(nil, meth) != nil { + return + } + } + arg.Invalid("struct doesn't have any exported fields, nor custom marshaling") + } + } +} + type Checker struct { CheckGenerated bool funcDescs *functions.Descriptions @@ -293,6 +336,8 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode}, {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch}, {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration}, + // Filtering generated code because it may include empty structs generated from data models. 
+ {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal)}, } // "SA5006": c.CheckSliceOutOfBounds, diff --git a/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go b/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go new file mode 100644 index 000000000..2a7200005 --- /dev/null +++ b/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go @@ -0,0 +1,121 @@ +package pkg + +import ( + "encoding/json" + "encoding/xml" +) + +type T1 struct{} +type T2 struct{ x int } +type T3 struct{ X int } +type T4 struct{ T3 } +type t5 struct{ X int } +type T6 struct{ t5 } +type T7 struct{ x int } + +func (T7) MarshalJSON() ([]byte, error) { return nil, nil } +func (*T7) UnmarshalJSON([]byte) error { return nil } + +type T8 struct{ x int } + +func (T8) MarshalXML() ([]byte, error) { return nil, nil } +func (*T8) UnmarshalXML(*xml.Decoder, *xml.StartElement) error { return nil } + +type T9 struct{} + +func (T9) MarshalText() ([]byte, error) { return nil, nil } +func (*T9) UnmarshalText([]byte) error { return nil } + +type T10 struct{} +type T11 struct{ T10 } +type T12 struct{ T7 } +type t13 struct{} + +func (t13) MarshalJSON() ([]byte, error) { return nil, nil } + +type T14 struct{ t13 } +type T15 struct{ *t13 } +type T16 struct{ *T3 } +type T17 struct{ *T17 } +type T18 struct { + T17 + Actual int +} + +func fn() { + // don't flag structs with no fields + json.Marshal(T1{}) + // no exported fields + json.Marshal(T2{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + // pointer vs non-pointer makes no difference + json.Marshal(&T2{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + // exported field + json.Marshal(T3{}) + // exported field, pointer makes no difference + json.Marshal(&T3{}) + // embeds struct with exported fields + json.Marshal(T4{}) + // exported field + json.Marshal(t5{}) + // embeds unexported type, but said type does have exported fields + json.Marshal(T6{}) + // MarshalJSON + json.Marshal(T7{}) + // MarshalXML does not apply to JSON + json.Marshal(T8{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + // MarshalText + json.Marshal(T9{}) + // embeds exported struct, but it has no fields + json.Marshal(T11{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + // embeds type with MarshalJSON + json.Marshal(T12{}) + // embeds type with MarshalJSON and type isn't exported + json.Marshal(T14{}) + // embedded pointer with MarshalJSON + json.Marshal(T15{}) + // embedded pointer to struct with exported fields + json.Marshal(T16{}) + // don't recurse forever on recursive data structure + json.Marshal(T17{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T18{}) + + // MarshalJSON does not apply to JSON + xml.Marshal(T7{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + // MarshalXML + xml.Marshal(T8{}) + + var t2 T2 + var t3 T3 + var t7 T7 + var t8 T8 + var t9 T9 + // check that all other variations of methods also work + json.Unmarshal(nil, &t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Unmarshal(nil, &t3) + json.Unmarshal(nil, &t9) + xml.Unmarshal(nil, &t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + xml.Unmarshal(nil, &t3) + xml.Unmarshal(nil, &t9) + (*json.Decoder)(nil).Decode(&t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Decoder)(nil).Decode(&t3) + 
(*json.Decoder)(nil).Decode(&t9) + (*json.Encoder)(nil).Encode(t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Encoder)(nil).Encode(t3) + (*json.Encoder)(nil).Encode(t9) + (*xml.Decoder)(nil).Decode(&t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Decoder)(nil).Decode(&t3) + (*xml.Decoder)(nil).Decode(&t9) + (*xml.Encoder)(nil).Encode(t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Encoder)(nil).Encode(t3) + (*xml.Encoder)(nil).Encode(t9) + + (*json.Decoder)(nil).Decode(&t7) + (*json.Decoder)(nil).Decode(&t8) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Encoder)(nil).Encode(t7) + (*json.Encoder)(nil).Encode(t8) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Decoder)(nil).Decode(&t7) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Decoder)(nil).Decode(&t8) + (*xml.Encoder)(nil).Encode(t7) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Encoder)(nil).Encode(t8) + +} From 8e66885c52b06b23bcbc88fee5fec0ce5af205b6 Mon Sep 17 00:00:00 2001 From: "Iskander (Alex) Sharipov" Date: Fri, 25 Jan 2019 03:37:59 +0300 Subject: [PATCH 023/254] readme: fix typo in installation step overview Closes: gh-401 [via git-merge-pr] --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 90defe66c..aefbde692 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ checks or backwards incompatible changes that break your build. By using the master branch you agree to become a beta tester. To use the master branch, a simple `go get -u -honnef.co/go/tools/cmd....` suffices. You can also install a subset of +honnef.co/go/tools/cmd/...` suffices. You can also install a subset of the commands, for example only staticcheck with `go get -u honnef.co/go/tools/cmd/staticcheck`. From 926d74b3f82c93e2f06fcbd2bb91bf5810236904 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 27 Jan 2019 23:49:49 +0100 Subject: [PATCH 024/254] unused: don't store AST nodes in graph --- unused/unused.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index c1088c7ad..9c69277ee 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -594,15 +594,15 @@ func (c *Checker) processTypes(pkg *lint.Pkg) { func (c *Checker) processSelections(pkg *lint.Pkg) { fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) { scope := pkg.Types.Scope().Innermost(expr.Pos()) - c.graph.markUsedBy(expr.X, c.topmostScope(scope, pkg.Types)) - c.graph.markUsedBy(sel.Obj(), expr.X) + c.graph.markUsedBy(sel, c.topmostScope(scope, pkg.Types)) + c.graph.markUsedBy(sel.Obj(), sel) if len(sel.Index()) > 1 { typ := sel.Recv() indices := sel.Index() for _, idx := range indices[:len(indices)-offset] { obj := getField(typ, idx) typ = obj.Type() - c.graph.markUsedBy(obj, expr.X) + c.graph.markUsedBy(obj, sel) } } } From 3bb9299f9c1c5730317be667fe3947697dd3d6b8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 28 Jan 2019 03:39:17 +0100 Subject: [PATCH 025/254] unused: reduce number of calls to types.Implements types.Implements is a relatively expensive operation, producing a lot of garbage in lookupFieldOrMethod. When I tested 2019.1 on github.com/cockroachdb/cockroach/pkg/..., lookupFieldOrMethod allocated 23 GB of garbage. 
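In short, the loop now issues a single Implements query per candidate type instead of two. A condensed sketch of the change (same identifiers as in the hunk below; it relies on the fact that the method set of *T includes the methods of T):

    // before: two queries per candidate type
    if !types.Implements(obj, iface) && !types.Implements(objPtr, iface) {
    	continue
    }

    // after: one query, chosen by the kind of the underlying type
    switch obj.Underlying().(type) {
    case *types.Interface:
    	// pointers to interfaces have no methods, so check the value form
    	if !types.Implements(obj, iface) {
    		continue
    	}
    default:
    	// *T's method set is a superset of T's, so checking *T suffices
    	if !types.Implements(objPtr, iface) {
    		continue
    	}
    }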
Updates gh-394 --- unused/unused.go | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 9c69277ee..c16f3fe3a 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -552,10 +552,22 @@ func (c *Checker) processTypes(pkg *lint.Pkg) { for i := 0; i < iface.NumEmbeddeds(); i++ { c.graph.markUsedBy(iface.Embedded(i), iface) } + namedLoop: for obj, objPtr := range named { - if !types.Implements(obj, iface) && !types.Implements(objPtr, iface) { - continue + switch obj.Underlying().(type) { + case *types.Interface: + // pointers to interfaces have no methods, only checking non-pointer + if !types.Implements(obj, iface) { + continue namedLoop + } + default: + // pointer receivers include the method set of non-pointer receivers, + // only checking pointer + if !types.Implements(objPtr, iface) { + continue namedLoop + } } + ifaceMethods := make(map[string]struct{}, iface.NumMethods()) n := iface.NumMethods() for i := 0; i < n; i++ { From 84ddbfd6bc0aab5d24aa5f3e8ea99e6966fbbe8f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 28 Jan 2019 05:07:35 +0100 Subject: [PATCH 026/254] unused: use more efficient types.Implements implementation The standard types.Implements implementation produces a lot of garbage in lookupFieldOrMethod. By using MethodSetCache and method sets, we can avoid a lot of this garbage. This directly translates to lower runtimes and lower peak memory usage. Compared with 442643a, for std and cockroachdb, we get the following numbers: Total allocations std: 6268 MB -> 4702 MB cdb: 28398 MB -> 19048 MB Peak rss std: 4,556,404 -> 3,253,004 cdb: 17,403,616 -> 12,546,948 Time std: 0:25.18 -> 0:21.84 cdb: 1:45.70 -> 1:30.74 Updates gh-394 --- unused/implements.go | 79 ++++++++++++++++++++++++++++++++++++++++++++ unused/unused.go | 5 +-- 2 files changed, 82 insertions(+), 2 deletions(-) create mode 100644 unused/implements.go diff --git a/unused/implements.go b/unused/implements.go new file mode 100644 index 000000000..78a545639 --- /dev/null +++ b/unused/implements.go @@ -0,0 +1,79 @@ +package unused + +import "go/types" + +// lookupMethod returns the index of and method with matching package and name, or (-1, nil). +func lookupMethod(T *types.Interface, pkg *types.Package, name string) (int, *types.Func) { + if name != "_" { + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + if sameId(m, pkg, name) { + return i, m + } + } + } + return -1, nil +} + +func sameId(obj types.Object, pkg *types.Package, name string) bool { + // spec: + // "Two identifiers are different if they are spelled differently, + // or if they appear in different packages and are not exported. + // Otherwise, they are the same." 
+ if name != obj.Name() { + return false + } + // obj.Name == name + if obj.Exported() { + return true + } + // not exported, so packages must be the same (pkg == nil for + // fields in Universe scope; this can only happen for types + // introduced via Eval) + if pkg == nil || obj.Pkg() == nil { + return pkg == obj.Pkg() + } + // pkg != nil && obj.pkg != nil + return pkg.Path() == obj.Pkg().Path() +} + +func (c *Checker) implements(V types.Type, T *types.Interface) bool { + // fast path for common case + if T.Empty() { + return true + } + + if ityp, _ := V.Underlying().(*types.Interface); ityp != nil { + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + _, obj := lookupMethod(ityp, m.Pkg(), m.Name()) + switch { + case obj == nil: + return false + case !types.Identical(obj.Type(), m.Type()): + return false + } + } + return true + } + + // A concrete type implements T if it implements all methods of T. + ms := c.msCache.MethodSet(V) + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + sel := ms.Lookup(m.Pkg(), m.Name()) + if sel == nil { + return false + } + + f, _ := sel.Obj().(*types.Func) + if f == nil { + return false + } + + if !types.Identical(f.Type(), m.Type()) { + return false + } + } + return true +} diff --git a/unused/unused.go b/unused/unused.go index c16f3fe3a..b1dbd6f54 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -265,6 +265,7 @@ func (c *Checker) Check(prog *lint.Program) []Unused { unused = append(unused, Unused{Obj: obj, Position: pos}) } + return unused } @@ -557,13 +558,13 @@ func (c *Checker) processTypes(pkg *lint.Pkg) { switch obj.Underlying().(type) { case *types.Interface: // pointers to interfaces have no methods, only checking non-pointer - if !types.Implements(obj, iface) { + if !c.implements(obj, iface) { continue namedLoop } default: // pointer receivers include the method set of non-pointer receivers, // only checking pointer - if !types.Implements(objPtr, iface) { + if !c.implements(objPtr, iface) { continue namedLoop } } From 0068a30425e36a70a1d97c93f7c924c0bf0af65c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 27 Jan 2019 21:55:59 +0100 Subject: [PATCH 027/254] lint/lintdsl: implement FlattenFields, use in HasExportedFieldsR --- lint/lintdsl/lintdsl.go | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 31c160939..0c3b97412 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -342,29 +342,40 @@ func IsObject(obj types.Object, name string) bool { } func HasExportedFieldsR(T *types.Struct) bool { - return hasExportedFieldsR(T, nil) + fields := FlattenFields(T) + for _, field := range fields { + if ast.IsExported(field.Name()) { + return true + } + } + return false +} + +// FlattenFields recursively flattens T and embedded structs, +// returning a list of fields. If multiple fields with the same name +// exist, all will be returned. 
+func FlattenFields(T *types.Struct) []*types.Var { + return flattenFields(T, nil) } -func hasExportedFieldsR(T *types.Struct, seen map[types.Type]bool) bool { +func flattenFields(T *types.Struct, seen map[types.Type]bool) []*types.Var { if seen == nil { seen = map[types.Type]bool{} } if seen[T] { - return false + return nil } seen[T] = true + var out []*types.Var for i := 0; i < T.NumFields(); i++ { field := T.Field(i) if field.Anonymous() { - s, ok := Dereference(field.Type()).Underlying().(*types.Struct) - if ok && hasExportedFieldsR(s, seen) { - return true + if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { + out = append(out, flattenFields(s, seen)...) } } else { - if ast.IsExported(field.Name()) { - return true - } + out = append(out, field) } } - return false + return out } From a958caf0eadc4d9f462a223099336653d6815b21 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 27 Jan 2019 21:58:45 +0100 Subject: [PATCH 028/254] lint/lintdsl: remove HasExportedFieldsR Inline only caller --- lint/lintdsl/lintdsl.go | 10 ---------- staticcheck/lint.go | 24 ++++++++++++++---------- 2 files changed, 14 insertions(+), 20 deletions(-) diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 0c3b97412..a70971908 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -341,16 +341,6 @@ func IsObject(obj types.Object, name string) bool { return path+obj.Name() == name } -func HasExportedFieldsR(T *types.Struct) bool { - fields := FlattenFields(T) - for _, field := range fields { - if ast.IsExported(field.Name()) { - return true - } - } - return false -} - // FlattenFields recursively flattens T and embedded structs, // returning a list of fields. If multiple fields with the same name // exist, all will be returned. diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 64821f4b5..843b91ac1 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -234,18 +234,22 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { if Ts.NumFields() == 0 { return } - if !HasExportedFieldsR(Ts) { - // OPT(dh): we could use a method set cache here - ms := types.NewMethodSet(T) - // TODO(dh): we're not checking the signature, which can cause false negatives. - // This isn't a huge problem, however, since vet complains about incorrect signatures. - for _, meth := range meths { - if ms.Lookup(nil, meth) != nil { - return - } + fields := FlattenFields(Ts) + for _, field := range fields { + if ast.IsExported(field.Name()) { + return + } + } + // OPT(dh): we could use a method set cache here + ms := types.NewMethodSet(T) + // TODO(dh): we're not checking the signature, which can cause false negatives. + // This isn't a huge problem, however, since vet complains about incorrect signatures. 
+ for _, meth := range meths { + if ms.Lookup(nil, meth) != nil { + return } - arg.Invalid("struct doesn't have any exported fields, nor custom marshaling") } + arg.Invalid("struct doesn't have any exported fields, nor custom marshaling") } } From f51a370591f6dd37d9e4f39534044451d9f4bcfa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 27 Jan 2019 22:56:40 +0100 Subject: [PATCH 029/254] staticcheck: flag marshaling of unmarshable types --- lint/lintdsl/lintdsl.go | 14 ++-- staticcheck/lint.go | 53 +++++++++++++- .../CheckUnsupportedMarshal.go | 70 +++++++++++++++++++ 3 files changed, 132 insertions(+), 5 deletions(-) create mode 100644 staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index a70971908..a600c4705 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -341,14 +341,19 @@ func IsObject(obj types.Object, name string) bool { return path+obj.Name() == name } +type Field struct { + Var *types.Var + Tag string +} + // FlattenFields recursively flattens T and embedded structs, // returning a list of fields. If multiple fields with the same name // exist, all will be returned. -func FlattenFields(T *types.Struct) []*types.Var { +func FlattenFields(T *types.Struct) []Field { return flattenFields(T, nil) } -func flattenFields(T *types.Struct, seen map[types.Type]bool) []*types.Var { +func flattenFields(T *types.Struct, seen map[types.Type]bool) []Field { if seen == nil { seen = map[types.Type]bool{} } @@ -356,15 +361,16 @@ func flattenFields(T *types.Struct, seen map[types.Type]bool) []*types.Var { return nil } seen[T] = true - var out []*types.Var + var out []Field for i := 0; i < T.NumFields(); i++ { field := T.Field(i) + tag := T.Tag(i) if field.Anonymous() { if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { out = append(out, flattenFields(s, seen)...) 
} } else { - out = append(out, field) + out = append(out, Field{field, tag}) } } return out diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 843b91ac1..50c17e55a 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -9,6 +9,7 @@ import ( "go/types" htmltemplate "html/template" "net/http" + "reflect" "regexp" "regexp/syntax" "sort" @@ -221,6 +222,13 @@ var ( "(*encoding/json.Decoder).Decode": checkNoopMarshalImpl(Arg("(*encoding/json.Decoder).Decode.v"), "UnmarshalJSON", "UnmarshalText"), "(*encoding/xml.Decoder).Decode": checkNoopMarshalImpl(Arg("(*encoding/xml.Decoder).Decode.v"), "UnmarshalXML", "UnmarshalText"), } + + checkUnsupportedMarshal = map[string]CallCheck{ + "encoding/json.Marshal": checkUnsupportedMarshalImpl(Arg("json.Marshal.v"), "json", "MarshalJSON", "MarshalText"), + "encoding/xml.Marshal": checkUnsupportedMarshalImpl(Arg("xml.Marshal.v"), "xml", "MarshalXML", "MarshalText"), + "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/json.Encoder).Encode.v"), "json", "MarshalJSON", "MarshalText"), + "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/xml.Encoder).Encode.v"), "xml", "MarshalXML", "MarshalText"), + } ) func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { @@ -236,7 +244,7 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { } fields := FlattenFields(Ts) for _, field := range fields { - if ast.IsExported(field.Name()) { + if field.Var.Exported() { return } } @@ -253,6 +261,48 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { } } +func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallCheck { + return func(call *Call) { + arg := call.Args[argN] + T := arg.Value.Value.Type() + Ts, ok := Dereference(T).Underlying().(*types.Struct) + if !ok { + return + } + // OPT(dh): we could use a method set cache here + ms := types.NewMethodSet(T) + // TODO(dh): we're not checking the signature, which can cause false negatives. + // This isn't a huge problem, however, since vet complains about incorrect signatures. + for _, meth := range meths { + if ms.Lookup(nil, meth) != nil { + return + } + } + fields := FlattenFields(Ts) + for _, field := range fields { + if !(field.Var.Exported()) { + continue + } + if reflect.StructTag(field.Tag).Get(tag) == "-" { + continue + } + // OPT(dh): we could use a method set cache here + ms := types.NewMethodSet(field.Var.Type()) + // TODO(dh): we're not checking the signature, which can cause false negatives. + // This isn't a huge problem, however, since vet complains about incorrect signatures. 
+ for _, meth := range meths { + if ms.Lookup(nil, meth) != nil { + return + } + } + switch field.Var.Type().Underlying().(type) { + case *types.Chan, *types.Signature: + arg.Invalid("trying to marshal chan or func value") + } + } + } +} + type Checker struct { CheckGenerated bool funcDescs *functions.Descriptions @@ -292,6 +342,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified}, {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules)}, {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue}, + {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal)}, {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd}, {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection}, diff --git a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go new file mode 100644 index 000000000..b28d17027 --- /dev/null +++ b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go @@ -0,0 +1,70 @@ +package pkg + +import ( + "encoding/json" + "encoding/xml" +) + +type T1 struct { + A int + B func() `json:"-" xml:"-"` + c chan int +} + +type T2 struct { + T1 +} + +type T3 struct { + C chan int +} + +type T4 struct { + C C +} + +type T5 struct { + B func() `xml:"-"` +} + +type T6 struct { + B func() `json:"-"` +} + +type C chan int + +func (C) MarshalText() ([]byte, error) { return nil, nil } + +func fn() { + var t1 T1 + var t2 T2 + var t3 T3 + var t4 T4 + var t5 T5 + var t6 T6 + json.Marshal(t1) + json.Marshal(t2) + json.Marshal(t3) // MATCH "trying to marshal chan or func value" + json.Marshal(t4) + json.Marshal(t5) // MATCH "trying to marshal chan or func value" + json.Marshal(t6) + (*json.Encoder)(nil).Encode(t1) + (*json.Encoder)(nil).Encode(t2) + (*json.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value" + (*json.Encoder)(nil).Encode(t4) + (*json.Encoder)(nil).Encode(t5) // MATCH "trying to marshal chan or func value" + (*json.Encoder)(nil).Encode(t6) + + xml.Marshal(t1) + xml.Marshal(t2) + xml.Marshal(t3) // MATCH "trying to marshal chan or func value" + xml.Marshal(t4) + xml.Marshal(t5) + xml.Marshal(t6) // MATCH "trying to marshal chan or func value" + (*xml.Encoder)(nil).Encode(t1) + (*xml.Encoder)(nil).Encode(t2) + (*xml.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value" + (*xml.Encoder)(nil).Encode(t4) + (*xml.Encoder)(nil).Encode(t5) + (*xml.Encoder)(nil).Encode(t6) // MATCH "trying to marshal chan or func value" +} From 71123fcbb8fe554a51e76ae6bab8a6f40a12db78 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 27 Jan 2019 23:12:17 +0100 Subject: [PATCH 030/254] staticcheck: print paths to unsupported fields --- lint/lintdsl/lintdsl.go | 14 +++++---- staticcheck/lint.go | 13 +++++++- .../CheckUnsupportedMarshal.go | 30 ++++++++++++++----- 3 files changed, 42 insertions(+), 15 deletions(-) diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index a600c4705..2f614c9b7 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -342,18 +342,19 @@ func IsObject(obj types.Object, name string) bool { } type Field struct { - Var *types.Var - Tag string + Var *types.Var + Tag string + Path []int } // FlattenFields recursively flattens T and embedded structs, // returning a list of fields. If multiple fields with the same name // exist, all will be returned. 
func FlattenFields(T *types.Struct) []Field { - return flattenFields(T, nil) + return flattenFields(T, nil, nil) } -func flattenFields(T *types.Struct, seen map[types.Type]bool) []Field { +func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field { if seen == nil { seen = map[types.Type]bool{} } @@ -365,12 +366,13 @@ func flattenFields(T *types.Struct, seen map[types.Type]bool) []Field { for i := 0; i < T.NumFields(); i++ { field := T.Field(i) tag := T.Tag(i) + np := append(path[:len(path):len(path)], i) if field.Anonymous() { if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { - out = append(out, flattenFields(s, seen)...) + out = append(out, flattenFields(s, np, seen)...) } } else { - out = append(out, Field{field, tag}) + out = append(out, Field{field, tag, np}) } } return out diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 50c17e55a..0c848880d 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -262,6 +262,7 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { } func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallCheck { + // TODO(dh): flag slices and maps of unsupported types return func(call *Call) { arg := call.Args[argN] T := arg.Value.Value.Type() @@ -297,12 +298,22 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec } switch field.Var.Type().Underlying().(type) { case *types.Chan, *types.Signature: - arg.Invalid("trying to marshal chan or func value") + arg.Invalid(fmt.Sprintf("trying to marshal chan or func value, field %s", fieldPath(T, field.Path))) } } } } +func fieldPath(start types.Type, indices []int) string { + p := start.String() + for _, idx := range indices { + field := Dereference(start).Underlying().(*types.Struct).Field(idx) + start = field.Type() + p += "." 
+ field.Name() + } + return p +} + type Checker struct { CheckGenerated bool funcDescs *functions.Descriptions diff --git a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go index b28d17027..e02b90fc8 100644 --- a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go +++ b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go @@ -31,6 +31,17 @@ type T6 struct { B func() `json:"-"` } +type T7 struct { + A int + B int + T3 +} + +type T8 struct { + C int + *T7 +} + type C chan int func (C) MarshalText() ([]byte, error) { return nil, nil } @@ -42,29 +53,32 @@ func fn() { var t4 T4 var t5 T5 var t6 T6 + var t8 T8 json.Marshal(t1) json.Marshal(t2) - json.Marshal(t3) // MATCH "trying to marshal chan or func value" + json.Marshal(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" json.Marshal(t4) - json.Marshal(t5) // MATCH "trying to marshal chan or func value" + json.Marshal(t5) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T5.B" json.Marshal(t6) (*json.Encoder)(nil).Encode(t1) (*json.Encoder)(nil).Encode(t2) - (*json.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value" + (*json.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" (*json.Encoder)(nil).Encode(t4) - (*json.Encoder)(nil).Encode(t5) // MATCH "trying to marshal chan or func value" + (*json.Encoder)(nil).Encode(t5) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T5.B" (*json.Encoder)(nil).Encode(t6) xml.Marshal(t1) xml.Marshal(t2) - xml.Marshal(t3) // MATCH "trying to marshal chan or func value" + xml.Marshal(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" xml.Marshal(t4) xml.Marshal(t5) - xml.Marshal(t6) // MATCH "trying to marshal chan or func value" + xml.Marshal(t6) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T6.B" (*xml.Encoder)(nil).Encode(t1) (*xml.Encoder)(nil).Encode(t2) - (*xml.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value" + (*xml.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" (*xml.Encoder)(nil).Encode(t4) (*xml.Encoder)(nil).Encode(t5) - (*xml.Encoder)(nil).Encode(t6) // MATCH "trying to marshal chan or func value" + (*xml.Encoder)(nil).Encode(t6) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T6.B" + + json.Marshal(t8) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T8.T7.T3.C" } From 466a0476246c45c74e011a03de704d72dc83da2d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 Feb 2019 05:12:34 +0100 Subject: [PATCH 031/254] staticcheck: flag imports of deprecated packages Closes gh-413 --- staticcheck/lint.go | 44 ++++++++++++++++--- .../src/CheckDeprecated/CheckDeprecated.go | 3 ++ .../CheckDeprecatedassist.go | 4 ++ 3 files changed, 46 insertions(+), 5 deletions(-) create mode 100644 staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go create mode 100644 staticcheck/testdata/src/CheckDeprecatedassist/CheckDeprecatedassist.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 0c848880d..69389844e 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -317,6 +317,7 @@ func fieldPath(start types.Type, indices []int) string { type Checker struct { 
CheckGenerated bool funcDescs *functions.Descriptions + deprecatedPkgs map[*types.Package]string deprecatedObjs map[types.Object]string } @@ -411,11 +412,9 @@ func (c *Checker) Checks() []lint.Check { } func (c *Checker) findDeprecated(prog *lint.Program) { - var docs []*ast.CommentGroup var names []*ast.Ident - doDocs := func(pkg *packages.Package, names []*ast.Ident, docs []*ast.CommentGroup) { - var alt string + extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { for _, doc := range docs { if doc == nil { continue @@ -425,10 +424,14 @@ func (c *Checker) findDeprecated(prog *lint.Program) { if !strings.HasPrefix(last, "Deprecated: ") { continue } - alt = last[len("Deprecated: "):] + alt := last[len("Deprecated: "):] alt = strings.Replace(alt, "\n", " ", -1) - break + return alt } + return "" + } + doDocs := func(pkg *packages.Package, names []*ast.Ident, docs []*ast.CommentGroup) { + alt := extractDeprecatedMessage(docs) if alt == "" { return } @@ -440,6 +443,21 @@ func (c *Checker) findDeprecated(prog *lint.Program) { } for _, pkg := range prog.AllPackages { + var docs []*ast.CommentGroup + for _, f := range pkg.Syntax { + docs = append(docs, f.Doc) + } + if alt := extractDeprecatedMessage(docs); alt != "" { + // Don't mark package syscall as deprecated, even though + // it is. A lot of people still use it for simple + // constants like SIGKILL, and I am not comfortable + // telling them to use x/sys for that. + if pkg.PkgPath != "syscall" { + c.deprecatedPkgs[pkg.Types] = alt + } + } + + docs = docs[:0] for _, f := range pkg.Syntax { fn := func(node ast.Node) bool { if node == nil { @@ -511,6 +529,7 @@ func (c *Checker) Init(prog *lint.Program) { }() go func() { + c.deprecatedPkgs = map[*types.Package]string{} c.deprecatedObjs = map[types.Object]string{} c.findDeprecated(prog) wg.Done() @@ -2555,6 +2574,21 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { } return true } + for _, pkg := range j.Program.InitialPackages { + for _, f := range pkg.Syntax { + ast.Inspect(f, func(node ast.Node) bool { + if node, ok := node.(*ast.ImportSpec); ok { + p := node.Path.Value + path := p[1 : len(p)-1] + imp := pkg.Imports[path] + if alt := c.deprecatedPkgs[imp.Types]; alt != "" { + j.Errorf(node, "Package %s is deprecated: %s", path, alt) + } + } + return true + }) + } + } for _, f := range j.Program.Files { ast.Inspect(f, fn) } diff --git a/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go new file mode 100644 index 000000000..2a1189328 --- /dev/null +++ b/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go @@ -0,0 +1,3 @@ +package pkg + +import _ "CheckDeprecatedassist" // MATCH "Alas, it is deprecated." diff --git a/staticcheck/testdata/src/CheckDeprecatedassist/CheckDeprecatedassist.go b/staticcheck/testdata/src/CheckDeprecatedassist/CheckDeprecatedassist.go new file mode 100644 index 000000000..3a7a3f023 --- /dev/null +++ b/staticcheck/testdata/src/CheckDeprecatedassist/CheckDeprecatedassist.go @@ -0,0 +1,4 @@ +// Package pkg is a nice package. +// +// Deprecated: Alas, it is deprecated. 
+package pkg From 3806f745e4224e69da0d65d9cc58a1531d231e39 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 Mar 2019 15:39:18 +0100 Subject: [PATCH 032/254] stylecheck: flag all zero-width and some control characters in string literals Closes gh-423 --- stylecheck/lint.go | 21 +++++++++++++++++++ .../CheckInvisibleCharacters.go | 15 +++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go diff --git a/stylecheck/lint.go b/stylecheck/lint.go index ee7efa450..69221987c 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -49,6 +49,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder}, {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical}, {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions}, + {ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters}, } } @@ -641,3 +642,23 @@ func (c *Checker) CheckYodaConditions(j *lint.Job) { ast.Inspect(f, fn) } } + +func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { + fn := func(node ast.Node) bool { + lit, ok := node.(*ast.BasicLit) + if !ok || lit.Kind != token.STRING { + return true + } + for _, r := range lit.Value { + if unicode.Is(unicode.Cf, r) { + j.Errorf(lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) + } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' { + j.Errorf(lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) + } + } + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go new file mode 100644 index 000000000..152505f69 --- /dev/null +++ b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go @@ -0,0 +1,15 @@ +// Package pkg ... 
+package pkg + +var ( + a = "" // MATCH "Unicode control character U+0007" + b = "" + c = "Test test" + d = `T +est` + e = `Zero​Width` // MATCH "Unicode format character U+200B" + f = "\u200b" +) + +// MATCH:6 "Unicode control character U+0007" +// MATCH:6 "Unicode control character U+001A" From d3cee469ff019b583e8d772e3602784a18435757 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 Mar 2019 15:53:31 +0100 Subject: [PATCH 033/254] config: add SIP and RTP as default initialisms --- config/config.go | 2 +- config/example.conf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.go b/config/config.go index 112980b49..cfde5d51a 100644 --- a/config/config.go +++ b/config/config.go @@ -82,7 +82,7 @@ var defaultConfig = Config{ "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS", + "XSS", "SIP", "RTP", }, DotImportWhitelist: []string{}, HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"}, diff --git a/config/example.conf b/config/example.conf index 5ffc597f9..a715a24d4 100644 --- a/config/example.conf +++ b/config/example.conf @@ -5,6 +5,6 @@ initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS"] + "XSS", "SIP", "RTP"] dot_import_whitelist = [] http_status_code_whitelist = ["200", "400", "404", "500"] From a70856f86fcb8123ed746a1a0eebcd92cf565931 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 Mar 2019 19:18:59 +0100 Subject: [PATCH 034/254] staticcheck: flag append with single argument Closes gh-414 --- staticcheck/lint.go | 18 ++++++++++++++++++ .../CheckSingleArgAppend.go | 15 +++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 69389844e..40c3ff0f1 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -383,6 +383,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment}, {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints}, {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases}, + {ID: "SA4021", FilterGenerated: true, Fn: c.CheckSingleArgAppend}, {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps}, {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer}, @@ -3151,3 +3152,20 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { ast.Inspect(f, fn) } } + +func (c *Checker) CheckSingleArgAppend(j *lint.Job) { + fn := func(node ast.Node) bool { + if !IsCallToAST(j, node, "append") { + return true + } + call, _ := node.(*ast.CallExpr) + if len(call.Args) != 1 { + return true + } + j.Errorf(call, "x = append(y) is equivalent to x = y") + return true + } + for _, f := range j.Program.Files { + ast.Inspect(f, fn) + } +} diff --git a/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go b/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go new file mode 100644 index 000000000..27f9bd600 --- /dev/null +++ b/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go @@ -0,0 +1,15 @@ +package pkg + +//lint:file-ignore SA4010,SA4006 Not relevant to this test case + +func fn(arg []int) { + x := append(arg) // MATCH "x = append(y) is equivalent to x = y" + _ = x + y := append(arg, 1) + _ = y + arg = append(arg) 
// MATCH "x = append(y) is equivalent to x = y" + arg = append(arg, 1, 2, 3) + var nilly []int + arg = append(arg, nilly...) + arg = append(arg, arg...) +} From 090066f55abbfddf52ca80f5646088e39c569470 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 Mar 2019 19:44:17 +0100 Subject: [PATCH 035/254] simple: don't merge declaration and assignments if there are multiple assignments Closes gh-415 --- simple/lint.go | 26 +++++++++++++++++++ .../LintDeclareAssign/LintDeclareAssign.go | 11 ++++++++ 2 files changed, 37 insertions(+) diff --git a/simple/lint.go b/simple/lint.go index 1d96713f6..7f421b414 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -1508,6 +1508,28 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } func (c *Checker) LintDeclareAssign(j *lint.Job) { + hasMultipleAssignments := func(root ast.Node, ident *ast.Ident) bool { + num := 0 + ast.Inspect(root, func(node ast.Node) bool { + if num >= 2 { + return false + } + assign, ok := node.(*ast.AssignStmt) + if !ok { + return true + } + for _, lhs := range assign.Lhs { + if oident, ok := lhs.(*ast.Ident); ok { + if oident.Obj == ident.Obj { + num++ + } + } + } + + return true + }) + return num >= 2 + } fn := func(node ast.Node) bool { block, ok := node.(*ast.BlockStmt) if !ok { @@ -1549,6 +1571,10 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { if refersTo(j, assign.Rhs[0], ident) { continue } + if hasMultipleAssignments(block, ident) { + continue + } + j.Errorf(decl, "should merge variable declaration with assignment on next line") } return true diff --git a/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go b/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go index 3f101c885..424a3668f 100644 --- a/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go +++ b/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go @@ -22,4 +22,15 @@ func fn() { var f func() f = func() { f() } _ = f + + var a int + a = 1 + a = 2 + _ = a + + var b int + b = 1 + // do stuff + b = 2 + _ = b } From 8a21b1042b47f4b7923d516660c687049931268d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 15 Mar 2019 19:51:38 +0100 Subject: [PATCH 036/254] stylecheck: allow error messages to start with type names Closes gh-408 --- stylecheck/lint.go | 22 +++++++++++-------- .../CheckErrorStrings/CheckErrorStrings.go | 1 + 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 69221987c..86031be3b 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -338,14 +338,18 @@ fnLoop: } func (c *Checker) CheckErrorStrings(j *lint.Job) { - fnNames := map[*ssa.Package]map[string]bool{} - for _, fn := range j.Program.InitialFunctions { - m := fnNames[fn.Package()] - if m == nil { - m = map[string]bool{} - fnNames[fn.Package()] = m + objNames := map[*ssa.Package]map[string]bool{} + for _, pkg := range j.Program.InitialPackages { + ssapkg := pkg.SSA + objNames[ssapkg] = map[string]bool{} + for _, m := range ssapkg.Members { + if typ, ok := m.(*ssa.Type); ok { + objNames[ssapkg][typ.Name()] = true + } } - m[fn.Name()] = true + } + for _, fn := range j.Program.InitialFunctions { + objNames[fn.Package()][fn.Name()] = true } for _, fn := range j.Program.InitialFunctions { @@ -400,8 +404,8 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) { } word = strings.TrimRightFunc(word, func(r rune) bool { return unicode.IsPunct(r) }) - if fnNames[fn.Package()][word] { - // Word is probably the name of a function in this package + if objNames[fn.Package()][word] { + // Word is probably 
the name of a function or type in this package continue } // First word in error starts with a capital diff --git a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go index de7f56f1f..0de45215b 100644 --- a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go +++ b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go @@ -10,6 +10,7 @@ func fn() { errors.New("URL is okay") errors.New("SomeFunc is okay") errors.New("URL is okay, but the period is not.") // MATCH "error strings should not end with punctuation or a newline" + errors.New("T must not be nil") } func Write() { From 1fb8c4841f161d6df5cfc6852bf14cd3bbcd016d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 16 Mar 2019 15:11:22 +0100 Subject: [PATCH 037/254] unused: anonymous structs can implement interfaces --- unused/testdata/src/anonymous/anonymous.go | 18 ++++++++++++++++++ unused/unused.go | 7 ++++--- 2 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 unused/testdata/src/anonymous/anonymous.go diff --git a/unused/testdata/src/anonymous/anonymous.go b/unused/testdata/src/anonymous/anonymous.go new file mode 100644 index 000000000..d0577a737 --- /dev/null +++ b/unused/testdata/src/anonymous/anonymous.go @@ -0,0 +1,18 @@ +package pkg + +import "fmt" + +type Node interface { + position() int +} + +type noder struct{} + +func (noder) position() int { panic("unreachable") } + +func Fn() { + nodes := []Node{struct { + noder + }{}} + fmt.Println(nodes) +} diff --git a/unused/unused.go b/unused/unused.go index b1dbd6f54..e962c16bb 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -514,7 +514,7 @@ func (c *Checker) findExportedInterfaces() { } func (c *Checker) processTypes(pkg *lint.Pkg) { - named := map[*types.Named]*types.Pointer{} + implementers := map[types.Type]*types.Pointer{} var interfaces []*types.Interface for _, tv := range pkg.TypesInfo.Types { if typ, ok := tv.Type.(interface { @@ -525,7 +525,7 @@ func (c *Checker) processTypes(pkg *lint.Pkg) { switch obj := tv.Type.(type) { case *types.Named: - named[obj] = types.NewPointer(obj) + implementers[obj] = types.NewPointer(obj) c.graph.markUsedBy(obj, obj.Underlying()) c.graph.markUsedBy(obj.Underlying(), obj) case *types.Interface: @@ -533,6 +533,7 @@ func (c *Checker) processTypes(pkg *lint.Pkg) { interfaces = append(interfaces, obj) } case *types.Struct: + implementers[obj] = types.NewPointer(obj) c.useNoCopyFields(obj) if pkg.Types.Name() != "main" && !c.WholeProgram { c.useExportedFields(obj, obj) @@ -554,7 +555,7 @@ func (c *Checker) processTypes(pkg *lint.Pkg) { c.graph.markUsedBy(iface.Embedded(i), iface) } namedLoop: - for obj, objPtr := range named { + for obj, objPtr := range implementers { switch obj.Underlying().(type) { case *types.Interface: // pointers to interfaces have no methods, only checking non-pointer From b36f0ad77138818d7745f01fe1c52dc8727113df Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 17 Mar 2019 05:06:04 +0100 Subject: [PATCH 038/254] unused2: work towards a fresh implementation of unused --- unused2/implements.go | 79 ++ unused2/testdata/src/anonymous/anonymous.go | 18 + unused2/testdata/src/blank/blank.go | 26 + unused2/testdata/src/cgo/cgo.go | 6 + unused2/testdata/src/consts/consts.go | 18 + unused2/testdata/src/conversion/conversion.go | 92 ++ unused2/testdata/src/cyclic/cyclic.go | 9 + unused2/testdata/src/elem/elem.go | 16 + .../src/embedded_call/embedded_call.go | 21 + 
unused2/testdata/src/embedding/embedding.go | 64 ++ .../src/exported_fields/exported_fields.go | 36 + .../exported_fields_main.go | 14 + .../exported_method_test/exported_method.go | 1 + .../exported_method_test.go | 28 + unused2/testdata/src/fields/fields.go | 68 ++ unused2/testdata/src/functions/functions.go | 36 + unused2/testdata/src/generated1/generated1.go | 5 + unused2/testdata/src/generated2/generated2.go | 5 + unused2/testdata/src/interfaces/interfaces.go | 15 + unused2/testdata/src/main/main.go | 14 + unused2/testdata/src/mapslice/mapslice.go | 8 + unused2/testdata/src/methods/methods.go | 14 + unused2/testdata/src/nested/nested.go | 17 + .../testdata/src/nocopy-main/nocopy-main.go | 24 + unused2/testdata/src/nocopy/nocopy.go | 20 + unused2/testdata/src/only_in_test/pkg.go | 3 + unused2/testdata/src/only_in_test/pkg_test.go | 7 + .../pointer-type-embedding.go | 17 + unused2/testdata/src/selectors/selectors.go | 14 + .../src/switch_interface/switch_interface.go | 19 + .../src/unused-argument/unused-argument.go | 10 + .../testdata/src/unused_type/unused_type.go | 17 + unused2/unused.go | 889 ++++++++++++++++++ unused2/unused_test.go | 19 + 34 files changed, 1649 insertions(+) create mode 100644 unused2/implements.go create mode 100644 unused2/testdata/src/anonymous/anonymous.go create mode 100644 unused2/testdata/src/blank/blank.go create mode 100644 unused2/testdata/src/cgo/cgo.go create mode 100644 unused2/testdata/src/consts/consts.go create mode 100644 unused2/testdata/src/conversion/conversion.go create mode 100644 unused2/testdata/src/cyclic/cyclic.go create mode 100644 unused2/testdata/src/elem/elem.go create mode 100644 unused2/testdata/src/embedded_call/embedded_call.go create mode 100644 unused2/testdata/src/embedding/embedding.go create mode 100644 unused2/testdata/src/exported_fields/exported_fields.go create mode 100644 unused2/testdata/src/exported_fields_main/exported_fields_main.go create mode 100644 unused2/testdata/src/exported_method_test/exported_method.go create mode 100644 unused2/testdata/src/exported_method_test/exported_method_test.go create mode 100644 unused2/testdata/src/fields/fields.go create mode 100644 unused2/testdata/src/functions/functions.go create mode 100644 unused2/testdata/src/generated1/generated1.go create mode 100644 unused2/testdata/src/generated2/generated2.go create mode 100644 unused2/testdata/src/interfaces/interfaces.go create mode 100644 unused2/testdata/src/main/main.go create mode 100644 unused2/testdata/src/mapslice/mapslice.go create mode 100644 unused2/testdata/src/methods/methods.go create mode 100644 unused2/testdata/src/nested/nested.go create mode 100644 unused2/testdata/src/nocopy-main/nocopy-main.go create mode 100644 unused2/testdata/src/nocopy/nocopy.go create mode 100644 unused2/testdata/src/only_in_test/pkg.go create mode 100644 unused2/testdata/src/only_in_test/pkg_test.go create mode 100644 unused2/testdata/src/pointer-type-embedding/pointer-type-embedding.go create mode 100644 unused2/testdata/src/selectors/selectors.go create mode 100644 unused2/testdata/src/switch_interface/switch_interface.go create mode 100644 unused2/testdata/src/unused-argument/unused-argument.go create mode 100644 unused2/testdata/src/unused_type/unused_type.go create mode 100644 unused2/unused.go create mode 100644 unused2/unused_test.go diff --git a/unused2/implements.go b/unused2/implements.go new file mode 100644 index 000000000..7a5579d73 --- /dev/null +++ b/unused2/implements.go @@ -0,0 +1,79 @@ +package unused + +import "go/types" + 
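A minimal sketch of the cached method-set lookup this file is built around. The checker's msCache field is assumed to be a golang.org/x/tools/go/types/typeutil.MethodSetCache (its concrete type is not shown in this patch), and hasMethod is a hypothetical helper used only for illustration:

    import (
    	"go/types"

    	"golang.org/x/tools/go/types/typeutil"
    )

    var cache typeutil.MethodSetCache

    // hasMethod reports whether T has a method with the given package and name.
    // The cache computes each type's method set once and reuses it across queries,
    // which is where the allocation savings over repeated types.Implements come from.
    func hasMethod(T types.Type, pkg *types.Package, name string) bool {
    	ms := cache.MethodSet(T)
    	return ms.Lookup(pkg, name) != nil
    }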
+// lookupMethod returns the index of and method with matching package and name, or (-1, nil). +func lookupMethod(T *types.Interface, pkg *types.Package, name string) (int, *types.Func) { + if name != "_" { + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + if sameId(m, pkg, name) { + return i, m + } + } + } + return -1, nil +} + +func sameId(obj types.Object, pkg *types.Package, name string) bool { + // spec: + // "Two identifiers are different if they are spelled differently, + // or if they appear in different packages and are not exported. + // Otherwise, they are the same." + if name != obj.Name() { + return false + } + // obj.Name == name + if obj.Exported() { + return true + } + // not exported, so packages must be the same (pkg == nil for + // fields in Universe scope; this can only happen for types + // introduced via Eval) + if pkg == nil || obj.Pkg() == nil { + return pkg == obj.Pkg() + } + // pkg != nil && obj.pkg != nil + return pkg.Path() == obj.Pkg().Path() +} + +func (g *Graph) implements(V types.Type, T *types.Interface) bool { + // fast path for common case + if T.Empty() { + return true + } + + if ityp, _ := V.Underlying().(*types.Interface); ityp != nil { + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + _, obj := lookupMethod(ityp, m.Pkg(), m.Name()) + switch { + case obj == nil: + return false + case !types.Identical(obj.Type(), m.Type()): + return false + } + } + return true + } + + // A concrete type implements T if it implements all methods of T. + ms := g.msCache.MethodSet(V) + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + sel := ms.Lookup(m.Pkg(), m.Name()) + if sel == nil { + return false + } + + f, _ := sel.Obj().(*types.Func) + if f == nil { + return false + } + + if !types.Identical(f.Type(), m.Type()) { + return false + } + } + return true +} diff --git a/unused2/testdata/src/anonymous/anonymous.go b/unused2/testdata/src/anonymous/anonymous.go new file mode 100644 index 000000000..d0577a737 --- /dev/null +++ b/unused2/testdata/src/anonymous/anonymous.go @@ -0,0 +1,18 @@ +package pkg + +import "fmt" + +type Node interface { + position() int +} + +type noder struct{} + +func (noder) position() int { panic("unreachable") } + +func Fn() { + nodes := []Node{struct { + noder + }{}} + fmt.Println(nodes) +} diff --git a/unused2/testdata/src/blank/blank.go b/unused2/testdata/src/blank/blank.go new file mode 100644 index 000000000..d220bdff3 --- /dev/null +++ b/unused2/testdata/src/blank/blank.go @@ -0,0 +1,26 @@ +package pkg + +import _ "fmt" + +type t1 struct{} // MATCH /t1 is unused/ +type t2 struct{} +type t3 struct{} + +var _ = t2{} + +func fn1() { // MATCH /fn1 is unused/ + _ = t1{} + var _ = t1{} +} + +func fn2() { + _ = t3{} +} + +func init() { + fn2() +} + +func _() {} + +type _ struct{} diff --git a/unused2/testdata/src/cgo/cgo.go b/unused2/testdata/src/cgo/cgo.go new file mode 100644 index 000000000..6b484f820 --- /dev/null +++ b/unused2/testdata/src/cgo/cgo.go @@ -0,0 +1,6 @@ +package pkg + +//go:cgo_export_dynamic +func foo() {} + +func bar() {} // MATCH /bar is unused/ diff --git a/unused2/testdata/src/consts/consts.go b/unused2/testdata/src/consts/consts.go new file mode 100644 index 000000000..6906f1e38 --- /dev/null +++ b/unused2/testdata/src/consts/consts.go @@ -0,0 +1,18 @@ +package pkg + +const c1 = 1 + +const c2 = 1 +const c3 = 1 +const c4 = 1 + +var _ = []int{c3: 1} + +type T1 struct { + F1 [c1]int +} + +func init() { + _ = []int{c2: 1} + var _ [c4]int +} diff --git a/unused2/testdata/src/conversion/conversion.go 
b/unused2/testdata/src/conversion/conversion.go new file mode 100644 index 000000000..afeb1f7a1 --- /dev/null +++ b/unused2/testdata/src/conversion/conversion.go @@ -0,0 +1,92 @@ +package pkg + +import ( + "compress/flate" + "unsafe" +) + +type t1 struct { + a int + b int +} + +type t2 struct { + a int + b int +} + +type t3 struct { + a int + b int // MATCH /b is unused/ +} + +type t4 struct { + a int + b int // MATCH /b is unused/ +} + +type t5 struct { + a int + b int +} + +type t6 struct { + a int + b int +} + +type t7 struct { + a int + b int +} + +type t8 struct { + a int + b int +} + +type t9 struct { + Offset int64 + Err error +} + +type t10 struct { + a int + b int +} + +func fn() { + // All fields in t2 used because they're initialised in t1 + v1 := t1{0, 1} + v2 := t2(v1) + _ = v2 + + // Field b isn't used by anyone + v3 := t3{} + v4 := t4(v3) + println(v3.a) + _ = v4 + + // Both fields are used + v5 := t5{} + v6 := t6(v5) + println(v5.a) + println(v6.b) + + v7 := &t7{} + println(v7.a) + println(v7.b) + v8 := (*t8)(v7) + _ = v8 + + vb := flate.ReadError{} + v9 := t9(vb) + _ = v9 + + // All fields are used because this is an unsafe conversion + var b []byte + v10 := (*t10)(unsafe.Pointer(&b[0])) + _ = v10 +} + +func init() { fn() } diff --git a/unused2/testdata/src/cyclic/cyclic.go b/unused2/testdata/src/cyclic/cyclic.go new file mode 100644 index 000000000..8601c24f2 --- /dev/null +++ b/unused2/testdata/src/cyclic/cyclic.go @@ -0,0 +1,9 @@ +package pkg + +func a() { // MATCH /a is unused/ + b() +} + +func b() { // MATCH /b is unused/ + a() +} diff --git a/unused2/testdata/src/elem/elem.go b/unused2/testdata/src/elem/elem.go new file mode 100644 index 000000000..24cbf03cc --- /dev/null +++ b/unused2/testdata/src/elem/elem.go @@ -0,0 +1,16 @@ +// Test of field usage detection + +package pkg + +type t15 struct{ f151 int } +type a2 [1]t15 + +type t16 struct{} +type a3 [1][1]t16 + +func foo() { + _ = a2{0: {1}} + _ = a3{{{}}} +} + +func init() { foo() } diff --git a/unused2/testdata/src/embedded_call/embedded_call.go b/unused2/testdata/src/embedded_call/embedded_call.go new file mode 100644 index 000000000..196ac0dec --- /dev/null +++ b/unused2/testdata/src/embedded_call/embedded_call.go @@ -0,0 +1,21 @@ +package pkg + +var t1 struct { + t2 + t3 + t4 +} + +type t2 struct{} +type t3 struct{} +type t4 struct{ t5 } +type t5 struct{} + +func (t2) foo() {} +func (t3) bar() {} +func (t5) baz() {} +func init() { + t1.foo() + _ = t1.bar + t1.baz() +} diff --git a/unused2/testdata/src/embedding/embedding.go b/unused2/testdata/src/embedding/embedding.go new file mode 100644 index 000000000..31bb43520 --- /dev/null +++ b/unused2/testdata/src/embedding/embedding.go @@ -0,0 +1,64 @@ +package pkg + +type I interface { + f1() + f2() +} + +func init() { + var _ I +} + +type t1 struct{} +type T2 struct{ t1 } + +func (t1) f1() {} +func (T2) f2() {} + +func Fn() { + var v T2 + _ = v.t1 +} + +type I2 interface { + f3() + f4() +} + +type t3 struct{} +type t4 struct { + x int // MATCH /x is unused/ + y int // MATCH /y is unused/ + t3 +} + +func (*t3) f3() {} +func (*t4) f4() {} + +func init() { + var i I2 = &t4{} + i.f3() + i.f4() +} + +type i3 interface { + F() +} + +type I4 interface { + i3 +} + +type T5 struct { + t6 +} + +type t6 struct { + F int +} + +type t7 struct{ X int } +type t8 struct{ t7 } +type t9 struct{ t8 } + +var _ = t9{} diff --git a/unused2/testdata/src/exported_fields/exported_fields.go b/unused2/testdata/src/exported_fields/exported_fields.go new file mode 100644 index 
000000000..64686ccab --- /dev/null +++ b/unused2/testdata/src/exported_fields/exported_fields.go @@ -0,0 +1,36 @@ +package pkg + +type t1 struct { + F1 int +} + +type T2 struct { + F2 int +} + +var v struct { + T3 +} + +type T3 struct{} + +func (T3) Foo() {} + +func init() { + v.Foo() +} + +func init() { + _ = t1{} +} + +type codeResponse struct { + Tree *codeNode `json:"tree"` +} + +type codeNode struct { +} + +func init() { + var _ codeResponse +} diff --git a/unused2/testdata/src/exported_fields_main/exported_fields_main.go b/unused2/testdata/src/exported_fields_main/exported_fields_main.go new file mode 100644 index 000000000..ffb99d990 --- /dev/null +++ b/unused2/testdata/src/exported_fields_main/exported_fields_main.go @@ -0,0 +1,14 @@ +package main + +type t1 struct { + F1 int +} + +type T2 struct { + F2 int +} + +func init() { + _ = t1{} + _ = T2{} +} diff --git a/unused2/testdata/src/exported_method_test/exported_method.go b/unused2/testdata/src/exported_method_test/exported_method.go new file mode 100644 index 000000000..c1caffeb1 --- /dev/null +++ b/unused2/testdata/src/exported_method_test/exported_method.go @@ -0,0 +1 @@ +package pkg diff --git a/unused2/testdata/src/exported_method_test/exported_method_test.go b/unused2/testdata/src/exported_method_test/exported_method_test.go new file mode 100644 index 000000000..d59c2f52a --- /dev/null +++ b/unused2/testdata/src/exported_method_test/exported_method_test.go @@ -0,0 +1,28 @@ +package pkg + +import ( + "bytes" + "io" + "io/ioutil" + "testing" +) + +type countReadSeeker struct { + io.ReadSeeker + N int64 +} + +func (rs *countReadSeeker) Read(buf []byte) (int, error) { + n, err := rs.ReadSeeker.Read(buf) + rs.N += int64(n) + return n, err +} + +func TestFoo(t *testing.T) { + r := bytes.NewReader([]byte("Hello, world!")) + cr := &countReadSeeker{ReadSeeker: r} + ioutil.ReadAll(cr) + if cr.N != 13 { + t.Errorf("got %d, want 13", cr.N) + } +} diff --git a/unused2/testdata/src/fields/fields.go b/unused2/testdata/src/fields/fields.go new file mode 100644 index 000000000..feb5ea022 --- /dev/null +++ b/unused2/testdata/src/fields/fields.go @@ -0,0 +1,68 @@ +// Test of field usage detection + +package pkg + +type t1 struct{ f11, f12 int } +type t2 struct{ f21, f22 int } +type t3 struct{ f31 t4 } +type t4 struct{ f41 int } +type t5 struct{ f51 int } +type t6 struct{ f61 int } +type t7 struct{ f71 int } +type m1 map[string]t7 +type t8 struct{ f81 int } +type t9 struct{ f91 int } +type t10 struct{ f101 int } +type t11 struct{ f111 int } +type s1 []t11 +type t12 struct{ f121 int } +type s2 []t12 +type t13 struct{ f131 int } +type t14 struct{ f141 int } +type a1 [1]t14 +type t15 struct{ f151 int } +type a2 [1]t15 +type t16 struct{ f161 int } +type t17 struct{ f171, f172 int } // MATCH /t17 is unused/ +// MATCH:28 /f183 is unused/ +type t18 struct{ f181, f182, f183 int } // MATCH /f182 is unused/ + +type t19 struct{ f191 int } +type m2 map[string]t19 + +type t20 struct{ f201 int } +type m3 map[string]t20 + +type t21 struct{ f211, f212 int } // MATCH /f211 is unused/ + +func foo() { + _ = t10{1} + _ = t21{f212: 1} + _ = []t1{{1, 2}} + _ = t2{1, 2} + _ = []struct{ a int }{{1}} + + // XXX + // _ = []struct{ foo struct{ bar int } }{{struct{ bar int }{1}}} + + _ = []t1{t1{1, 2}} + _ = []t3{{t4{1}}} + _ = map[string]t5{"a": {1}} + _ = map[t6]string{{1}: "a"} + _ = m1{"a": {1}} + _ = map[t8]t8{{}: {1}} + _ = map[t9]t9{{1}: {}} + _ = s1{{1}} + _ = s2{2: {1}} + _ = [...]t13{{1}} + _ = a1{{1}} + _ = a2{0: {1}} + _ = map[[1]t16]int{{{1}}: 1} + y := 
struct{ x int }{} // MATCH /x is unused/ + _ = y + _ = t18{f181: 1} + _ = []m2{{"a": {1}}} + _ = [][]m3{{{"a": {1}}}} +} + +func init() { foo() } diff --git a/unused2/testdata/src/functions/functions.go b/unused2/testdata/src/functions/functions.go new file mode 100644 index 000000000..80b5b5d0b --- /dev/null +++ b/unused2/testdata/src/functions/functions.go @@ -0,0 +1,36 @@ +package main + +type state func() state + +func a() state { + return a +} + +func main() { + st := a + _ = st() +} + +type t1 struct{} // MATCH /t1 is unused/ +type t2 struct{} +type t3 struct{} + +func fn1() t1 { return t1{} } // MATCH /fn1 is unused/ +func fn2() (x t2) { return } +func fn3() *t3 { return nil } + +func fn4() { + const x = 1 + const y = 2 // MATCH /y is unused/ + type foo int // MATCH /foo is unused/ + type bar int + + _ = x + var _ bar +} + +func init() { + fn2() + fn3() + fn4() +} diff --git a/unused2/testdata/src/generated1/generated1.go b/unused2/testdata/src/generated1/generated1.go new file mode 100644 index 000000000..1a8ca55f6 --- /dev/null +++ b/unused2/testdata/src/generated1/generated1.go @@ -0,0 +1,5 @@ +// Code generated by a clever monkey; DO NOT EDIT. + +package pkg + +type t struct{} diff --git a/unused2/testdata/src/generated2/generated2.go b/unused2/testdata/src/generated2/generated2.go new file mode 100644 index 000000000..17d736ee1 --- /dev/null +++ b/unused2/testdata/src/generated2/generated2.go @@ -0,0 +1,5 @@ +// Code generated by a bunch of monkeys with typewriters and RSI, DO NOT EDIT. + +package pkg + +type t struct{} diff --git a/unused2/testdata/src/interfaces/interfaces.go b/unused2/testdata/src/interfaces/interfaces.go new file mode 100644 index 000000000..e810549a7 --- /dev/null +++ b/unused2/testdata/src/interfaces/interfaces.go @@ -0,0 +1,15 @@ +package pkg + +type I interface { + fn1() +} + +type t struct{} + +func (t) fn1() {} +func (t) fn2() {} // MATCH /fn2 is unused/ + +func init() { + var _ I + var _ t +} diff --git a/unused2/testdata/src/main/main.go b/unused2/testdata/src/main/main.go new file mode 100644 index 000000000..ab000fc79 --- /dev/null +++ b/unused2/testdata/src/main/main.go @@ -0,0 +1,14 @@ +package main + +func Fn1() {} +func Fn2() {} // MATCH /Fn2 is unused/ + +const X = 1 // MATCH /X is unused/ + +var Y = 2 // MATCH /Y is unused/ + +type Z struct{} // MATCH /Z is unused/ + +func main() { + Fn1() +} diff --git a/unused2/testdata/src/mapslice/mapslice.go b/unused2/testdata/src/mapslice/mapslice.go new file mode 100644 index 000000000..2769b2c21 --- /dev/null +++ b/unused2/testdata/src/mapslice/mapslice.go @@ -0,0 +1,8 @@ +package pkg + +type M map[int]int + +func Fn() { + var n M + _ = []M{n} +} diff --git a/unused2/testdata/src/methods/methods.go b/unused2/testdata/src/methods/methods.go new file mode 100644 index 000000000..17673addd --- /dev/null +++ b/unused2/testdata/src/methods/methods.go @@ -0,0 +1,14 @@ +package pkg + +type t1 struct{} +type t2 struct{ t3 } +type t3 struct{} + +func (t1) Foo() {} +func (t3) Foo() {} +func (t3) foo() {} // MATCH /foo is unused/ + +func init() { + _ = t1{} + _ = t2{} +} diff --git a/unused2/testdata/src/nested/nested.go b/unused2/testdata/src/nested/nested.go new file mode 100644 index 000000000..ade2c0dbb --- /dev/null +++ b/unused2/testdata/src/nested/nested.go @@ -0,0 +1,17 @@ +package pkg + +type t struct{} // MATCH /t is unused/ + +func (t) fragment() {} + +func fn() bool { // MATCH /fn is unused/ + var v interface{} = t{} + switch obj := v.(type) { + // XXX it shouldn't report fragment(), because fn is 
unused + case interface { + fragment() // MATCH /fragment is unused/ + }: + obj.fragment() + } + return false +} diff --git a/unused2/testdata/src/nocopy-main/nocopy-main.go b/unused2/testdata/src/nocopy-main/nocopy-main.go new file mode 100644 index 000000000..4fefb5071 --- /dev/null +++ b/unused2/testdata/src/nocopy-main/nocopy-main.go @@ -0,0 +1,24 @@ +package main + +type myNoCopy1 struct{} +type myNoCopy2 struct{} +type locker struct{} // MATCH "locker is unused" +type someStruct struct{ x int } // MATCH "someStruct is unused" + +func (myNoCopy1) Lock() {} +func (recv myNoCopy2) Lock() {} +func (locker) Lock() {} +func (locker) Unlock() {} +func (someStruct) Lock() {} + +type T struct { + noCopy1 myNoCopy1 + noCopy2 myNoCopy2 + field1 someStruct // MATCH "field1 is unused" + field2 locker // MATCH "field2 is unused" + field3 int // MATCH "field3 is unused" +} + +func main() { + _ = T{} +} diff --git a/unused2/testdata/src/nocopy/nocopy.go b/unused2/testdata/src/nocopy/nocopy.go new file mode 100644 index 000000000..156edf50c --- /dev/null +++ b/unused2/testdata/src/nocopy/nocopy.go @@ -0,0 +1,20 @@ +package bar + +type myNoCopy1 struct{} +type myNoCopy2 struct{} +type locker struct{} // MATCH "locker is unused" +type someStruct struct{ x int } // MATCH "someStruct is unused" + +func (myNoCopy1) Lock() {} +func (recv myNoCopy2) Lock() {} +func (locker) Lock() {} +func (locker) Unlock() {} +func (someStruct) Lock() {} + +type T struct { + noCopy1 myNoCopy1 + noCopy2 myNoCopy2 + field1 someStruct // MATCH "field1 is unused" + field2 locker // MATCH "field2 is unused" + field3 int // MATCH "field3 is unused" +} diff --git a/unused2/testdata/src/only_in_test/pkg.go b/unused2/testdata/src/only_in_test/pkg.go new file mode 100644 index 000000000..ca2d5b3cd --- /dev/null +++ b/unused2/testdata/src/only_in_test/pkg.go @@ -0,0 +1,3 @@ +package pkg + +func fn() {} diff --git a/unused2/testdata/src/only_in_test/pkg_test.go b/unused2/testdata/src/only_in_test/pkg_test.go new file mode 100644 index 000000000..99fdfd753 --- /dev/null +++ b/unused2/testdata/src/only_in_test/pkg_test.go @@ -0,0 +1,7 @@ +package pkg + +import "testing" + +func TestPkg(t *testing.T) { + fn() +} diff --git a/unused2/testdata/src/pointer-type-embedding/pointer-type-embedding.go b/unused2/testdata/src/pointer-type-embedding/pointer-type-embedding.go new file mode 100644 index 000000000..648b7d6f0 --- /dev/null +++ b/unused2/testdata/src/pointer-type-embedding/pointer-type-embedding.go @@ -0,0 +1,17 @@ +package pkg + +func init() { + var p P + _ = p.n +} + +type T0 struct { + m int // MATCH /m is unused/ + n int +} + +type T1 struct { + T0 +} + +type P *T1 diff --git a/unused2/testdata/src/selectors/selectors.go b/unused2/testdata/src/selectors/selectors.go new file mode 100644 index 000000000..9ab337888 --- /dev/null +++ b/unused2/testdata/src/selectors/selectors.go @@ -0,0 +1,14 @@ +package pkg + +type t struct { + f int +} + +func fn(v *t) { + println(v.f) +} + +func init() { + var v t + fn(&v) +} diff --git a/unused2/testdata/src/switch_interface/switch_interface.go b/unused2/testdata/src/switch_interface/switch_interface.go new file mode 100644 index 000000000..99c2ce858 --- /dev/null +++ b/unused2/testdata/src/switch_interface/switch_interface.go @@ -0,0 +1,19 @@ +package pkg + +type t struct{} + +func (t) fragment() {} + +func fn() bool { + var v interface{} = t{} + switch obj := v.(type) { + case interface { + fragment() + }: + obj.fragment() + } + return false +} + +var x = fn() +var _ = x diff --git 
a/unused2/testdata/src/unused-argument/unused-argument.go b/unused2/testdata/src/unused-argument/unused-argument.go new file mode 100644 index 000000000..423592692 --- /dev/null +++ b/unused2/testdata/src/unused-argument/unused-argument.go @@ -0,0 +1,10 @@ +package main + +type t1 struct{} +type t2 struct{} + +func (t1) foo(arg *t2) {} + +func init() { + t1{}.foo(nil) +} diff --git a/unused2/testdata/src/unused_type/unused_type.go b/unused2/testdata/src/unused_type/unused_type.go new file mode 100644 index 000000000..eabfce4b5 --- /dev/null +++ b/unused2/testdata/src/unused_type/unused_type.go @@ -0,0 +1,17 @@ +package pkg + +type t1 struct{} // MATCH /t1 is unused/ + +func (t1) Fn() {} + +type t2 struct{} + +func (*t2) Fn() {} + +func init() { + (*t2).Fn(nil) +} + +type t3 struct{} // MATCH /t3 is unused/ + +func (t3) fn() diff --git a/unused2/unused.go b/unused2/unused.go new file mode 100644 index 000000000..f831d058d --- /dev/null +++ b/unused2/unused.go @@ -0,0 +1,889 @@ +package unused + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/types/typeutil" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintdsl" + "honnef.co/go/tools/ssa" +) + +// OPT(dh): optimize graph by not storing irrelevant nodes. storing +// basic types, empty signatures etc doesn't add any information to +// the graph. +// +// OPT(dh): deduplicate type nodes. [1]T and [1]T are the same type and can be merged +// +// OPT(dh): deduplicate edges +// +// OPT(dh): don't track function calls into external packages + +// TODO(dh): conversions between structs mark fields as used, but the +// conversion itself isn't part of that subgraph. even if the function +// containing the conversion is unused, the fields will be marked as +// used. + +const debug = false + +/* + +TODO known reflect +TODO error interface + +- packages use: + - exported named types + - exported functions + - exported variables + - exported constants + - init functions + - TODO functions exported to cgo + +- named types use: + - exported methods + +- variables and constants use: + - their types + +- functions use: + - all their arguments, return parameters and receivers + - anonymous functions defined beneath them + - functions they return. we assume that someone else will call the returned function + - functions/interface methods they call + - types they instantiate or convert to + - fields they read or write + - fields whose addresses they return + - types of all instructions + +- conversions use: + - when converting between two equivalent structs, the fields in + either struct use each other. the fields are relevant for the + conversion, but only if the fields are also accessed outside the + conversion. + - when converting to or from unsafe.Pointer, mark all fields as used. + +- structs use: + - fields of type NoCopy sentinel + - exported fields + - embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) + - embedded fields that have exported methods (recursively) + - embedded structs that have exported fields (recursively) + +- field accesses use fields + +- interfaces use: + - all their methods. we really have no idea what is going on with interfaces. + - matching methods on types that implement this interface. + we assume that types are meant to implement as many interfaces as possible. + takes into consideration embedding into possibly unnamed types. 
+ +- interface calls use: + - the called interface method + +- thunks and other generated wrappers call the real function + +- things named _ are used +*/ + +func assert(b bool) { + if !b { + panic("failed assertion") + } +} + +func NewLintChecker(c *Checker) *LintChecker { + l := &LintChecker{ + c: c, + } + return l +} + +type LintChecker struct { + c *Checker +} + +func (*LintChecker) Name() string { return "unused" } +func (*LintChecker) Prefix() string { return "U" } + +func (l *LintChecker) Init(*lint.Program) {} +func (l *LintChecker) Checks() []lint.Check { + return []lint.Check{ + {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, + } +} + +func typString(obj types.Object) string { + switch obj := obj.(type) { + case *types.Func: + return "func" + case *types.Var: + if obj.IsField() { + return "field" + } + return "var" + case *types.Const: + return "const" + case *types.TypeName: + return "type" + default: + return "identifier" + } +} + +func (l *LintChecker) Lint(j *lint.Job) { + unused := l.c.Check(j.Program, j) + for _, u := range unused { + name := u.Obj.Name() + if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { + switch sig.Recv().Type().(type) { + case *types.Named, *types.Pointer: + typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) + if len(typ) > 0 && typ[0] == '*' { + name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name()) + } else if len(typ) > 0 { + name = fmt.Sprintf("%s.%s", typ, u.Obj.Name()) + } + } + } + j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name) + } +} + +type Unused struct { + Obj types.Object + Position token.Position +} + +func NewChecker() *Checker { + return &Checker{} +} + +type Checker struct{} + +func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { + var out []Unused + for _, pkg := range prog.InitialPackages { + graph := NewGraph(pkg.SSA) + graph.job = j + graph.entry(pkg.TypesInfo) + + graph.color(graph.Root) + for _, node := range graph.Nodes { + if node.seen { + continue + } + switch obj := node.obj.(type) { + case *types.Var: + if !obj.IsField() { + // Only flag fields. Variables are either + // arguments or local variables, neither of which + // should ever be reported. + node.quiet = true + } + case *types.Named: + for i := 0; i < obj.NumMethods(); i++ { + m := pkg.SSA.Prog.FuncValue(obj.Method(i)) + if node, ok := graph.Nodes[m]; ok { + node.quiet = true + } + } + case *types.Struct: + for i := 0; i < obj.NumFields(); i++ { + if node, ok := graph.Nodes[obj.Field(i)]; ok { + node.quiet = true + } + } + } + } + for _, node := range graph.Nodes { + if !node.seen { + if node.quiet { + if debug { + fmt.Printf("n%d [color=purple];\n", node.id) + } + continue + } + if debug { + fmt.Printf("n%d [color=red];\n", node.id) + } + switch obj := node.obj.(type) { + case types.Object: + if obj.Pkg() == pkg.Package.Types { + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj, + Position: pos, + }) + } + case *ssa.Function: + if obj == nil { + // TODO(dh): how does this happen? 
+ continue + } + + // OPT(dh): objects in other packages should never make it into the graph + if obj.Object() != nil && obj.Object().Pkg() == pkg.Types { + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj.Object(), + Position: pos, + }) + } + default: + if debug { + fmt.Printf("n%d [color=gray];\n", node.id) + } + } + } + } + } + return out +} + +type Graph struct { + job *lint.Job + pkg *ssa.Package + msCache typeutil.MethodSetCache + + nodeCounter int + + Root *Node + Nodes map[interface{}]*Node + + seenTypes map[types.Type]struct{} + seenFns map[*ssa.Function]struct{} +} + +func NewGraph(pkg *ssa.Package) *Graph { + g := &Graph{ + pkg: pkg, + Nodes: map[interface{}]*Node{}, + seenTypes: map[types.Type]struct{}{}, + seenFns: map[*ssa.Function]struct{}{}, + } + g.Root = g.newNode(nil) + if debug { + fmt.Printf("n%d [label=\"Root\"];\n", g.Root.id) + } + + return g +} + +func (g *Graph) color(root *Node) { + if root.seen { + return + } + root.seen = true + for other := range root.used { + g.color(other) + } +} + +type Node struct { + obj interface{} + id int + used map[*Node]struct{} + + seen bool + quiet bool +} + +func (g *Graph) newNode(obj interface{}) *Node { + g.nodeCounter++ + return &Node{ + obj: obj, + id: g.nodeCounter, + used: map[*Node]struct{}{}, + } +} + +func (n *Node) use(node *Node) { + assert(node != nil) + n.used[node] = struct{}{} +} + +func (g *Graph) see(obj interface{}) { + assert(obj != nil) + if obj, ok := obj.(types.Object); ok { + if obj.Pkg() != g.pkg.Pkg { + return + } + } + if g.Nodes[obj] == nil { + g.Nodes[obj] = g.newNode(obj) + + if debug { + fmt.Printf("n%d [label=%q];\n", g.Nodes[obj].id, obj) + } + } +} + +func (g *Graph) use(used, by interface{}, reason string) { + assert(used != nil) + if _, ok := used.(*types.Func); ok { + assert(g.pkg.Prog.FuncValue(used.(*types.Func)) == nil) + } + if _, ok := by.(*types.Func); ok { + assert(g.pkg.Prog.FuncValue(by.(*types.Func)) == nil) + } + if obj, ok := used.(types.Object); ok { + if obj.Pkg() != g.pkg.Pkg { + return + } + } + if obj, ok := by.(types.Object); ok { + if obj.Pkg() != g.pkg.Pkg { + return + } + } + usedNode := g.Nodes[used] + assert(usedNode != nil) + if by == nil { + g.Root.use(usedNode) + if debug { + fmt.Printf("n%d -> n%d [label=%q];\n", g.Root.id, usedNode.id, reason) + } + } else { + assert(g.Nodes[by] != nil) + if debug { + fmt.Printf("n%d -> n%d [label=%q];\n", g.Nodes[by].id, usedNode.id, reason) + } + g.Nodes[by].use(usedNode) + } +} + +func (g *Graph) seeAndUse(used, by interface{}, reason string) { + g.see(used) + g.use(used, by, reason) +} + +func (g *Graph) entry(tinfo *types.Info) { + scopes := map[*types.Scope]ast.Node{} + for node, scope := range tinfo.Scopes { + switch node.(type) { + case *ast.File, *ast.FuncType: + scopes[scope] = node + } + } + // TODO rename Entry + + // SSA form won't tell us about locally scoped types that aren't + // being used. Walk the list of Defs to get all named types. + // + // SSA form also won't tell us about constants; use Defs and Uses + // to determine which constants exist and which are being used. + for def, obj := range tinfo.Defs { + if def.Name == "_" && obj != nil { + path, _ := astutil.PathEnclosingInterval(g.job.File(def), def.Pos(), def.Pos()) + for _, p := range path { + if decl, ok := p.(*ast.FuncDecl); ok { + fnObj := lintdsl.ObjectOf(g.job, decl.Name) + fn := g.pkg.Prog.FuncValue(fnObj.(*types.Func)) + g.see(fn) + // Note that this isn't necessarily the tightest + // possible match. 
This will only match named + // functions, not closures, for example. + g.seeAndUse(obj.Type(), fn, "defined as blank") + break + } + } + } + switch obj := obj.(type) { + case *types.TypeName: + g.see(obj) + g.typ(obj.Type()) + case *types.Const: + g.see(obj) + // FIXME(dh): we don't know the scope of the constant, it + // may be local to a function + if obj.Exported() { + g.use(obj, nil, "exported constant") + } + g.typ(obj.Type()) + g.seeAndUse(obj.Type(), obj, "constant type") + } + } +usesLoop: + for use, obj := range tinfo.Uses { + switch obj := obj.(type) { + case *types.Const: + path, _ := astutil.PathEnclosingInterval(g.job.File(use), use.Pos(), use.Pos()) + for _, p := range path { + if decl, ok := p.(*ast.FuncDecl); ok { + fnObj := lintdsl.ObjectOf(g.job, decl.Name) + fn := g.pkg.Prog.FuncValue(fnObj.(*types.Func)) + g.see(fn) + // Note that this isn't necessarily the tightest + // possible match. This will only match named + // functions, not closures, for example. + g.seeAndUse(obj, fn, "used constant") + continue usesLoop + } + } + // we couldn't find the surrounding function, so mark it as used by the root node. + g.seeAndUse(obj, nil, "used constant") + } + } + + for _, m := range g.pkg.Members { + switch m := m.(type) { + case *ssa.NamedConst: + // XXX + case *ssa.Global: + // XXX + case *ssa.Function: + g.see(m) + if m.Name() == "init" { + g.use(m, nil, "init function") + } + // This branch catches top-level functions, not methods. + if m.Object() != nil && m.Object().Exported() { + g.use(m, nil, "exported top-level function") + } + if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { + g.use(m, nil, "main function") + } + g.function(m) + case *ssa.Type: + if m.Object() != nil { + g.see(m.Object()) + if m.Object().Exported() { + g.use(m.Object(), nil, "exported top-level type") + } + } + g.typ(m.Type()) + default: + panic(fmt.Sprintf("unreachable: %T", m)) + } + } + + var ifaces []*types.Interface + var notIfaces []types.Type + + for t := range g.seenTypes { + switch t := t.(type) { + case *types.Interface: + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } + } + } + + for _, iface := range ifaces { + for _, t := range notIfaces { + if g.implements(t, iface) { + for i := 0; i < iface.NumMethods(); i++ { + // get the chain of embedded types that lead to the function implementing the interface + // OPT(dh): use method set cache + obj, path, _ := types.LookupFieldOrMethod(t, false, g.pkg.Pkg, iface.Method(i).Name()) + assert(obj != nil) + if len(path) > 1 { + base := lintdsl.Dereference(t).Underlying().(*types.Struct) + for _, idx := range path[:len(path)-1] { + next := base.Field(idx) + g.seeAndUse(next, base, "helps implement") + base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) + } + } + if fn := g.pkg.Prog.FuncValue(obj.(*types.Func)); fn != nil { + // actual function + g.seeAndUse(fn, iface, "implements") + } else { + // interface method + g.seeAndUse(obj, iface, "implements") + } + } + } + } + } +} + +func (g *Graph) function(fn *ssa.Function) { + g.seeAndUse(fn.Signature, fn, "function signature") + g.signature(fn.Signature) + g.instructions(fn) + for _, anon := range fn.AnonFuncs { + g.seeAndUse(anon, fn, "anonymous function") + g.function(anon) + } +} + +func (g *Graph) typ(t types.Type) { + if _, ok := g.seenTypes[t]; ok { + return + } + if t, ok := t.(*types.Named); ok { + if t.Obj().Pkg() != g.pkg.Pkg { + return + } + } + g.seenTypes[t] = struct{}{} + + 
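+	// Register the type itself, then walk its structure: struct fields,
+	// a named type's methods and underlying type, and the element/key
+	// types of composite types each get their own nodes and edges.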
g.see(t) + switch t := t.(type) { + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + g.see(t.Field(i)) + if t.Field(i).Exported() { + g.use(t.Field(i), t, "exported struct field") + } else if isNoCopyType(t.Field(i).Type()) { + g.use(t.Field(i), t, "NoCopy sentinel") + } + if t.Field(i).Anonymous() { + // does the embedded field contribute exported methods to the method set? + ms := g.msCache.MethodSet(t.Field(i).Type()) + for j := 0; j < ms.Len(); j++ { + if ms.At(j).Obj().Exported() { + g.use(t.Field(i), t, "extends exported method set") + break + } + } + + seen := map[*types.Struct]struct{}{} + var hasExportedField func(t types.Type) bool + hasExportedField = func(T types.Type) bool { + t, ok := lintdsl.Dereference(T).Underlying().(*types.Struct) + if !ok { + return false + } + if _, ok := seen[t]; ok { + return false + } + seen[t] = struct{}{} + for i := 0; i < t.NumFields(); i++ { + field := t.Field(i) + if field.Exported() { + return true + } + if field.Embedded() && hasExportedField(field.Type()) { + return true + } + } + return false + } + // does the embedded field contribute exported fields? + if hasExportedField(t.Field(i).Type()) { + g.use(t.Field(i), t, "extends exported fields") + } + + } + g.variable(t.Field(i)) + } + case *types.Basic: + // Nothing to do + case *types.Named: + g.seeAndUse(t.Underlying(), t, "underlying type") + g.seeAndUse(t.Obj(), t, "type name") + g.seeAndUse(t, t.Obj(), "named type") + + for i := 0; i < t.NumMethods(); i++ { + meth := g.pkg.Prog.FuncValue(t.Method(i)) + g.see(meth) + if meth.Object() != nil && meth.Object().Exported() { + g.use(meth, t, "exported method") + } + g.function(meth) + } + + g.typ(t.Underlying()) + case *types.Slice: + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Map: + g.seeAndUse(t.Elem(), t, "element type") + g.seeAndUse(t.Key(), t, "key type") + g.typ(t.Elem()) + g.typ(t.Key()) + case *types.Signature: + g.signature(t) + case *types.Interface: + for i := 0; i < t.NumMethods(); i++ { + m := t.Method(i) + g.seeAndUse(m, t, "interface method") + g.seeAndUse(m.Type().(*types.Signature), m, "signature") + g.signature(m.Type().(*types.Signature)) + } + case *types.Array: + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Pointer: + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Chan: + g.seeAndUse(t.Elem(), t, "element type") + g.typ(t.Elem()) + case *types.Tuple: + for i := 0; i < t.Len(); i++ { + g.seeAndUse(t.At(i), t, "tuple element") + g.variable(t.At(i)) + } + default: + panic(fmt.Sprintf("unreachable: %T", t)) + } +} + +func (g *Graph) variable(v *types.Var) { + g.seeAndUse(v.Type(), v, "variable type") + g.typ(v.Type()) +} + +func (g *Graph) signature(sig *types.Signature) { + if sig.Recv() != nil { + g.seeAndUse(sig.Recv(), sig, "receiver") + g.variable(sig.Recv()) + } + for i := 0; i < sig.Params().Len(); i++ { + param := sig.Params().At(i) + g.seeAndUse(param, sig, "function argument") + g.variable(param) + } + for i := 0; i < sig.Results().Len(); i++ { + param := sig.Results().At(i) + g.seeAndUse(param, sig, "function result") + g.variable(param) + } +} + +func (g *Graph) instructions(fn *ssa.Function) { + if _, ok := g.seenFns[fn]; ok { + return + } + g.seenFns[fn] = struct{}{} + + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if v, ok := instr.(ssa.Value); ok { + if _, ok := v.(*ssa.Range); !ok { + // See https://github.com/golang/go/issues/19670 + + g.see(v.Type()) + 
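+					// ...and mark the type as used by the enclosing function:
+					// every value-producing instruction keeps its type alive.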
g.seeAndUse(v.Type(), fn, fmt.Sprintf("%s = %s", v.Name(), v)) + g.typ(v.Type()) + } + } + switch instr := instr.(type) { + case *ssa.Field: + st := instr.X.Type().Underlying().(*types.Struct) + field := st.Field(instr.Field) + g.seeAndUse(field, fn, "field access") + case *ssa.FieldAddr: + st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) + field := st.Field(instr.Field) + g.seeAndUse(field, fn, "field access") + case *ssa.Store: + case *ssa.Call: + c := instr.Common() + if !c.IsInvoke() { + seen := map[ssa.Value]struct{}{} + var useCall func(v ssa.Value) + useCall = func(v ssa.Value) { + if _, ok := seen[v]; ok { + return + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ssa.Function: + g.seeAndUse(v, fn, "function call") + if obj := v.Object(); obj != nil { + if cfn := g.pkg.Prog.FuncValue(obj.(*types.Func)); cfn != v { + // The called function is a thunk (or similar), + // process its instructions to get the call to the real function. + // Alternatively, we could mark the function as used by the thunk. + // + // We can detect the thunk because ssa.Function -> types.Object -> ssa.Function + // leads from the thunk to the real function. + g.instructions(v) + } + } + case *ssa.MakeClosure: + useCall(v.Fn) + case *ssa.Builtin: + // nothing to do + case *ssa.Phi: + for _, e := range v.Edges { + useCall(e) + } + } + } + // non-interface call + useCall(c.Value) + } else { + g.seeAndUse(c.Method, fn, "interface call") + } + case *ssa.Return: + seen := map[ssa.Value]struct{}{} + var handleReturn func(v ssa.Value) + handleReturn = func(v ssa.Value) { + if _, ok := seen[v]; ok { + return + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ssa.Function: + g.seeAndUse(v, fn, "returning function") + case *ssa.MakeClosure: + g.seeAndUse(v.Fn, fn, "returning closure") + case *ssa.Phi: + for _, e := range v.Edges { + handleReturn(e) + } + } + } + for _, v := range instr.Results { + handleReturn(v) + } + case *ssa.ChangeType: + g.seeAndUse(instr.Type(), fn, "conversion") + g.typ(instr.Type()) + + s1, ok1 := lintdsl.Dereference(instr.Type()).Underlying().(*types.Struct) + s2, ok2 := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) + if ok1 && ok2 { + // Converting between two structs. The fields are + // relevant for the conversion, but only if the + // fields are also used outside of the conversion. + // Mark fields as used by each other. 
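+				// (Compare the conversion testdata above: a field survives the
+				// t1 -> t2 conversion only if some code also reads it directly.)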
+ + assert(s1.NumFields() == s2.NumFields()) + for i := 0; i < s1.NumFields(); i++ { + g.see(s1.Field(i)) + g.see(s2.Field(i)) + g.seeAndUse(s1.Field(i), s2.Field(i), "struct conversion") + g.seeAndUse(s2.Field(i), s1.Field(i), "struct conversion") + } + } + case *ssa.MakeInterface: + case *ssa.Slice: + case *ssa.RunDefers: + // XXX use deferred functions + case *ssa.Convert: + // to unsafe.Pointer + if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { + if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok { + if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { + for i := 0; i < st.NumFields(); i++ { + g.seeAndUse(st.Field(i), fn, "unsafe conversion") + } + } + } + } + // from unsafe.Pointer + if typ, ok := instr.X.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { + if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { + if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { + for i := 0; i < st.NumFields(); i++ { + g.seeAndUse(st.Field(i), fn, "unsafe conversion") + } + } + } + } + case *ssa.TypeAssert: + g.seeAndUse(instr.AssertedType, fn, "type assert") + g.typ(instr.AssertedType) + case *ssa.MakeClosure: + g.seeAndUse(instr.Fn, fn, "make closure") + v := instr.Fn.(*ssa.Function) + if obj := v.Object(); obj != nil { + if cfn := g.pkg.Prog.FuncValue(obj.(*types.Func)); cfn != v { + // The called function is a $bound (or similar), + // process its instructions to get the call to the real function. + // Alternatively, we could mark the function as used by the $bound. + // + // We can detect the $bound because ssa.Function -> types.Object -> ssa.Function + // leads from the thunk to the real function. + g.instructions(v) + } + } + case *ssa.Alloc: + // nothing to do + case *ssa.UnOp: + // nothing to do + case *ssa.BinOp: + // nothing to do + case *ssa.If: + // nothing to do + case *ssa.Jump: + // nothing to do + case *ssa.IndexAddr: + // nothing to do + case *ssa.Extract: + // nothing to do + case *ssa.Panic: + // nothing to do + case *ssa.DebugRef: + // nothing to do + case *ssa.BlankStore: + // nothing to do + case *ssa.Phi: + // nothing to do + case *ssa.MakeMap: + // nothing to do + case *ssa.MapUpdate: + // nothing to do + case *ssa.Lookup: + // nothing to do + case *ssa.MakeSlice: + // nothing to do + case *ssa.Send: + // nothing to do + case *ssa.MakeChan: + // nothing to do + case *ssa.Range: + // nothing to do + case *ssa.Next: + // nothing to do + case *ssa.Index: + // nothing to do + case *ssa.Select: + // nothing to do + case *ssa.ChangeInterface: + // XXX + case *ssa.Go: + // XXX + case *ssa.Defer: + // XXX + default: + panic(fmt.Sprintf("unreachable: %T", instr)) + } + } + } +} + +// isNoCopyType reports whether a type represents the NoCopy sentinel +// type. The NoCopy type is a named struct with no fields and exactly +// one method `func Lock()` that is empty. +// +// FIXME(dh): currently we're not checking that the function body is +// empty. 
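+//
+// A minimal example of the sentinel shape this function accepts (the type
+// name is illustrative; compare the nocopy testdata in this change):
+//
+//	type noCopy struct{}
+//
+//	func (noCopy) Lock() {}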
+func isNoCopyType(typ types.Type) bool { + st, ok := typ.Underlying().(*types.Struct) + if !ok { + return false + } + if st.NumFields() != 0 { + return false + } + + named, ok := typ.(*types.Named) + if !ok { + return false + } + if named.NumMethods() != 1 { + return false + } + meth := named.Method(0) + if meth.Name() != "Lock" { + return false + } + sig := meth.Type().(*types.Signature) + if sig.Params().Len() != 0 || sig.Results().Len() != 0 { + return false + } + return true +} diff --git a/unused2/unused_test.go b/unused2/unused_test.go new file mode 100644 index 000000000..4ed2c8c08 --- /dev/null +++ b/unused2/unused_test.go @@ -0,0 +1,19 @@ +package unused + +// Copyright (c) 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found at +// https://developers.google.com/open-source/licenses/bsd. + +import ( + "testing" + + "honnef.co/go/tools/lint/testutil" +) + +func TestAll(t *testing.T) { + checker := NewChecker() + l := NewLintChecker(checker) + testutil.TestAll(t, l, "") +} From 50677158f94cb83ee1e7a455f548603861e3e6ce Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 17 Mar 2019 05:39:19 +0100 Subject: [PATCH 039/254] unused2: some optimizations --- unused2/unused.go | 164 ++++++++++++++++++++++++++++------------------ 1 file changed, 102 insertions(+), 62 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index f831d058d..35c227fa3 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -17,8 +17,6 @@ import ( // basic types, empty signatures etc doesn't add any information to // the graph. // -// OPT(dh): deduplicate type nodes. [1]T and [1]T are the same type and can be merged -// // OPT(dh): deduplicate edges // // OPT(dh): don't track function calls into external packages @@ -172,9 +170,9 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { graph.entry(pkg.TypesInfo) graph.color(graph.Root) - for _, node := range graph.Nodes { + quieten := func(node *Node) { if node.seen { - continue + return } switch obj := node.obj.(type) { case *types.Var: @@ -187,59 +185,73 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { case *types.Named: for i := 0; i < obj.NumMethods(); i++ { m := pkg.SSA.Prog.FuncValue(obj.Method(i)) - if node, ok := graph.Nodes[m]; ok { + if node, ok := graph.nodeMaybe(m); ok { node.quiet = true } } case *types.Struct: for i := 0; i < obj.NumFields(); i++ { - if node, ok := graph.Nodes[obj.Field(i)]; ok { + if node, ok := graph.nodeMaybe(obj.Field(i)); ok { node.quiet = true } } } } for _, node := range graph.Nodes { - if !node.seen { - if node.quiet { - if debug { - fmt.Printf("n%d [color=purple];\n", node.id) - } - continue - } + quieten(node) + } + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + quieten(value.(*Node)) + }) + + report := func(node *Node) { + if node.seen { + return + } + if node.quiet { if debug { - fmt.Printf("n%d [color=red];\n", node.id) + fmt.Printf("n%d [color=purple];\n", node.id) + } + return + } + if debug { + fmt.Printf("n%d [color=red];\n", node.id) + } + switch obj := node.obj.(type) { + case types.Object: + if obj.Pkg() == pkg.Package.Types { + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj, + Position: pos, + }) + } + case *ssa.Function: + if obj == nil { + // TODO(dh): how does this happen? 
+ return } - switch obj := node.obj.(type) { - case types.Object: - if obj.Pkg() == pkg.Package.Types { - pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj, - Position: pos, - }) - } - case *ssa.Function: - if obj == nil { - // TODO(dh): how does this happen? - continue - } - // OPT(dh): objects in other packages should never make it into the graph - if obj.Object() != nil && obj.Object().Pkg() == pkg.Types { - pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj.Object(), - Position: pos, - }) - } - default: - if debug { - fmt.Printf("n%d [color=gray];\n", node.id) - } + // OPT(dh): objects in other packages should never make it into the graph + if obj.Object() != nil && obj.Object().Pkg() == pkg.Types { + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj.Object(), + Position: pos, + }) + } + default: + if debug { + fmt.Printf("n%d [color=gray];\n", node.id) } } } + for _, node := range graph.Nodes { + report(node) + } + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + report(value.(*Node)) + }) } return out } @@ -251,19 +263,19 @@ type Graph struct { nodeCounter int - Root *Node - Nodes map[interface{}]*Node + Root *Node + TypeNodes typeutil.Map + Nodes map[interface{}]*Node - seenTypes map[types.Type]struct{} + seenTypes typeutil.Map seenFns map[*ssa.Function]struct{} } func NewGraph(pkg *ssa.Package) *Graph { g := &Graph{ - pkg: pkg, - Nodes: map[interface{}]*Node{}, - seenTypes: map[types.Type]struct{}{}, - seenFns: map[*ssa.Function]struct{}{}, + pkg: pkg, + Nodes: map[interface{}]*Node{}, + seenFns: map[*ssa.Function]struct{}{}, } g.Root = g.newNode(nil) if debug { @@ -292,6 +304,36 @@ type Node struct { quiet bool } +func (g *Graph) nodeMaybe(obj interface{}) (*Node, bool) { + if t, ok := obj.(types.Type); ok { + if v := g.TypeNodes.At(t); v != nil { + return v.(*Node), true + } + return nil, false + } + if node, ok := g.Nodes[obj]; ok { + return node, true + } + return nil, false +} + +func (g *Graph) node(obj interface{}) *Node { + if t, ok := obj.(types.Type); ok { + if v := g.TypeNodes.At(t); v != nil { + return v.(*Node) + } + node := g.newNode(obj) + g.TypeNodes.Set(t, node) + return node + } + if node, ok := g.Nodes[obj]; ok { + return node + } + node := g.newNode(obj) + g.Nodes[obj] = node + return node +} + func (g *Graph) newNode(obj interface{}) *Node { g.nodeCounter++ return &Node{ @@ -313,12 +355,11 @@ func (g *Graph) see(obj interface{}) { return } } - if g.Nodes[obj] == nil { - g.Nodes[obj] = g.newNode(obj) - if debug { - fmt.Printf("n%d [label=%q];\n", g.Nodes[obj].id, obj) - } + // add new node to graph + node := g.node(obj) + if debug { + fmt.Printf("n%d [label=%q];\n", node.id, obj) } } @@ -340,19 +381,18 @@ func (g *Graph) use(used, by interface{}, reason string) { return } } - usedNode := g.Nodes[used] - assert(usedNode != nil) + usedNode := g.node(used) if by == nil { g.Root.use(usedNode) if debug { fmt.Printf("n%d -> n%d [label=%q];\n", g.Root.id, usedNode.id, reason) } } else { - assert(g.Nodes[by] != nil) + byNode := g.node(by) if debug { - fmt.Printf("n%d -> n%d [label=%q];\n", g.Nodes[by].id, usedNode.id, reason) + fmt.Printf("n%d -> n%d [label=%q];\n", byNode.id, usedNode.id, reason) } - g.Nodes[by].use(usedNode) + byNode.use(usedNode) } } @@ -464,7 +504,7 @@ usesLoop: var ifaces []*types.Interface var notIfaces []types.Type - for t := range g.seenTypes { + g.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: 
ifaces = append(ifaces, t) @@ -473,7 +513,7 @@ usesLoop: notIfaces = append(notIfaces, t) } } - } + }) for _, iface := range ifaces { for _, t := range notIfaces { @@ -515,7 +555,7 @@ func (g *Graph) function(fn *ssa.Function) { } func (g *Graph) typ(t types.Type) { - if _, ok := g.seenTypes[t]; ok { + if g.seenTypes.At(t) != nil { return } if t, ok := t.(*types.Named); ok { @@ -523,7 +563,7 @@ func (g *Graph) typ(t types.Type) { return } } - g.seenTypes[t] = struct{}{} + g.seenTypes.Set(t, struct{}{}) g.see(t) switch t := t.(type) { @@ -663,7 +703,7 @@ func (g *Graph) instructions(fn *ssa.Function) { // See https://github.com/golang/go/issues/19670 g.see(v.Type()) - g.seeAndUse(v.Type(), fn, fmt.Sprintf("%s = %s", v.Name(), v)) + g.seeAndUse(v.Type(), fn, "instruction") g.typ(v.Type()) } } From a0d88f831ab5445b8300e42e497f0b997a9f9869 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 17 Mar 2019 05:47:12 +0100 Subject: [PATCH 040/254] unused2: use method set cache in one more place --- unused2/unused.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 35c227fa3..4e0bab1fb 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -520,8 +520,10 @@ usesLoop: if g.implements(t, iface) { for i := 0; i < iface.NumMethods(); i++ { // get the chain of embedded types that lead to the function implementing the interface - // OPT(dh): use method set cache - obj, path, _ := types.LookupFieldOrMethod(t, false, g.pkg.Pkg, iface.Method(i).Name()) + ms := g.msCache.MethodSet(t) + sel := ms.Lookup(g.pkg.Pkg, iface.Method(i).Name()) + obj := sel.Obj() + path := sel.Index() assert(obj != nil) if len(path) > 1 { base := lintdsl.Dereference(t).Underlying().(*types.Struct) From 2eb332640a566ca9508ddff4c86414e1322a9b88 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 17 Mar 2019 21:34:09 +0100 Subject: [PATCH 041/254] Vendor modified copy of go/types/typeutil Our Map implementation doesn't ignore the receivers in signatures. --- go/types/typeutil/callee.go | 46 ++++ go/types/typeutil/callee_test.go | 89 ++++++++ go/types/typeutil/example_test.go | 67 ++++++ go/types/typeutil/identical.go | 29 +++ go/types/typeutil/imports.go | 31 +++ go/types/typeutil/imports_test.go | 80 +++++++ go/types/typeutil/map.go | 315 ++++++++++++++++++++++++++++ go/types/typeutil/map_test.go | 174 +++++++++++++++ go/types/typeutil/methodsetcache.go | 72 +++++++ go/types/typeutil/ui.go | 52 +++++ go/types/typeutil/ui_test.go | 61 ++++++ unused2/unused.go | 2 +- 12 files changed, 1017 insertions(+), 1 deletion(-) create mode 100644 go/types/typeutil/callee.go create mode 100644 go/types/typeutil/callee_test.go create mode 100644 go/types/typeutil/example_test.go create mode 100644 go/types/typeutil/identical.go create mode 100644 go/types/typeutil/imports.go create mode 100644 go/types/typeutil/imports_test.go create mode 100644 go/types/typeutil/map.go create mode 100644 go/types/typeutil/map_test.go create mode 100644 go/types/typeutil/methodsetcache.go create mode 100644 go/types/typeutil/ui.go create mode 100644 go/types/typeutil/ui_test.go diff --git a/go/types/typeutil/callee.go b/go/types/typeutil/callee.go new file mode 100644 index 000000000..38f596daf --- /dev/null +++ b/go/types/typeutil/callee.go @@ -0,0 +1,46 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package typeutil + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/ast/astutil" +) + +// Callee returns the named target of a function call, if any: +// a function, method, builtin, or variable. +func Callee(info *types.Info, call *ast.CallExpr) types.Object { + var obj types.Object + switch fun := astutil.Unparen(call.Fun).(type) { + case *ast.Ident: + obj = info.Uses[fun] // type, var, builtin, or declared func + case *ast.SelectorExpr: + if sel, ok := info.Selections[fun]; ok { + obj = sel.Obj() // method or field + } else { + obj = info.Uses[fun.Sel] // qualified identifier? + } + } + if _, ok := obj.(*types.TypeName); ok { + return nil // T(x) is a conversion, not a call + } + return obj +} + +// StaticCallee returns the target (function or method) of a static +// function call, if any. It returns nil for calls to builtins. +func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { + if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { + return f + } + return nil +} + +func interfaceMethod(f *types.Func) bool { + recv := f.Type().(*types.Signature).Recv() + return recv != nil && types.IsInterface(recv.Type()) +} diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go new file mode 100644 index 000000000..6875d699f --- /dev/null +++ b/go/types/typeutil/callee_test.go @@ -0,0 +1,89 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil_test + +import ( + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + "strings" + "testing" + + "honnef.co/go/tools/go/types/typeutil" +) + +func TestStaticCallee(t *testing.T) { + const src = `package p + +import "fmt" + +type T int + +func g(int) + +var f = g + +var x int + +type s struct{ f func(int) } +func (s) g(int) + +type I interface{ f(int) } + +var a struct{b struct{c s}} + +func calls() { + g(x) // a declared func + s{}.g(x) // a concrete method + a.b.c.g(x) // same + fmt.Println(x) // declared func, qualified identifier +} + +func noncalls() { + _ = T(x) // a type + f(x) // a var + panic(x) // a built-in + s{}.f(x) // a field + I(nil).f(x) // interface method +} +` + // parse + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "p.go", src, 0) + if err != nil { + t.Fatal(err) + } + + // type-check + info := &types.Info{ + Uses: make(map[*ast.Ident]types.Object), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + cfg := &types.Config{Importer: importer.For("source", nil)} + if _, err := cfg.Check("p", fset, []*ast.File{f}, info); err != nil { + t.Fatal(err) + } + + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok && strings.HasSuffix(decl.Name.Name, "calls") { + wantCallee := decl.Name.Name == "calls" // false within func noncalls() + ast.Inspect(decl.Body, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + fn := typeutil.StaticCallee(info, call) + if fn == nil && wantCallee { + t.Errorf("%s: StaticCallee returned nil", + fset.Position(call.Lparen)) + } else if fn != nil && !wantCallee { + t.Errorf("%s: StaticCallee returned %s, want nil", + fset.Position(call.Lparen), fn) + } + } + return true + }) + } + } +} diff --git a/go/types/typeutil/example_test.go b/go/types/typeutil/example_test.go new file mode 100644 index 000000000..60c4cb5a0 --- /dev/null +++ b/go/types/typeutil/example_test.go @@ -0,0 +1,67 @@ +// Copyright 2014 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil_test + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "sort" + + "honnef.co/go/tools/go/types/typeutil" +) + +func ExampleMap() { + const source = `package P + +var X []string +var Y []string + +const p, q = 1.0, 2.0 + +func f(offset int32) (value byte, ok bool) +func g(rune) (uint8, bool) +` + + // Parse and type-check the package. + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "P.go", source, 0) + if err != nil { + panic(err) + } + pkg, err := new(types.Config).Check("P", fset, []*ast.File{f}, nil) + if err != nil { + panic(err) + } + + scope := pkg.Scope() + + // Group names of package-level objects by their type. + var namesByType typeutil.Map // value is []string + for _, name := range scope.Names() { + T := scope.Lookup(name).Type() + + names, _ := namesByType.At(T).([]string) + names = append(names, name) + namesByType.Set(T, names) + } + + // Format, sort, and print the map entries. + var lines []string + namesByType.Iterate(func(T types.Type, names interface{}) { + lines = append(lines, fmt.Sprintf("%s %s", names, T)) + }) + sort.Strings(lines) + for _, line := range lines { + fmt.Println(line) + } + + // Output: + // [X Y] []string + // [f g] func(offset int32) (value byte, ok bool) + // [p q] untyped float +} diff --git a/go/types/typeutil/identical.go b/go/types/typeutil/identical.go new file mode 100644 index 000000000..7eda29463 --- /dev/null +++ b/go/types/typeutil/identical.go @@ -0,0 +1,29 @@ +package typeutil + +import ( + "go/types" +) + +// Identical reports whether x and y are identical types. +// Unlike types.Identical, receivers of Signature types are not ignored. +func Identical(x, y types.Type) (ret bool) { + if !types.Identical(x, y) { + return false + } + sigX, ok := x.(*types.Signature) + if !ok { + return true + } + sigY, ok := y.(*types.Signature) + if !ok { + // should be impossible + return true + } + if sigX.Recv() == sigY.Recv() { + return true + } + if sigX.Recv() == nil || sigY.Recv() == nil { + return false + } + return Identical(sigX.Recv().Type(), sigY.Recv().Type()) +} diff --git a/go/types/typeutil/imports.go b/go/types/typeutil/imports.go new file mode 100644 index 000000000..9c441dba9 --- /dev/null +++ b/go/types/typeutil/imports.go @@ -0,0 +1,31 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +import "go/types" + +// Dependencies returns all dependencies of the specified packages. +// +// Dependent packages appear in topological order: if package P imports +// package Q, Q appears earlier than P in the result. +// The algorithm follows import statements in the order they +// appear in the source code, so the result is a total order. +// +func Dependencies(pkgs ...*types.Package) []*types.Package { + var result []*types.Package + seen := make(map[*types.Package]bool) + var visit func(pkgs []*types.Package) + visit = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !seen[p] { + seen[p] = true + visit(p.Imports()) + result = append(result, p) + } + } + } + visit(pkgs) + return result +} diff --git a/go/types/typeutil/imports_test.go b/go/types/typeutil/imports_test.go new file mode 100644 index 000000000..cd32806a7 --- /dev/null +++ b/go/types/typeutil/imports_test.go @@ -0,0 +1,80 @@ +// Copyright 2014 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil_test + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "testing" + + "honnef.co/go/tools/go/types/typeutil" +) + +type closure map[string]*types.Package + +func (c closure) Import(path string) (*types.Package, error) { return c[path], nil } + +func TestDependencies(t *testing.T) { + packages := make(map[string]*types.Package) + conf := types.Config{ + Importer: closure(packages), + } + fset := token.NewFileSet() + + // All edges go to the right. + // /--D--B--A + // F \_C_/ + // \__E_/ + for i, content := range []string{ + `package a`, + `package c; import (_ "a")`, + `package b; import (_ "a")`, + `package e; import (_ "c")`, + `package d; import (_ "b"; _ "c")`, + `package f; import (_ "d"; _ "e")`, + } { + f, err := parser.ParseFile(fset, fmt.Sprintf("%d.go", i), content, 0) + if err != nil { + t.Fatal(err) + } + pkg, err := conf.Check(f.Name.Name, fset, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + packages[pkg.Path()] = pkg + } + + for _, test := range []struct { + roots, want string + }{ + {"a", "a"}, + {"b", "ab"}, + {"c", "ac"}, + {"d", "abcd"}, + {"e", "ace"}, + {"f", "abcdef"}, + + {"be", "abce"}, + {"eb", "aceb"}, + {"de", "abcde"}, + {"ed", "acebd"}, + {"ef", "acebdf"}, + } { + var pkgs []*types.Package + for _, r := range test.roots { + pkgs = append(pkgs, packages[string(r)]) + } + var got string + for _, p := range typeutil.Dependencies(pkgs...) { + got += p.Path() + } + if got != test.want { + t.Errorf("Dependencies(%q) = %q, want %q", test.roots, got, test.want) + } + } +} diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go new file mode 100644 index 000000000..db0b3bce7 --- /dev/null +++ b/go/types/typeutil/map.go @@ -0,0 +1,315 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeutil defines various utilities for types, such as Map, +// a mapping from types.Type to interface{} values. +package typeutil + +import ( + "bytes" + "fmt" + "go/types" + "reflect" +) + +// Map is a hash-table-based mapping from types (types.Type) to +// arbitrary interface{} values. The concrete types that implement +// the Type interface are pointers. Since they are not canonicalized, +// == cannot be used to check for equivalence, and thus we cannot +// simply use a Go map. +// +// Just as with map[K]V, a nil *Map is a valid empty map. +// +// Not thread-safe. +// +// This fork handles Signatures correctly, respecting method receivers. +// +type Map struct { + hasher Hasher // shared by many Maps + table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused + length int // number of map entries +} + +// entry is an entry (key/value association) in a hash bucket. +type entry struct { + key types.Type + value interface{} +} + +// SetHasher sets the hasher used by Map. +// +// All Hashers are functionally equivalent but contain internal state +// used to cache the results of hashing previously seen types. +// +// A single Hasher created by MakeHasher() may be shared among many +// Maps. This is recommended if the instances have many keys in +// common, as it will amortize the cost of hash computation. +// +// A Hasher may grow without bound as new types are seen. 
Even when a +// type is deleted from the map, the Hasher never shrinks, since other +// types in the map may reference the deleted type indirectly. +// +// Hashers are not thread-safe, and read-only operations such as +// Map.Lookup require updates to the hasher, so a full Mutex lock (not a +// read-lock) is require around all Map operations if a shared +// hasher is accessed from multiple threads. +// +// If SetHasher is not called, the Map will create a private hasher at +// the first call to Insert. +// +func (m *Map) SetHasher(hasher Hasher) { + m.hasher = hasher +} + +// Delete removes the entry with the given key, if any. +// It returns true if the entry was found. +// +func (m *Map) Delete(key types.Type) bool { + if m != nil && m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + for i, e := range bucket { + if e.key != nil && Identical(key, e.key) { + // We can't compact the bucket as it + // would disturb iterators. + bucket[i] = entry{} + m.length-- + return true + } + } + } + return false +} + +// At returns the map entry for the given key. +// The result is nil if the entry is not present. +// +func (m *Map) At(key types.Type) interface{} { + if m != nil && m.table != nil { + for _, e := range m.table[m.hasher.Hash(key)] { + if e.key != nil && Identical(key, e.key) { + return e.value + } + } + } + return nil +} + +// Set sets the map entry for key to val, +// and returns the previous entry, if any. +func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { + if m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + var hole *entry + for i, e := range bucket { + if e.key == nil { + hole = &bucket[i] + } else if Identical(key, e.key) { + prev = e.value + bucket[i].value = value + return + } + } + + if hole != nil { + *hole = entry{key, value} // overwrite deleted entry + } else { + m.table[hash] = append(bucket, entry{key, value}) + } + } else { + if m.hasher.memo == nil { + m.hasher = MakeHasher() + } + hash := m.hasher.Hash(key) + m.table = map[uint32][]entry{hash: {entry{key, value}}} + } + + m.length++ + return +} + +// Len returns the number of map entries. +func (m *Map) Len() int { + if m != nil { + return m.length + } + return 0 +} + +// Iterate calls function f on each entry in the map in unspecified order. +// +// If f should mutate the map, Iterate provides the same guarantees as +// Go maps: if f deletes a map entry that Iterate has not yet reached, +// f will not be invoked for it, but if f inserts a map entry that +// Iterate has not yet reached, whether or not f will be invoked for +// it is unspecified. +// +func (m *Map) Iterate(f func(key types.Type, value interface{})) { + if m != nil { + for _, bucket := range m.table { + for _, e := range bucket { + if e.key != nil { + f(e.key, e.value) + } + } + } + } +} + +// Keys returns a new slice containing the set of map keys. +// The order is unspecified. +func (m *Map) Keys() []types.Type { + keys := make([]types.Type, 0, m.Len()) + m.Iterate(func(key types.Type, _ interface{}) { + keys = append(keys, key) + }) + return keys +} + +func (m *Map) toString(values bool) string { + if m == nil { + return "{}" + } + var buf bytes.Buffer + fmt.Fprint(&buf, "{") + sep := "" + m.Iterate(func(key types.Type, value interface{}) { + fmt.Fprint(&buf, sep) + sep = ", " + fmt.Fprint(&buf, key) + if values { + fmt.Fprintf(&buf, ": %q", value) + } + }) + fmt.Fprint(&buf, "}") + return buf.String() +} + +// String returns a string representation of the map's entries. 
+// Values are printed using fmt.Sprintf("%v", v). +// Order is unspecified. +// +func (m *Map) String() string { + return m.toString(true) +} + +// KeysString returns a string representation of the map's key set. +// Order is unspecified. +// +func (m *Map) KeysString() string { + return m.toString(false) +} + +//////////////////////////////////////////////////////////////////////// +// Hasher + +// A Hasher maps each type to its hash value. +// For efficiency, a hasher uses memoization; thus its memory +// footprint grows monotonically over time. +// Hashers are not thread-safe. +// Hashers have reference semantics. +// Call MakeHasher to create a Hasher. +type Hasher struct { + memo map[types.Type]uint32 +} + +// MakeHasher returns a new Hasher instance. +func MakeHasher() Hasher { + return Hasher{make(map[types.Type]uint32)} +} + +// Hash computes a hash value for the given type t such that +// Identical(t, t') => Hash(t) == Hash(t'). +func (h Hasher) Hash(t types.Type) uint32 { + hash, ok := h.memo[t] + if !ok { + hash = h.hashFor(t) + h.memo[t] = hash + } + return hash +} + +// hashString computes the Fowler–Noll–Vo hash of s. +func hashString(s string) uint32 { + var h uint32 + for i := 0; i < len(s); i++ { + h ^= uint32(s[i]) + h *= 16777619 + } + return h +} + +// hashFor computes the hash of t. +func (h Hasher) hashFor(t types.Type) uint32 { + // See Identical for rationale. + switch t := t.(type) { + case *types.Basic: + return uint32(t.Kind()) + + case *types.Array: + return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) + + case *types.Slice: + return 9049 + 2*h.Hash(t.Elem()) + + case *types.Struct: + var hash uint32 = 9059 + for i, n := 0, t.NumFields(); i < n; i++ { + f := t.Field(i) + if f.Anonymous() { + hash += 8861 + } + hash += hashString(t.Tag(i)) + hash += hashString(f.Name()) // (ignore f.Pkg) + hash += h.Hash(f.Type()) + } + return hash + + case *types.Pointer: + return 9067 + 2*h.Hash(t.Elem()) + + case *types.Signature: + var hash uint32 = 9091 + if t.Variadic() { + hash *= 8863 + } + return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) + + case *types.Interface: + var hash uint32 = 9103 + for i, n := 0, t.NumMethods(); i < n; i++ { + // See go/types.identicalMethods for rationale. + // Method order is not significant. + // Ignore m.Pkg(). + m := t.Method(i) + hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) + } + return hash + + case *types.Map: + return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) + + case *types.Chan: + return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) + + case *types.Named: + // Not safe with a copying GC; objects may move. + return uint32(reflect.ValueOf(t.Obj()).Pointer()) + + case *types.Tuple: + return h.hashTuple(t) + } + panic(t) +} + +func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { + // See go/types.identicalTypes for rationale. + n := tuple.Len() + var hash uint32 = 9137 + 2*uint32(n) + for i := 0; i < n; i++ { + hash += 3 * h.Hash(tuple.At(i).Type()) + } + return hash +} diff --git a/go/types/typeutil/map_test.go b/go/types/typeutil/map_test.go new file mode 100644 index 000000000..905376d8c --- /dev/null +++ b/go/types/typeutil/map_test.go @@ -0,0 +1,174 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil_test + +// TODO(adonovan): +// - test use of explicit hasher across two maps. +// - test hashcodes are consistent with equals for a range of types +// (e.g. 
all types generated by type-checking some body of real code). + +import ( + "go/types" + "testing" + + "honnef.co/go/tools/go/types/typeutil" +) + +var ( + tStr = types.Typ[types.String] // string + tPStr1 = types.NewPointer(tStr) // *string + tPStr2 = types.NewPointer(tStr) // *string, again + tInt = types.Typ[types.Int] // int + tChanInt1 = types.NewChan(types.RecvOnly, tInt) // <-chan int + tChanInt2 = types.NewChan(types.RecvOnly, tInt) // <-chan int, again +) + +func checkEqualButNotIdentical(t *testing.T, x, y types.Type, comment string) { + if !types.Identical(x, y) { + t.Errorf("%s: not equal: %s, %s", comment, x, y) + } + if x == y { + t.Errorf("%s: identical: %v, %v", comment, x, y) + } +} + +func TestAxioms(t *testing.T) { + checkEqualButNotIdentical(t, tPStr1, tPStr2, "tPstr{1,2}") + checkEqualButNotIdentical(t, tChanInt1, tChanInt2, "tChanInt{1,2}") +} + +func TestMap(t *testing.T) { + var tmap *typeutil.Map + + // All methods but Set are safe on on (*T)(nil). + tmap.Len() + tmap.At(tPStr1) + tmap.Delete(tPStr1) + tmap.KeysString() + tmap.String() + + tmap = new(typeutil.Map) + + // Length of empty map. + if l := tmap.Len(); l != 0 { + t.Errorf("Len() on empty Map: got %d, want 0", l) + } + // At of missing key. + if v := tmap.At(tPStr1); v != nil { + t.Errorf("At() on empty Map: got %v, want nil", v) + } + // Deletion of missing key. + if tmap.Delete(tPStr1) { + t.Errorf("Delete() on empty Map: got true, want false") + } + // Set of new key. + if prev := tmap.Set(tPStr1, "*string"); prev != nil { + t.Errorf("Set() on empty Map returned non-nil previous value %s", prev) + } + + // Now: {*string: "*string"} + + // Length of non-empty map. + if l := tmap.Len(); l != 1 { + t.Errorf("Len(): got %d, want 1", l) + } + // At via insertion key. + if v := tmap.At(tPStr1); v != "*string" { + t.Errorf("At(): got %q, want \"*string\"", v) + } + // At via equal key. + if v := tmap.At(tPStr2); v != "*string" { + t.Errorf("At(): got %q, want \"*string\"", v) + } + // Iteration over sole entry. + tmap.Iterate(func(key types.Type, value interface{}) { + if key != tPStr1 { + t.Errorf("Iterate: key: got %s, want %s", key, tPStr1) + } + if want := "*string"; value != want { + t.Errorf("Iterate: value: got %s, want %s", value, want) + } + }) + + // Setion with key equal to present one. + if prev := tmap.Set(tPStr2, "*string again"); prev != "*string" { + t.Errorf("Set() previous value: got %s, want \"*string\"", prev) + } + + // Setion of another association. + if prev := tmap.Set(tChanInt1, "<-chan int"); prev != nil { + t.Errorf("Set() previous value: got %s, want nil", prev) + } + + // Now: {*string: "*string again", <-chan int: "<-chan int"} + + want1 := "{*string: \"*string again\", <-chan int: \"<-chan int\"}" + want2 := "{<-chan int: \"<-chan int\", *string: \"*string again\"}" + if s := tmap.String(); s != want1 && s != want2 { + t.Errorf("String(): got %s, want %s", s, want1) + } + + want1 = "{*string, <-chan int}" + want2 = "{<-chan int, *string}" + if s := tmap.KeysString(); s != want1 && s != want2 { + t.Errorf("KeysString(): got %s, want %s", s, want1) + } + + // Keys(). + I := types.Identical + switch k := tmap.Keys(); { + case I(k[0], tChanInt1) && I(k[1], tPStr1): // ok + case I(k[1], tChanInt1) && I(k[0], tPStr1): // ok + default: + t.Errorf("Keys(): got %v, want %s", k, want2) + } + + if l := tmap.Len(); l != 2 { + t.Errorf("Len(): got %d, want 1", l) + } + // At via original key. 
+ if v := tmap.At(tPStr1); v != "*string again" { + t.Errorf("At(): got %q, want \"*string again\"", v) + } + hamming := 1 + tmap.Iterate(func(key types.Type, value interface{}) { + switch { + case I(key, tChanInt1): + hamming *= 2 // ok + case I(key, tPStr1): + hamming *= 3 // ok + } + }) + if hamming != 6 { + t.Errorf("Iterate: hamming: got %d, want %d", hamming, 6) + } + + if v := tmap.At(tChanInt2); v != "<-chan int" { + t.Errorf("At(): got %q, want \"<-chan int\"", v) + } + // Deletion with key equal to present one. + if !tmap.Delete(tChanInt2) { + t.Errorf("Delete() of existing key: got false, want true") + } + + // Now: {*string: "*string again"} + + if l := tmap.Len(); l != 1 { + t.Errorf("Len(): got %d, want 1", l) + } + // Deletion again. + if !tmap.Delete(tPStr2) { + t.Errorf("Delete() of existing key: got false, want true") + } + + // Now: {} + + if l := tmap.Len(); l != 0 { + t.Errorf("Len(): got %d, want %d", l, 0) + } + if s := tmap.String(); s != "{}" { + t.Errorf("Len(): got %q, want %q", s, "") + } +} diff --git a/go/types/typeutil/methodsetcache.go b/go/types/typeutil/methodsetcache.go new file mode 100644 index 000000000..32084610f --- /dev/null +++ b/go/types/typeutil/methodsetcache.go @@ -0,0 +1,72 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements a cache of method sets. + +package typeutil + +import ( + "go/types" + "sync" +) + +// A MethodSetCache records the method set of each type T for which +// MethodSet(T) is called so that repeat queries are fast. +// The zero value is a ready-to-use cache instance. +type MethodSetCache struct { + mu sync.Mutex + named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N + others map[types.Type]*types.MethodSet // all other types +} + +// MethodSet returns the method set of type T. It is thread-safe. +// +// If cache is nil, this function is equivalent to types.NewMethodSet(T). +// Utility functions can thus expose an optional *MethodSetCache +// parameter to clients that care about performance. +// +func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { + if cache == nil { + return types.NewMethodSet(T) + } + cache.mu.Lock() + defer cache.mu.Unlock() + + switch T := T.(type) { + case *types.Named: + return cache.lookupNamed(T).value + + case *types.Pointer: + if N, ok := T.Elem().(*types.Named); ok { + return cache.lookupNamed(N).pointer + } + } + + // all other types + // (The map uses pointer equivalence, not type identity.) + mset := cache.others[T] + if mset == nil { + mset = types.NewMethodSet(T) + if cache.others == nil { + cache.others = make(map[types.Type]*types.MethodSet) + } + cache.others[T] = mset + } + return mset +} + +func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } { + if cache.named == nil { + cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet }) + } + // Avoid recomputing mset(*T) for each distinct Pointer + // instance whose underlying type is a named type. 
+ msets, ok := cache.named[named] + if !ok { + msets.value = types.NewMethodSet(named) + msets.pointer = types.NewMethodSet(types.NewPointer(named)) + cache.named[named] = msets + } + return msets +} diff --git a/go/types/typeutil/ui.go b/go/types/typeutil/ui.go new file mode 100644 index 000000000..9849c24ce --- /dev/null +++ b/go/types/typeutil/ui.go @@ -0,0 +1,52 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +// This file defines utilities for user interfaces that display types. + +import "go/types" + +// IntuitiveMethodSet returns the intuitive method set of a type T, +// which is the set of methods you can call on an addressable value of +// that type. +// +// The result always contains MethodSet(T), and is exactly MethodSet(T) +// for interface types and for pointer-to-concrete types. +// For all other concrete types T, the result additionally +// contains each method belonging to *T if there is no identically +// named method on T itself. +// +// This corresponds to user intuition about method sets; +// this function is intended only for user interfaces. +// +// The order of the result is as for types.MethodSet(T). +// +func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { + isPointerToConcrete := func(T types.Type) bool { + ptr, ok := T.(*types.Pointer) + return ok && !types.IsInterface(ptr.Elem()) + } + + var result []*types.Selection + mset := msets.MethodSet(T) + if types.IsInterface(T) || isPointerToConcrete(T) { + for i, n := 0, mset.Len(); i < n; i++ { + result = append(result, mset.At(i)) + } + } else { + // T is some other concrete type. + // Report methods of T and *T, preferring those of T. 
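+		// The method set of *T always contains that of T, so the loop below
+		// walks pmset and, whenever a method also appears in mset, reports
+		// T's selection instead of the pointer one.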
+ pmset := msets.MethodSet(types.NewPointer(T)) + for i, n := 0, pmset.Len(); i < n; i++ { + meth := pmset.At(i) + if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil { + meth = m + } + result = append(result, meth) + } + + } + return result +} diff --git a/go/types/typeutil/ui_test.go b/go/types/typeutil/ui_test.go new file mode 100644 index 000000000..eba4c7fa0 --- /dev/null +++ b/go/types/typeutil/ui_test.go @@ -0,0 +1,61 @@ +package typeutil_test + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "strings" + "testing" + + "honnef.co/go/tools/go/types/typeutil" +) + +func TestIntuitiveMethodSet(t *testing.T) { + const source = ` +package P +type A int +func (A) f() +func (*A) g() +` + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "hello.go", source, 0) + if err != nil { + t.Fatal(err) + } + + var conf types.Config + pkg, err := conf.Check("P", fset, []*ast.File{f}, nil) + if err != nil { + t.Fatal(err) + } + qual := types.RelativeTo(pkg) + + for _, test := range []struct { + expr string // type expression + want string // intuitive method set + }{ + {"A", "(A).f (*A).g"}, + {"*A", "(*A).f (*A).g"}, + {"error", "(error).Error"}, + {"*error", ""}, + {"struct{A}", "(struct{A}).f (*struct{A}).g"}, + {"*struct{A}", "(*struct{A}).f (*struct{A}).g"}, + } { + tv, err := types.Eval(fset, pkg, 0, test.expr) + if err != nil { + t.Errorf("Eval(%s) failed: %v", test.expr, err) + } + var names []string + for _, m := range typeutil.IntuitiveMethodSet(tv.Type, nil) { + name := fmt.Sprintf("(%s).%s", types.TypeString(m.Recv(), qual), m.Obj().Name()) + names = append(names, name) + } + got := strings.Join(names, " ") + if got != test.want { + t.Errorf("IntuitiveMethodSet(%s) = %q, want %q", test.expr, got, test.want) + } + } +} diff --git a/unused2/unused.go b/unused2/unused.go index 4e0bab1fb..83ad3019f 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -7,7 +7,7 @@ import ( "go/types" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/types/typeutil" + "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" From 90cca819ea69f53fb4ef3dd637e5cedb1584f726 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 17 Mar 2019 22:48:34 +0100 Subject: [PATCH 042/254] unused2: remove use of PathEnclosingInterval Use a smarter implementation for consts. Doesn't currently handle blank identifiers. 
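The replacement keys everything on scopes instead of AST paths: every
function's *types.Scope is mapped to the function that owns it, and the
function surrounding a constant (if any) is found by walking obj.Parent()
upward until one of those scopes is hit. A rough, self-contained sketch of
that scope walk, using plain go/types rather than the SSA-based lookup in the
diff below (the package "P", the sample constants, and the surrounding helper
are made up for illustration):

    package main

    import (
    	"fmt"
    	"go/ast"
    	"go/parser"
    	"go/token"
    	"go/types"
    )

    const src = "package P\nconst Exported = 1\nfunc Fn() { const local = 2 }\n"

    func main() {
    	fset := token.NewFileSet()
    	f, err := parser.ParseFile(fset, "P.go", src, 0)
    	if err != nil {
    		panic(err)
    	}
    	info := &types.Info{Defs: map[*ast.Ident]types.Object{}}
    	var conf types.Config
    	if _, err := conf.Check("P", fset, []*ast.File{f}, info); err != nil {
    		panic(err)
    	}

    	// Map every function scope to the function that owns it.
    	funcScopes := map[*types.Scope]*types.Func{}
    	for _, obj := range info.Defs {
    		if fn, ok := obj.(*types.Func); ok {
    			funcScopes[fn.Scope()] = fn
    		}
    	}

    	// surrounding walks obj.Parent() upward and reports the function
    	// whose scope encloses obj, or nil for package-level objects.
    	surrounding := func(obj types.Object) *types.Func {
    		for scope := obj.Parent(); scope != nil; scope = scope.Parent() {
    			if fn, ok := funcScopes[scope]; ok {
    				return fn
    			}
    		}
    		return nil
    	}

    	for _, obj := range info.Defs {
    		if c, ok := obj.(*types.Const); ok {
    			if fn := surrounding(c); fn != nil {
    				fmt.Printf("const %s is local to func %s\n", c.Name(), fn.Name())
    			} else {
    				fmt.Printf("const %s is package-level\n", c.Name())
    			}
    		}
    	}
    }

Package-level constants are exactly the ones for which the walk finds no
enclosing function, which is the case in which an exported constant gets
marked as used by the root node.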
--- unused2/testdata/src/consts/consts.go | 5 ++ unused2/unused.go | 104 +++++++++++++++----------- 2 files changed, 65 insertions(+), 44 deletions(-) diff --git a/unused2/testdata/src/consts/consts.go b/unused2/testdata/src/consts/consts.go index 6906f1e38..f458cb011 100644 --- a/unused2/testdata/src/consts/consts.go +++ b/unused2/testdata/src/consts/consts.go @@ -5,6 +5,7 @@ const c1 = 1 const c2 = 1 const c3 = 1 const c4 = 1 +const C5 = 1 var _ = []int{c3: 1} @@ -16,3 +17,7 @@ func init() { _ = []int{c2: 1} var _ [c4]int } + +func Fn() { + const X = 1 // MATCH "X is unused" +} diff --git a/unused2/unused.go b/unused2/unused.go index 83ad3019f..5d8dd66e5 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -6,7 +6,6 @@ import ( "go/token" "go/types" - "golang.org/x/tools/go/ast/astutil" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintdsl" @@ -402,71 +401,88 @@ func (g *Graph) seeAndUse(used, by interface{}, reason string) { } func (g *Graph) entry(tinfo *types.Info) { - scopes := map[*types.Scope]ast.Node{} - for node, scope := range tinfo.Scopes { - switch node.(type) { - case *ast.File, *ast.FuncType: - scopes[scope] = node + // TODO rename Entry + + scopes := map[*types.Scope]*ssa.Function{} + for _, fn := range g.job.Program.InitialFunctions { + if fn.Pkg != g.pkg { + continue + } + if fn.Object() != nil { + scope := fn.Object().(*types.Func).Scope() + scopes[scope] = fn } } - // TODO rename Entry + + surroundingFunc := func(obj types.Object) *ssa.Function { + scope := obj.Parent() + for scope != nil { + if fn := scopes[scope]; fn != nil { + return fn + } + scope = scope.Parent() + } + return nil + } // SSA form won't tell us about locally scoped types that aren't // being used. Walk the list of Defs to get all named types. // // SSA form also won't tell us about constants; use Defs and Uses // to determine which constants exist and which are being used. - for def, obj := range tinfo.Defs { - if def.Name == "_" && obj != nil { - path, _ := astutil.PathEnclosingInterval(g.job.File(def), def.Pos(), def.Pos()) - for _, p := range path { - if decl, ok := p.(*ast.FuncDecl); ok { - fnObj := lintdsl.ObjectOf(g.job, decl.Name) - fn := g.pkg.Prog.FuncValue(fnObj.(*types.Func)) - g.see(fn) - // Note that this isn't necessarily the tightest - // possible match. This will only match named - // functions, not closures, for example. - g.seeAndUse(obj.Type(), fn, "defined as blank") - break - } - } - } + for _, obj := range tinfo.Defs { switch obj := obj.(type) { case *types.TypeName: g.see(obj) g.typ(obj.Type()) case *types.Const: g.see(obj) - // FIXME(dh): we don't know the scope of the constant, it - // may be local to a function - if obj.Exported() { + fn := surroundingFunc(obj) + if fn == nil && obj.Exported() { g.use(obj, nil, "exported constant") } g.typ(obj.Type()) g.seeAndUse(obj.Type(), obj, "constant type") } } -usesLoop: - for use, obj := range tinfo.Uses { - switch obj := obj.(type) { - case *types.Const: - path, _ := astutil.PathEnclosingInterval(g.job.File(use), use.Pos(), use.Pos()) - for _, p := range path { - if decl, ok := p.(*ast.FuncDecl); ok { - fnObj := lintdsl.ObjectOf(g.job, decl.Name) - fn := g.pkg.Prog.FuncValue(fnObj.(*types.Func)) - g.see(fn) - // Note that this isn't necessarily the tightest - // possible match. This will only match named - // functions, not closures, for example. 
- g.seeAndUse(obj, fn, "used constant") - continue usesLoop - } + + // Find constants being used inside functions + handledConsts := map[*ast.Ident]struct{}{} + for _, fn := range g.job.Program.InitialFunctions { + if fn.Pkg != g.pkg { + continue + } + node := fn.Syntax() + if node == nil { + continue + } + ast.Inspect(node, func(node ast.Node) bool { + ident, ok := node.(*ast.Ident) + if !ok { + return true } - // we couldn't find the surrounding function, so mark it as used by the root node. - g.seeAndUse(obj, nil, "used constant") + + obj, ok := tinfo.Uses[ident] + if !ok { + return true + } + switch obj := obj.(type) { + case *types.Const: + g.seeAndUse(obj, fn, "used constant") + } + return true + }) + } + // Find constants being used in non-function contexts + for ident, obj := range tinfo.Uses { + _, ok := obj.(*types.Const) + if !ok { + continue + } + if _, ok := handledConsts[ident]; ok { + continue } + g.seeAndUse(obj, nil, "used constant") } for _, m := range g.pkg.Members { From e7b15a9c5b2418208ebcfbea7a2ff41243fabaa9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 00:40:49 +0100 Subject: [PATCH 043/254] Use inspector package to speed up repeated AST traversals --- lint/lint.go | 3 + simple/lint.go | 813 +++++++++++++++++--------------------------- staticcheck/lint.go | 478 +++++++++----------------- stylecheck/lint.go | 58 ++-- 4 files changed, 505 insertions(+), 847 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index c81f6e826..5a9c66fa2 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -15,6 +15,7 @@ import ( "time" "unicode" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/packages" "honnef.co/go/tools/config" "honnef.co/go/tools/ssa" @@ -112,6 +113,7 @@ type Program struct { AllPackages []*packages.Package AllFunctions []*ssa.Function Files []*ast.File + Inspector *inspector.Inspector GoVersion int tokenFileMap map[*token.File]*ast.File @@ -334,6 +336,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { prog.astFileMap[f] = pkgMap[ssapkg] } } + prog.Inspector = inspector.New(prog.Files) for _, pkg := range allPkgs { for _, f := range pkg.Syntax { diff --git a/simple/lint.go b/simple/lint.go index 7f421b414..606b0505f 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -2,6 +2,7 @@ package simple // import "honnef.co/go/tools/simple" import ( + "fmt" "go/ast" "go/constant" "go/token" @@ -77,129 +78,119 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { } seen := map[ast.Node]struct{}{} - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { switch v := node.(type) { case *ast.ForStmt: if len(v.Body.List) != 1 { - return true + return } if !isSingleSelect(v.Body.List[0]) { - return true + return } if _, ok := v.Body.List[0].(*ast.SelectStmt).Body.List[0].(*ast.CommClause).Comm.(*ast.SendStmt); ok { // Don't suggest using range for channel sends - return true + return } seen[v.Body.List[0]] = struct{}{} j.Errorf(node, "should use for range instead of for { select {} }") case *ast.SelectStmt: if _, ok := seen[v]; ok { - return true + return } if !isSingleSelect(v) { - return true + return } j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") - return true } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) } func (c *Checker) LintLoopCopy(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - 
if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.RangeStmt) if loop.Key == nil { - return true + return } if len(loop.Body.List) != 1 { - return true + return } stmt, ok := loop.Body.List[0].(*ast.AssignStmt) if !ok { - return true + return } if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { - return true + return } lhs, ok := stmt.Lhs[0].(*ast.IndexExpr) if !ok { - return true + return } if _, ok := TypeOf(j, lhs.X).(*types.Slice); !ok { - return true + return } lidx, ok := lhs.Index.(*ast.Ident) if !ok { - return true + return } key, ok := loop.Key.(*ast.Ident) if !ok { - return true + return } if TypeOf(j, lhs) == nil || TypeOf(j, stmt.Rhs[0]) == nil { - return true + return } if ObjectOf(j, lidx) != ObjectOf(j, key) { - return true + return } if !types.Identical(TypeOf(j, lhs), TypeOf(j, stmt.Rhs[0])) { - return true + return } if _, ok := TypeOf(j, loop.X).(*types.Slice); !ok { - return true + return } if rhs, ok := stmt.Rhs[0].(*ast.IndexExpr); ok { rx, ok := rhs.X.(*ast.Ident) _ = rx if !ok { - return true + return } ridx, ok := rhs.Index.(*ast.Ident) if !ok { - return true + return } if ObjectOf(j, ridx) != ObjectOf(j, key) { - return true + return } } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { value, ok := loop.Value.(*ast.Ident) if !ok { - return true + return } if ObjectOf(j, rhs) != ObjectOf(j, value) { - return true + return } } else { - return true + return } j.Errorf(loop, "should use copy() instead of a loop") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintIfBoolCmp(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok || (expr.Op != token.EQL && expr.Op != token.NEQ) { - return true + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) + if expr.Op != token.EQL && expr.Op != token.NEQ { + return } x := IsBoolConst(j, expr.X) y := IsBoolConst(j, expr.Y) if !x && !y { - return true + return } var other ast.Expr var val bool @@ -212,7 +203,7 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { } basic, ok := TypeOf(j, other).Underlying().(*types.Basic) if !ok || basic.Kind() != types.Bool { - return true + return } op := "" if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { @@ -225,30 +216,27 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { r = "!" 
+ r } if IsInTest(j, node) { - return true + return } j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesBufferConversions(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok || len(call.Args) != 1 { - return true + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) + if len(call.Args) != 1 { + return } argCall, ok := call.Args[0].(*ast.CallExpr) if !ok { - return true + return } sel, ok := argCall.Fun.(*ast.SelectorExpr) if !ok { - return true + return } typ := TypeOf(j, call.Fun) @@ -258,11 +246,8 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { j.Errorf(call, "should use %v.Bytes() instead of %v", Render(j, sel.X), Render(j, call)) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintStringsContains(j *lint.Job) { @@ -271,46 +256,43 @@ func (c *Checker) LintStringsContains(j *lint.Job) { -1: {token.GTR: true, token.NEQ: true, token.EQL: false}, 0: {token.GEQ: true, token.LSS: false}, } - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) switch expr.Op { case token.GEQ, token.GTR, token.NEQ, token.LSS, token.EQL: default: - return true + return } value, ok := ExprToInt(j, expr.Y) if !ok { - return true + return } allowedOps, ok := allowed[value] if !ok { - return true + return } b, ok := allowedOps[expr.Op] if !ok { - return true + return } call, ok := expr.X.(*ast.CallExpr) if !ok { - return true + return } sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } pkgIdent, ok := sel.X.(*ast.Ident) if !ok { - return true + return } funIdent := sel.Sel if pkgIdent.Name != "strings" && pkgIdent.Name != "bytes" { - return true + return } newFunc := "" switch funIdent.Name { @@ -321,7 +303,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { case "Index": newFunc = "Contains" default: - return true + return } prefix := "" @@ -329,33 +311,26 @@ func (c *Checker) LintStringsContains(j *lint.Job) { prefix = "!" } j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesCompare(j *lint.Job) { - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + expr := node.(*ast.BinaryExpr) if expr.Op != token.NEQ && expr.Op != token.EQL { - return true + return } call, ok := expr.X.(*ast.CallExpr) if !ok { - return true + return } if !IsCallToAST(j, call, "bytes.Compare") { - return true + return } value, ok := ExprToInt(j, expr.Y) if !ok || value != 0 { - return true + return } args := RenderArgs(j, call.Args) prefix := "" @@ -363,70 +338,58 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { prefix = "!" 
} j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintForTrue(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.ForStmt) if loop.Init != nil || loop.Post != nil { - return true + return } if !IsBoolConst(j, loop.Cond) || !BoolConst(j, loop.Cond) { - return true + return } j.Errorf(loop, "should use for {} instead of for true {}") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintRegexpRaw(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "regexp.MustCompile") && !IsCallToAST(j, call, "regexp.Compile") { - return true + return } sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } if len(call.Args) != 1 { // invalid function call - return true + return } lit, ok := call.Args[Arg("regexp.Compile.expr")].(*ast.BasicLit) if !ok { // TODO(dominikh): support string concat, maybe support constants - return true + return } if lit.Kind != token.STRING { // invalid function call - return true + return } if lit.Value[0] != '"' { // already a raw string - return true + return } val := lit.Value if !strings.Contains(val, `\\`) { - return true + return } if strings.Contains(val, "`") { - return true + return } bs := false @@ -441,34 +404,28 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { } if bs { // backslash followed by non-backslash -> escape sequence - return true + return } } j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintIfReturn(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) l := len(block.List) if l < 2 { - return true + return } n1, n2 := block.List[l-2], block.List[l-1] if len(block.List) >= 3 { if _, ok := block.List[l-3].(*ast.IfStmt); ok { // Do not flag a series of if statements - return true + return } } // if statement with no init, no else, a single condition @@ -476,48 +433,45 @@ func (c *Checker) LintIfReturn(j *lint.Job) { // statement in the body, that returns a boolean constant ifs, ok := n1.(*ast.IfStmt) if !ok { - return true + return } if ifs.Else != nil || ifs.Init != nil { - return true + return } if len(ifs.Body.List) != 1 { - return true + return } if op, ok := ifs.Cond.(*ast.BinaryExpr); ok { switch op.Op { case token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: default: - return true + return } } ret1, ok := ifs.Body.List[0].(*ast.ReturnStmt) if !ok { - return true + return } if len(ret1.Results) != 1 { - return true + return } if !IsBoolConst(j, ret1.Results[0]) { - return true + return } ret2, ok := n2.(*ast.ReturnStmt) if !ok { - return true + return } if len(ret2.Results) != 1 { - return true + return } if !IsBoolConst(j, ret2.Results[0]) { - return true + return } j.Errorf(n1, "should use 'return ' instead 
of 'if { return }; return '") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } // LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: @@ -545,89 +499,86 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { return true, c.Val().Kind() == constant.Int && c.Val().String() == "0" } - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { // check that expr is "x || y" or "x && y" - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + expr := node.(*ast.BinaryExpr) if expr.Op != token.LOR && expr.Op != token.LAND { - return true + return } eqNil := expr.Op == token.LOR // check that x is "xx == nil" or "xx != nil" x, ok := expr.X.(*ast.BinaryExpr) if !ok { - return true + return } if eqNil && x.Op != token.EQL { - return true + return } if !eqNil && x.Op != token.NEQ { - return true + return } xx, ok := x.X.(*ast.Ident) if !ok { - return true + return } if !IsNil(j, x.Y) { - return true + return } // check that y is "len(xx) == 0" or "len(xx) ... " y, ok := expr.Y.(*ast.BinaryExpr) if !ok { - return true + return } if eqNil && y.Op != token.EQL { // must be len(xx) *==* 0 - return false + return } yx, ok := y.X.(*ast.CallExpr) if !ok { - return true + return } yxFun, ok := yx.Fun.(*ast.Ident) if !ok || yxFun.Name != "len" || len(yx.Args) != 1 { - return true + return } yxArg, ok := yx.Args[Arg("len.v")].(*ast.Ident) if !ok { - return true + return } if yxArg.Name != xx.Name { - return true + return } if eqNil && !IsZero(y.Y) { // must be len(x) == *0* - return true + return } if !eqNil { isConst, isZero := isConstZero(y.Y) if !isConst { - return true + return } switch y.Op { case token.EQL: // avoid false positive for "xx != nil && len(xx) == 0" if isZero { - return true + return } case token.GEQ: // avoid false positive for "xx != nil && len(xx) >= 0" if isZero { - return true + return } case token.NEQ: // avoid false positive for "xx != nil && len(xx) != " if !isZero { - return true + return } case token.GTR: // ok default: - return true + return } } @@ -642,50 +593,41 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { case *types.Chan: nilType = "nil channels" default: - return true + return } j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintSlicing(j *lint.Job) { - fn := func(node ast.Node) bool { - n, ok := node.(*ast.SliceExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + n := node.(*ast.SliceExpr) if n.Max != nil { - return true + return } s, ok := n.X.(*ast.Ident) if !ok || s.Obj == nil { - return true + return } call, ok := n.High.(*ast.CallExpr) if !ok || len(call.Args) != 1 || call.Ellipsis.IsValid() { - return true + return } fun, ok := call.Fun.(*ast.Ident) if !ok || fun.Name != "len" { - return true + return } if _, ok := ObjectOf(j, fun).(*types.Builtin); !ok { - return true + return } arg, ok := call.Args[Arg("len.v")].(*ast.Ident) if !ok || arg.Obj != s.Obj { - return true + return } j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) } func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { @@ 
-706,46 +648,43 @@ func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { } func (c *Checker) LintLoopAppend(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.RangeStmt) if !IsBlank(loop.Key) { - return true + return } val, ok := loop.Value.(*ast.Ident) if !ok { - return true + return } if len(loop.Body.List) != 1 { - return true + return } stmt, ok := loop.Body.List[0].(*ast.AssignStmt) if !ok { - return true + return } if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { - return true + return } if refersTo(j, stmt.Lhs[0], val) { - return true + return } call, ok := stmt.Rhs[0].(*ast.CallExpr) if !ok { - return true + return } if len(call.Args) != 2 || call.Ellipsis.IsValid() { - return true + return } fun, ok := call.Fun.(*ast.Ident) if !ok { - return true + return } obj := ObjectOf(j, fun) fn, ok := obj.(*types.Builtin) if !ok || fn.Name() != "append" { - return true + return } src := TypeOf(j, loop.X) @@ -753,85 +692,67 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { // TODO(dominikh) remove nil check once Go issue #15173 has // been fixed if src == nil { - return true + return } if !types.Identical(src, dst) { - return true + return } if Render(j, stmt.Lhs[0]) != Render(j, call.Args[Arg("append.slice")]) { - return true + return } el, ok := call.Args[Arg("append.elems")].(*ast.Ident) if !ok { - return true + return } if ObjectOf(j, val) != ObjectOf(j, el) { - return true + return } j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintTimeSince(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } if !IsCallToAST(j, sel.X, "time.Now") { - return true + return } if sel.Sel.Name != "Sub" { - return true + return } j.Errorf(call, "should use time.Since instead of time.Now().Sub") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintTimeUntil(j *lint.Job) { if !IsGoVersion(j, 8) { return } - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "(time.Time).Sub") { - return true + return } if !IsCallToAST(j, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { - return true + return } j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { fn1 := func(node ast.Node) { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return - } + assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { return } @@ -858,10 +779,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } fn2 := func(node ast.Node) { - stmt, ok := node.(*ast.AssignStmt) - if !ok { - return - } + stmt := node.(*ast.AssignStmt) if 
len(stmt.Lhs) != len(stmt.Rhs) { return } @@ -882,10 +800,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } fn3 := func(node ast.Node) { - rs, ok := node.(*ast.RangeStmt) - if !ok { - return - } + rs := node.(*ast.RangeStmt) // for x, _ if !IsBlank(rs.Key) && IsBlank(rs.Value) { @@ -897,45 +812,39 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } } - fn := func(node ast.Node) bool { - fn1(node) - fn2(node) - if IsGoVersion(j, 4) { - fn3(node) - } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) + j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) + j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) + if IsGoVersion(j, 4) { + j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) } } func (c *Checker) LintSimplerStructConversion(j *lint.Job) { var skip ast.Node - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { // Do not suggest type conversion between pointers if unary, ok := node.(*ast.UnaryExpr); ok && unary.Op == token.AND { if lit, ok := unary.X.(*ast.CompositeLit); ok { skip = lit } - return true + return } if node == skip { - return true + return } lit, ok := node.(*ast.CompositeLit) if !ok { - return true + return } typ1, _ := TypeOf(j, lit.Type).(*types.Named) if typ1 == nil { - return true + return } s1, ok := typ1.Underlying().(*types.Struct) if !ok { - return true + return } var typ2 *types.Named @@ -953,10 +862,10 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { return typ, ident, typ != nil } if len(lit.Elts) == 0 { - return true + return } if s1.NumFields() != len(lit.Elts) { - return true + return } for i, elt := range lit.Elts { var t types.Type @@ -966,39 +875,39 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { case *ast.SelectorExpr: t, id, ok = getSelType(elt) if !ok { - return true + return } if i >= s1.NumFields() || s1.Field(i).Name() != elt.Sel.Name { - return true + return } case *ast.KeyValueExpr: var sel *ast.SelectorExpr sel, ok = elt.Value.(*ast.SelectorExpr) if !ok { - return true + return } if elt.Key.(*ast.Ident).Name != sel.Sel.Name { - return true + return } t, id, ok = getSelType(elt.Value) } if !ok { - return true + return } // All fields must be initialized from the same object if ident != nil && ident.Obj != id.Obj { - return true + return } typ2, _ = t.(*types.Named) if typ2 == nil { - return true + return } ident = id } if typ2 == nil { - return true + return } if typ1.Obj().Pkg() != typ2.Obj().Pkg() { @@ -1007,32 +916,29 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { // by coincidence. Furthermore, if the dependency ever // adds more fields to its type, it could break the code // that relies on the type conversion to work. 
- return true + return } s2, ok := typ2.Underlying().(*types.Struct) if !ok { - return true + return } if typ1 == typ2 { - return true + return } if IsGoVersion(j, 8) { if !types.IdenticalIgnoreTags(s1, s2) { - return true + return } } else { if !types.Identical(s1, s2) { - return true + return } } j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) } func (c *Checker) LintTrim(j *lint.Job) { @@ -1066,26 +972,23 @@ func (c *Checker) LintTrim(j *lint.Job) { return sameNonDynamic(call.Args[Arg("len.v")], ident) } - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { var pkg string var fun string - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + ifstmt := node.(*ast.IfStmt) if ifstmt.Init != nil { - return true + return } if ifstmt.Else != nil { - return true + return } if len(ifstmt.Body.List) != 1 { - return true + return } condCall, ok := ifstmt.Cond.(*ast.CallExpr) if !ok { - return true + return } switch { case IsCallToAST(j, condCall, "strings.HasPrefix"): @@ -1107,27 +1010,27 @@ func (c *Checker) LintTrim(j *lint.Job) { pkg = "bytes" fun = "Contains" default: - return true + return } assign, ok := ifstmt.Body.List[0].(*ast.AssignStmt) if !ok { - return true + return } if assign.Tok != token.ASSIGN { - return true + return } if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return true + return } if !sameNonDynamic(condCall.Args[0], assign.Lhs[0]) { - return true + return } switch rhs := assign.Rhs[0].(type) { case *ast.CallExpr: if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) { - return true + return } if IsCallToAST(j, condCall, "strings.HasPrefix") && IsCallToAST(j, rhs, "strings.TrimPrefix") || IsCallToAST(j, condCall, "strings.HasSuffix") && IsCallToAST(j, rhs, "strings.TrimSuffix") || @@ -1137,17 +1040,17 @@ func (c *Checker) LintTrim(j *lint.Job) { IsCallToAST(j, condCall, "bytes.Contains") && IsCallToAST(j, rhs, "bytes.Replace") { j.Errorf(ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(j, rhs)) } - return true + return case *ast.SliceExpr: slice := rhs if !ok { - return true + return } if slice.Slice3 { - return true + return } if !sameNonDynamic(slice.X, condCall.Args[0]) { - return true + return } var index ast.Expr switch fun { @@ -1155,14 +1058,14 @@ func (c *Checker) LintTrim(j *lint.Job) { // TODO(dh) We could detect a High that is len(s), but another // rule will already flag that, anyway. 
if slice.High != nil { - return true + return } index = slice.Low case "HasSuffix": if slice.Low != nil { n, ok := ExprToInt(j, slice.Low) if !ok || n != 0 { - return true + return } } index = slice.High @@ -1171,59 +1074,59 @@ func (c *Checker) LintTrim(j *lint.Job) { switch index := index.(type) { case *ast.CallExpr: if fun != "HasPrefix" { - return true + return } if fn, ok := index.Fun.(*ast.Ident); !ok || fn.Name != "len" { - return true + return } if len(index.Args) != 1 { - return true + return } id3 := index.Args[Arg("len.v")] switch oid3 := condCall.Args[1].(type) { case *ast.BasicLit: if pkg != "strings" { - return false + return } lit, ok := id3.(*ast.BasicLit) if !ok { - return true + return } s1, ok1 := ExprToString(j, lit) s2, ok2 := ExprToString(j, condCall.Args[1]) if !ok1 || !ok2 || s1 != s2 { - return true + return } default: if !sameNonDynamic(id3, oid3) { - return true + return } } case *ast.BasicLit, *ast.Ident: if fun != "HasPrefix" { - return true + return } if pkg != "strings" { - return true + return } string, ok1 := ExprToString(j, condCall.Args[1]) int, ok2 := ExprToInt(j, slice.Low) if !ok1 || !ok2 || int != int64(len(string)) { - return true + return } case *ast.BinaryExpr: if fun != "HasSuffix" { - return true + return } if index.Op != token.SUB { - return true + return } if !isLenOnIdent(index.X, condCall.Args[0]) || !isLenOnIdent(index.Y, condCall.Args[1]) { - return true + return } default: - return true + return } var replacement string @@ -1234,14 +1137,9 @@ func (c *Checker) LintTrim(j *lint.Job) { replacement = "TrimSuffix" } j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) - return true - default: - return true } } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintLoopSlide(j *lint.Job) { @@ -1250,7 +1148,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { // TODO(dh): detect length that is an expression, not a variable name // TODO(dh): support sliding to a different offset than the beginning of the slice - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { /* for i := 0; i < n; i++ { bs[i] = bs[offset+i] @@ -1261,103 +1159,97 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { copy(bs[:n], bs[offset:offset+n]) */ - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil { - return true + loop := node.(*ast.ForStmt) + if len(loop.Body.List) != 1 || loop.Init == nil || loop.Cond == nil || loop.Post == nil { + return } assign, ok := loop.Init.(*ast.AssignStmt) if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || !IsZero(assign.Rhs[0]) { - return true + return } initvar, ok := assign.Lhs[0].(*ast.Ident) if !ok { - return true + return } post, ok := loop.Post.(*ast.IncDecStmt) if !ok || post.Tok != token.INC { - return true + return } postvar, ok := post.X.(*ast.Ident) if !ok || ObjectOf(j, postvar) != ObjectOf(j, initvar) { - return true + return } bin, ok := loop.Cond.(*ast.BinaryExpr) if !ok || bin.Op != token.LSS { - return true + return } binx, ok := bin.X.(*ast.Ident) if !ok || ObjectOf(j, binx) != ObjectOf(j, initvar) { - return true + return } biny, ok := bin.Y.(*ast.Ident) if !ok { - return true + return } assign, ok = loop.Body.List[0].(*ast.AssignStmt) if !ok || len(assign.Lhs) != 1 || len(assign.Rhs) != 1 || assign.Tok != token.ASSIGN { - return true + return } lhs, ok := assign.Lhs[0].(*ast.IndexExpr) if 
!ok { - return true + return } rhs, ok := assign.Rhs[0].(*ast.IndexExpr) if !ok { - return true + return } bs1, ok := lhs.X.(*ast.Ident) if !ok { - return true + return } bs2, ok := rhs.X.(*ast.Ident) if !ok { - return true + return } obj1 := ObjectOf(j, bs1) obj2 := ObjectOf(j, bs2) if obj1 != obj2 { - return true + return } if _, ok := obj1.Type().Underlying().(*types.Slice); !ok { - return true + return } index1, ok := lhs.Index.(*ast.Ident) if !ok || ObjectOf(j, index1) != ObjectOf(j, initvar) { - return true + return } index2, ok := rhs.Index.(*ast.BinaryExpr) if !ok || index2.Op != token.ADD { - return true + return } add1, ok := index2.X.(*ast.Ident) if !ok { - return true + return } add2, ok := index2.Y.(*ast.Ident) if !ok || ObjectOf(j, add2) != ObjectOf(j, initvar) { - return true + return } j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintMakeLenCap(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" { // FIXME check whether make is indeed the built-in function - return true + return } switch len(call.Args) { case 2: @@ -1376,11 +1268,8 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { Render(j, call.Args[Arg("make.t")]), Render(j, call.Args[Arg("make.size[0]")])) } } - return false - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintAssertNotNil(j *lint.Job) { @@ -1405,106 +1294,92 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } return true } - fn1 := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + fn1 := func(node ast.Node) { + ifstmt := node.(*ast.IfStmt) assign, ok := ifstmt.Init.(*ast.AssignStmt) if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { - return true + return } assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) if !ok { - return true + return } binop, ok := ifstmt.Cond.(*ast.BinaryExpr) if !ok || binop.Op != token.LAND { - return true + return } assertIdent, ok := assert.X.(*ast.Ident) if !ok { - return true + return } assignIdent, ok := assign.Lhs[1].(*ast.Ident) if !ok { - return true + return } if !(isNilCheck(assertIdent, binop.X) && isOKCheck(assignIdent, binop.Y)) && !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { - return true + return } j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) - return true } - fn2 := func(node ast.Node) bool { + fn2 := func(node ast.Node) { // Check that outer ifstmt is an 'if x != nil {}' - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + ifstmt := node.(*ast.IfStmt) if ifstmt.Init != nil { - return true + return } if ifstmt.Else != nil { - return true + return } if len(ifstmt.Body.List) != 1 { - return true + return } binop, ok := ifstmt.Cond.(*ast.BinaryExpr) if !ok { - return true + return } if binop.Op != token.NEQ { - return true + return } lhs, ok := binop.X.(*ast.Ident) if !ok { - return true + return } if !IsNil(j, binop.Y) { - return true + return } // Check that inner ifstmt is an `if _, ok := x.(T); ok {}` ifstmt, ok 
= ifstmt.Body.List[0].(*ast.IfStmt) if !ok { - return true + return } assign, ok := ifstmt.Init.(*ast.AssignStmt) if !ok || len(assign.Lhs) != 2 || len(assign.Rhs) != 1 || !IsBlank(assign.Lhs[0]) { - return true + return } assert, ok := assign.Rhs[0].(*ast.TypeAssertExpr) if !ok { - return true + return } assertIdent, ok := assert.X.(*ast.Ident) if !ok { - return true + return } if lhs.Obj != assertIdent.Obj { - return true + return } assignIdent, ok := assign.Lhs[1].(*ast.Ident) if !ok { - return true + return } if !isOKCheck(assignIdent, ifstmt.Cond) { - return true + return } j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) - return true - } - fn := func(node ast.Node) bool { - b1 := fn1(node) - b2 := fn2(node) - return b1 || b2 - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) + j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) } func (c *Checker) LintDeclareAssign(j *lint.Job) { @@ -1530,13 +1405,10 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { }) return num >= 2 } - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) if len(block.List) < 2 { - return true + return } for i, stmt := range block.List[:len(block.List)-1] { _ = i @@ -1577,19 +1449,13 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { j.Errorf(decl, "should merge variable declaration with assignment on next line") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } func (c *Checker) LintRedundantBreak(j *lint.Job) { fn1 := func(node ast.Node) { - clause, ok := node.(*ast.CaseClause) - if !ok { - return - } + clause := node.(*ast.CaseClause) if len(clause.Body) < 2 { return } @@ -1610,7 +1476,7 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { ret = x.Type.Results body = x.Body default: - return + panic(fmt.Sprintf("unreachable: %T", node)) } // if the func has results, a return can't be redundant. // similarly, if there are no statements, there can be @@ -1626,14 +1492,8 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { // checked x.Type.Results to be nil. 
j.Errorf(rst, "redundant return statement") } - fn := func(node ast.Node) bool { - fn1(node) - fn2(node) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) + j.Program.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) } func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { @@ -1666,26 +1526,23 @@ func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { } func (c *Checker) LintRedundantSprintf(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "fmt.Sprintf") { - return true + return } if len(call.Args) != 2 { - return true + return } if s, ok := ExprToString(j, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { - return true + return } arg := call.Args[Arg("fmt.Sprintf.a[0]")] typ := TypeOf(j, arg) if c.Implements(j, typ, "fmt.Stringer") { j.Errorf(call, "should use String() instead of fmt.Sprintf") - return true + return } if typ.Underlying() == types.Universe.Lookup("string").Type() { @@ -1695,28 +1552,22 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if !IsCallToAST(j, node, "errors.New") { - return true + return } call := node.(*ast.CallExpr) if !IsCallToAST(j, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { - return true + return } j.Errorf(node, "should use fmt.Errorf(...) 
instead of errors.New(fmt.Sprintf(...))") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintRangeStringRunes(j *lint.Job) { @@ -1724,45 +1575,38 @@ func (c *Checker) LintRangeStringRunes(j *lint.Job) { } func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } - + fn := func(node ast.Node) { + ifstmt := node.(*ast.IfStmt) cond, ok := ifstmt.Cond.(*ast.BinaryExpr) if !ok { - return true + return } if cond.Op != token.NEQ || !IsNil(j, cond.Y) || len(ifstmt.Body.List) != 1 { - return true + return } loop, ok := ifstmt.Body.List[0].(*ast.RangeStmt) if !ok { - return true + return } ifXIdent, ok := cond.X.(*ast.Ident) if !ok { - return true + return } rangeXIdent, ok := loop.X.(*ast.Ident) if !ok { - return true + return } if ifXIdent.Obj != rangeXIdent.Obj { - return true + return } switch TypeOf(j, rangeXIdent).(type) { case *types.Slice, *types.Map: j.Errorf(node, "unnecessary nil check around range") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func isPermissibleSort(j *lint.Job, node ast.Node) bool { @@ -1787,7 +1631,7 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { } func (c *Checker) LintSortHelpers(j *lint.Job) { - fnFuncs := func(node ast.Node) bool { + fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { case *ast.FuncLit: @@ -1795,10 +1639,10 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { case *ast.FuncDecl: body = node.Body default: - return true + panic(fmt.Sprintf("unreachable: %T", node)) } if body == nil { - return true + return } type Error struct { @@ -1836,17 +1680,14 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { ast.Inspect(body, fnSorts) if permissible { - return false + return } for _, err := range errors { j.Errorf(err.node, "%s", err.msg) } - return false - } - - for _, f := range j.Program.Files { - ast.Inspect(f, fnFuncs) + return } + j.Program.Inspector.Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) } func (c *Checker) LintGuardedDelete(j *lint.Job) { @@ -1877,65 +1718,56 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { key = index.Index return ident, index.X, key, true } - fn := func(node ast.Node) bool { - stmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + stmt := node.(*ast.IfStmt) if len(stmt.Body.List) != 1 { - return true + return } if stmt.Else != nil { - return true + return } expr, ok := stmt.Body.List[0].(*ast.ExprStmt) if !ok { - return true + return } call, ok := expr.X.(*ast.CallExpr) if !ok { - return true + return } if !IsCallToAST(j, call, "delete") { - return true + return } b, m, key, ok := isCommaOkMapIndex(stmt.Init) if !ok { - return true + return } if cond, ok := stmt.Cond.(*ast.Ident); !ok || ObjectOf(j, cond) != ObjectOf(j, b) { - return true + return } if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { - return true + return } j.Errorf(stmt, "unnecessary guard around call to delete") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { - fn := func(node ast.Node) bool { - stmt, ok := node.(*ast.TypeSwitchStmt) - if !ok { 
- return true - } + fn := func(node ast.Node) { + stmt := node.(*ast.TypeSwitchStmt) if stmt.Init != nil { // bailing out for now, can't anticipate how type switches with initializers are being used - return true + return } expr, ok := stmt.Assign.(*ast.ExprStmt) if !ok { // the user is in fact assigning the result - return true + return } assert := expr.X.(*ast.TypeAssertExpr) ident, ok := assert.X.(*ast.Ident) if !ok { - return true + return } x := ObjectOf(j, ident) var allOffenders []ast.Node @@ -1985,9 +1817,6 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { } j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 40c3ff0f1..f1f7c853e 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -610,14 +610,11 @@ func hasType(j *lint.Job, expr ast.Expr, name string) bool { } func (c *Checker) CheckUntrappableSignal(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAnyAST(j, call, "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { - return true + return } for _, arg := range call.Args { if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") { @@ -631,26 +628,20 @@ func (c *Checker) CheckUntrappableSignal(j *lint.Job) { j.Errorf(arg, "%s signal cannot be trapped", Render(j, arg)) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTemplate(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) var kind string if IsCallToAST(j, call, "(*text/template.Template).Parse") { kind = "text" } else if IsCallToAST(j, call, "(*html/template.Template).Parse") { kind = "html" } else { - return true + return } sel := call.Fun.(*ast.SelectorExpr) if !IsCallToAST(j, sel.X, "text/template.New") && @@ -659,11 +650,11 @@ func (c *Checker) CheckTemplate(j *lint.Job) { // different delims. 
A better solution with less false // negatives would use data flow analysis to see where the // template comes from and where it has been - return true + return } s, ok := ExprToString(j, call.Args[Arg("(*text/template.Template).Parse.text")]) if !ok { - return true + return } var err error switch kind { @@ -678,35 +669,29 @@ func (c *Checker) CheckTemplate(j *lint.Job) { j.Errorf(call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "time.Sleep") { - return true + return } lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit) if !ok { - return true + return } n, err := strconv.Atoi(lit.Value) if err != nil { - return true + return } if n == 0 || n > 120 { // time.Sleep(0) is a seldom used pattern in concurrency // tests. >120 might be intentional. 120 was chosen // because the user could've meant 2 minutes. - return true + return } recommendation := "time.Sleep(time.Nanosecond)" if n != 1 { @@ -714,64 +699,55 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { } j.Errorf(call.Args[Arg("time.Sleep.d")], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { - fn := func(node ast.Node) bool { - g, ok := node.(*ast.GoStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + g := node.(*ast.GoStmt) fun, ok := g.Call.Fun.(*ast.FuncLit) if !ok { - return true + return } if len(fun.Body.List) == 0 { - return true + return } stmt, ok := fun.Body.List[0].(*ast.ExprStmt) if !ok { - return true + return } call, ok := stmt.X.(*ast.CallExpr) if !ok { - return true + return } sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } fn, ok := ObjectOf(j, sel.Sel).(*types.Func) if !ok { - return true + return } if fn.FullName() == "(*sync.WaitGroup).Add" { j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", Render(j, stmt)) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) } func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 0 || loop.Post != nil { - return true + fn := func(node ast.Node) { + loop := node.(*ast.ForStmt) + if len(loop.Body.List) != 0 || loop.Post != nil { + return } if loop.Init != nil { // TODO(dh): this isn't strictly necessary, it just makes // the check easier. - return true + return } // An empty loop is bad news in two cases: 1) The loop has no // condition. In that case, it's just a loop that spins @@ -787,34 +763,30 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { if loop.Cond != nil { if hasSideEffects(loop.Cond) { - return true + return } if ident, ok := loop.Cond.(*ast.Ident); ok { if k, ok := ObjectOf(j, ident).(*types.Const); ok { if !constant.BoolVal(k.Val()) { // don't flag `for false {}` loops. They're a debug aid. 
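// Editorial note (illustration only, not part of this patch): every conversion in
// this commit follows the same shape -- the checker declares which node types it
// cares about and receives one callback per matching node, instead of walking
// whole files with ast.Inspect and filtering inside the callback. A minimal
// self-contained sketch, assuming j.Program.Inspector behaves like
// golang.org/x/tools/go/ast/inspector:
//
//	package main
//
//	import (
//		"fmt"
//		"go/ast"
//		"go/parser"
//		"go/token"
//
//		"golang.org/x/tools/go/ast/inspector"
//	)
//
//	func main() {
//		fset := token.NewFileSet()
//		f, err := parser.ParseFile(fset, "x.go", "package p; func f() { for {} }", 0)
//		if err != nil {
//			panic(err)
//		}
//		insp := inspector.New([]*ast.File{f})
//		// Only *ast.ForStmt nodes reach the callback; no ok-check is needed.
//		insp.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, func(n ast.Node) {
//			loop := n.(*ast.ForStmt)
//			fmt.Println("for loop at", fset.Position(loop.Pos()))
//		})
//	}
//
// The Nodes variant used further down (CheckCanonicalHeaderKey, CheckDeprecated)
// additionally passes a push bool, so a checker can tell entering a node apart
// from leaving it, e.g. to maintain a stack depth.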
- return true + return } } } j.Errorf(loop, "loop condition never changes or has a race condition") } j.Errorf(loop, "this loop will spin, using 100%% CPU") - - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { mightExit := false var defers []ast.Stmt - loop, ok := node.(*ast.ForStmt) - if !ok || loop.Cond != nil { - return true + loop := node.(*ast.ForStmt) + if loop.Cond != nil { + return } fn2 := func(node ast.Node) bool { switch stmt := node.(type) { @@ -838,28 +810,22 @@ func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { } ast.Inspect(loop.Body, fn2) if mightExit { - return true + return } for _, stmt := range defers { j.Errorf(stmt, "defers in this infinite loop will never run") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.RangeStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + loop := node.(*ast.RangeStmt) typ := TypeOf(j, loop.X) - _, ok = typ.Underlying().(*types.Chan) + _, ok := typ.Underlying().(*types.Chan) if !ok { - return true + return } fn2 := func(node ast.Node) bool { switch stmt := node.(type) { @@ -872,17 +838,14 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { return true } ast.Inspect(loop.Body, fn2) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckTestMainExit(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if !isTestMain(j, node) { - return true + return } arg := ObjectOf(j, node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) @@ -923,11 +886,8 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !callsExit && callsRun { j.Errorf(node, "TestMain should call os.Exit to set exit code") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder(nil, fn) } func isTestMain(j *lint.Job, node ast.Node) bool { @@ -949,64 +909,52 @@ func isTestMain(j *lint.Job, node ast.Node) bool { } func (c *Checker) CheckExec(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if !IsCallToAST(j, call, "os/exec.Command") { - return true + return } val, ok := ExprToString(j, call.Args[Arg("os/exec.Command.name")]) if !ok { - return true + return } if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { - return true + return } j.Errorf(call.Args[Arg("os/exec.Command.name")], "first argument to exec.Command looks like a shell command, but a program name or path are expected") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { - fn := func(node ast.Node) bool { - loop, ok := node.(*ast.ForStmt) - if !ok || len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { - return true + fn := func(node ast.Node) { + loop := node.(*ast.ForStmt) + if len(loop.Body.List) != 1 || 
loop.Cond != nil || loop.Init != nil { + return } sel, ok := loop.Body.List[0].(*ast.SelectStmt) if !ok { - return true + return } for _, c := range sel.Body.List { if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { - fn := func(node ast.Node) bool { - op, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + op := node.(*ast.BinaryExpr) switch op.Op { case token.EQL, token.NEQ: if basic, ok := TypeOf(j, op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN - return true + return } } case token.SUB, token.QUO, token.AND, token.REM, token.OR, token.XOR, token.AND_NOT, @@ -1014,22 +962,19 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { default: // For some ops, such as + and *, it can make sense to // have identical operands - return true + return } if Render(j, op.X) != Render(j, op.Y) { - return true + return } j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckScopedBreak(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { case *ast.ForStmt: @@ -1037,7 +982,7 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { case *ast.RangeStmt: body = node.Body default: - return true + panic(fmt.Sprintf("unreachable: %T", node)) } for _, stmt := range body.List { var blocks [][]ast.Stmt @@ -1082,52 +1027,40 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { } } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckUnsafePrintf(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) var arg int if IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { arg = Arg("fmt.Printf.format") } else if IsCallToAnyAST(j, call, "fmt.Fprintf") { arg = Arg("fmt.Fprintf.format") } else { - return true + return } if len(call.Args) != arg+1 { - return true + return } switch call.Args[arg].(type) { case *ast.CallExpr, *ast.Ident: default: - return true + return } j.Errorf(call.Args[arg], "printf-style function with dynamic format string and no further arguments should use print-style function instead") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckEarlyDefer(j *lint.Job) { - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) if len(block.List) < 2 { - return true + return } for i, stmt := range block.List { if i == len(block.List)-1 { @@ -1187,11 +1120,8 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { } j.Errorf(def, "should check 
returned error before deferring %s", Render(j, def.Call)) } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } func selectorX(sel *ast.SelectorExpr) ast.Node { @@ -1241,13 +1171,10 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return sel.X, fn.Name(), true } - fn := func(node ast.Node) bool { - block, ok := node.(*ast.BlockStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + block := node.(*ast.BlockStmt) if len(block.List) < 2 { - return true + return } for i := range block.List[:len(block.List)-1] { sel1, method1, ok1 := mutexParams(block.List[i]) @@ -1261,11 +1188,8 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { j.Errorf(block.List[i+1], "empty critical section") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't @@ -1273,7 +1197,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`) func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if unary, ok := node.(*ast.UnaryExpr); ok { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { ident, ok := star.X.(*ast.Ident) @@ -1288,11 +1212,8 @@ func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { j.Errorf(star, "*&x will be simplified to x. It will not copy x.") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) } func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { @@ -1326,7 +1247,7 @@ func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { } func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node, _ bool) bool { assign, ok := node.(*ast.AssignStmt) if ok { // TODO(dh): This risks missing some Header reads, for @@ -1360,36 +1281,28 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Program.Inspector.Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) } func (c *Checker) CheckBenchmarkN(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return true + return } sel, ok := assign.Lhs[0].(*ast.SelectorExpr) if !ok { - return true + return } if sel.Sel.Name != "N" { - return true + return } if !hasType(j, sel.X, "*testing.B") { - return true + return } j.Errorf(assign, "should not assign to %s", Render(j, sel)) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { @@ -1536,15 +1449,12 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { return IsObject(ObjectOf(j, sel.Sel), name) } - fn := func(node ast.Node) bool { - expr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + expr := 
node.(*ast.BinaryExpr) tx := TypeOf(j, expr.X) basic, ok := tx.Underlying().(*types.Basic) if !ok { - return true + return } var max string @@ -1608,11 +1518,8 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { @@ -1799,7 +1706,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { // // - any nested, unlabelled continue, even if it is in another // loop or closure. - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { var body *ast.BlockStmt switch fn := node.(type) { case *ast.FuncDecl: @@ -1807,10 +1714,10 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { case *ast.FuncLit: body = fn.Body default: - return true + panic(fmt.Sprintf("unreachable: %T", node)) } if body == nil { - return true + return } labels := map[*ast.Object]ast.Stmt{} ast.Inspect(body, func(node ast.Node) bool { @@ -1894,82 +1801,67 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { } return true }) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) } func (c *Checker) CheckNilContext(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) if len(call.Args) == 0 { - return true + return } if typ, ok := TypeOf(j, call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { - return true + return } sig, ok := TypeOf(j, call.Fun).(*types.Signature) if !ok { - return true + return } if sig.Params().Len() == 0 { - return true + return } if !IsType(sig.Params().At(0).Type(), "context.Context") { - return true + return } j.Errorf(call.Args[0], "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckSeeker(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { - return true + return } if sel.Sel.Name != "Seek" { - return true + return } if len(call.Args) != 2 { - return true + return } arg0, ok := call.Args[Arg("(io.Seeker).Seek.offset")].(*ast.SelectorExpr) if !ok { - return true + return } switch arg0.Sel.Name { case "SeekStart", "SeekCurrent", "SeekEnd": default: - return true + return } pkg, ok := arg0.X.(*ast.Ident) if !ok { - return true + return } if pkg.Name != "io" { - return true + return } j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { @@ -2301,24 +2193,18 @@ func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { } func (c *Checker) CheckDoubleNegation(j *lint.Job) { - fn := func(node ast.Node) bool { - unary1, ok := node.(*ast.UnaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + unary1 := 
node.(*ast.UnaryExpr) unary2, ok := unary1.X.(*ast.UnaryExpr) if !ok { - return true + return } if unary1.Op != token.NOT || unary2.Op != token.NOT { - return true + return } j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) } func hasSideEffects(node ast.Node) bool { @@ -2354,21 +2240,18 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } return inits, conds } - fn := func(node ast.Node) bool { - ifstmt, ok := node.(*ast.IfStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + ifstmt := node.(*ast.IfStmt) if seen[ifstmt] { - return true + return } inits, conds := collectConds(ifstmt, nil, nil) if len(inits) > 0 { - return true + return } for _, cond := range conds { if hasSideEffects(cond) { - return true + return } } counts := map[string]int{} @@ -2379,11 +2262,8 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { @@ -2425,14 +2305,11 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { } func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + call := node.(*ast.CallExpr) sig, ok := TypeOf(j, call.Fun).(*types.Signature) if !ok { - return true + return } n := sig.Params().Len() var args []int @@ -2460,11 +2337,8 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckPureFunctions(j *lint.Job) { @@ -2525,8 +2399,8 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { var ssafn *ssa.Function stack := 0 - fn := func(node ast.Node) bool { - if node == nil { + fn := func(node ast.Node, push bool) bool { + if !push { stack-- } else { stack++ @@ -2590,9 +2464,7 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { }) } } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Program.Inspector.Nodes(nil, fn) } func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { @@ -2826,13 +2698,10 @@ func (c *Checker) CheckRangeStringRunes(j *lint.Job) { } func (c *Checker) CheckSelfAssignment(j *lint.Job) { - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + assign := node.(*ast.AssignStmt) if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) { - return true + return } for i, stmt := range assign.Lhs { rlh := Render(j, stmt) @@ -2841,11 +2710,8 @@ func (c *Checker) CheckSelfAssignment(j *lint.Job) { j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func buildTagsIdentical(s1, s2 []string) bool { @@ -2918,16 +2784,13 @@ func (c *Checker) CheckSillyRegexp(j *lint.Job) { } func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { 
- fn := func(node ast.Node) bool { - decl, ok := node.(*ast.GenDecl) - if !ok { - return true - } + fn := func(node ast.Node) { + decl := node.(*ast.GenDecl) if !decl.Lparen.IsValid() { - return true + return } if decl.Tok != token.CONST { - return true + return } groups := GroupSpecs(j, decl.Specs) @@ -2965,11 +2828,8 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { } j.Errorf(group[0], "only the first constant in this group has an explicit type") } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) } func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { @@ -3035,11 +2895,8 @@ func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { } func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { - fn := func(node ast.Node) bool { - binExpr, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + binExpr := node.(*ast.BinaryExpr) var negative bool switch binExpr.Op { @@ -3048,7 +2905,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { case token.NEQ: negative = true default: - return true + return } const ( @@ -3062,7 +2919,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { call = up } else { - return true + return } bang := "" @@ -3071,12 +2928,9 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { } j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) - return true } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { @@ -3102,11 +2956,8 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { return nil, nil, false } - fn := func(node ast.Node) bool { - tsStmt, ok := node.(*ast.TypeSwitchStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + tsStmt := node.(*ast.TypeSwitchStmt) type ccAndTypes struct { cc *ast.CaseClause @@ -3133,7 +2984,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { if len(ccs) <= 1 { // Zero or one case clauses, nothing to check. - return true + return } // Check if case clauses following cc have types that are subsumed by cc. @@ -3144,28 +2995,21 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { } } } - - return true } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) - } + j.Program.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } func (c *Checker) CheckSingleArgAppend(j *lint.Job) { - fn := func(node ast.Node) bool { + fn := func(node ast.Node) { if !IsCallToAST(j, node, "append") { - return true + return } - call, _ := node.(*ast.CallExpr) + call := node.(*ast.CallExpr) if len(call.Args) != 1 { - return true + return } j.Errorf(call, "x = append(y) is equivalent to x = y") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 86031be3b..ae5039439 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -178,14 +178,14 @@ func (c *Checker) CheckIncDec(j *lint.Job) { // x += 2 // ... 
// x += 1 - fn := func(node ast.Node) bool { - assign, ok := node.(*ast.AssignStmt) - if !ok || (assign.Tok != token.ADD_ASSIGN && assign.Tok != token.SUB_ASSIGN) { - return true + fn := func(node ast.Node) { + assign := node.(*ast.AssignStmt) + if assign.Tok != token.ADD_ASSIGN && assign.Tok != token.SUB_ASSIGN { + return } if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) || !IsIntLiteral(assign.Rhs[0], "1") { - return true + return } suffix := "" @@ -197,11 +197,8 @@ func (c *Checker) CheckIncDec(j *lint.Job) { } j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckErrorReturn(j *lint.Job) { @@ -604,11 +601,8 @@ func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { } func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { - fn := func(node ast.Node) bool { - stmt, ok := node.(*ast.SwitchStmt) - if !ok { - return true - } + fn := func(node ast.Node) { + stmt := node.(*ast.SwitchStmt) list := stmt.Body.List for i, c := range list { if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 { @@ -616,42 +610,33 @@ func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { break } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) } func (c *Checker) CheckYodaConditions(j *lint.Job) { - fn := func(node ast.Node) bool { - cond, ok := node.(*ast.BinaryExpr) - if !ok { - return true - } + fn := func(node ast.Node) { + cond := node.(*ast.BinaryExpr) if cond.Op != token.EQL && cond.Op != token.NEQ { - return true + return } if _, ok := cond.X.(*ast.BasicLit); !ok { - return true + return } if _, ok := cond.Y.(*ast.BasicLit); ok { // Don't flag lit == lit conditions, just in case - return true + return } j.Errorf(cond, "don't use Yoda conditions") - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { - fn := func(node ast.Node) bool { - lit, ok := node.(*ast.BasicLit) - if !ok || lit.Kind != token.STRING { - return true + fn := func(node ast.Node) { + lit := node.(*ast.BasicLit) + if lit.Kind != token.STRING { + return } for _, r := range lit.Value { if unicode.Is(unicode.Cf, r) { @@ -660,9 +645,6 @@ func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { j.Errorf(lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) } } - return true - } - for _, f := range j.Program.Files { - ast.Inspect(f, fn) } + j.Program.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) } From a53acc14c5cd0138f7ef63c699841082732f8d10 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 01:53:36 +0100 Subject: [PATCH 044/254] lint: add test case for multiple linter directives --- lint/testdata/src/Test/line-ignores.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lint/testdata/src/Test/line-ignores.go b/lint/testdata/src/Test/line-ignores.go index ce0e3feb7..77660e69c 100644 --- a/lint/testdata/src/Test/line-ignores.go +++ b/lint/testdata/src/Test/line-ignores.go @@ -6,6 +6,7 @@ package pkg func fn1() {} // MATCH "test problem" //lint:ignore TEST1000 This should be ignored, because ... 
+//lint:ignore XXX1000 Testing that multiple linter directives work correctly func fn2() {} //lint:ignore TEST1000 @@ -17,5 +18,5 @@ func fn4() { var _ int } -// MATCH:11 "malformed linter directive" -// MATCH:16 "this linter directive didn't match anything" +// MATCH:12 "malformed linter directive" +// MATCH:17 "this linter directive didn't match anything" From 49486d2d3901994520d334de4ee0b478aa695012 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 02:25:24 +0100 Subject: [PATCH 045/254] unused2: compute all scopes in one go The overall increased memory usage is offset by less garbage (no repeated building of maps) and only looping over all functions once. --- unused2/unused.go | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 5d8dd66e5..17fb5e18a 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -162,10 +162,19 @@ func NewChecker() *Checker { type Checker struct{} func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { + scopes := map[*types.Scope]*ssa.Function{} + for _, fn := range j.Program.InitialFunctions { + if fn.Object() != nil { + scope := fn.Object().(*types.Func).Scope() + scopes[scope] = fn + } + } + var out []Unused for _, pkg := range prog.InitialPackages { graph := NewGraph(pkg.SSA) graph.job = j + graph.scopes = scopes graph.entry(pkg.TypesInfo) graph.color(graph.Root) @@ -259,6 +268,7 @@ type Graph struct { job *lint.Job pkg *ssa.Package msCache typeutil.MethodSetCache + scopes map[*types.Scope]*ssa.Function nodeCounter int @@ -403,21 +413,10 @@ func (g *Graph) seeAndUse(used, by interface{}, reason string) { func (g *Graph) entry(tinfo *types.Info) { // TODO rename Entry - scopes := map[*types.Scope]*ssa.Function{} - for _, fn := range g.job.Program.InitialFunctions { - if fn.Pkg != g.pkg { - continue - } - if fn.Object() != nil { - scope := fn.Object().(*types.Func).Scope() - scopes[scope] = fn - } - } - surroundingFunc := func(obj types.Object) *ssa.Function { scope := obj.Parent() for scope != nil { - if fn := scopes[scope]; fn != nil { + if fn := g.scopes[scope]; fn != nil { return fn } scope = scope.Parent() From 18f3feffb37fb402a59a115966036f7a969aa0fc Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 02:26:47 +0100 Subject: [PATCH 046/254] unused2: cull some unnecessary graph nodes --- unused2/unused.go | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/unused2/unused.go b/unused2/unused.go index 17fb5e18a..9f88e90c3 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -357,7 +357,28 @@ func (n *Node) use(node *Node) { n.used[node] = struct{}{} } +func isIrrelevantType(obj interface{}) bool { + if T, ok := obj.(types.Type); ok { + T = lintdsl.Dereference(T) + switch T := T.(type) { + case *types.Array: + return isIrrelevantType(T.Elem()) + case *types.Slice: + return isIrrelevantType(T.Elem()) + case *types.Basic: + return true + case *types.Tuple: + return T.Len() == 0 + } + } + return false +} + func (g *Graph) see(obj interface{}) { + if isIrrelevantType(obj) { + return + } + assert(obj != nil) if obj, ok := obj.(types.Object); ok { if obj.Pkg() != g.pkg.Pkg { @@ -373,6 +394,10 @@ func (g *Graph) see(obj interface{}) { } func (g *Graph) use(used, by interface{}, reason string) { + if isIrrelevantType(used) { + return + } + assert(used != nil) if _, ok := used.(*types.Func); ok { assert(g.pkg.Prog.FuncValue(used.(*types.Func)) == nil) From f139527bf6937a0d40cc193b67737cbe70dcbc0f Mon Sep 
17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 02:34:44 +0100 Subject: [PATCH 047/254] unused2: cull even more types, use decision to skip some code --- unused2/unused.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/unused2/unused.go b/unused2/unused.go index 9f88e90c3..07bdc88e9 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -369,6 +369,10 @@ func isIrrelevantType(obj interface{}) bool { return true case *types.Tuple: return T.Len() == 0 + case *types.Signature: + return T.Recv() == nil && T.Params().Len() == 0 && T.Results().Len() == 0 + case *types.Interface: + return T.NumMethods() == 0 } } return false @@ -606,6 +610,9 @@ func (g *Graph) typ(t types.Type) { } } g.seenTypes.Set(t, struct{}{}) + if isIrrelevantType(t) { + return + } g.see(t) switch t := t.(type) { From 66294c2e2d119d19adaa5d942acea395545a9d33 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 02:44:35 +0100 Subject: [PATCH 048/254] unused2: improve debug printing Don't print duplicate nodes or edges --- unused2/unused.go | 37 ++++++++++++++++++++++--------------- 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 07bdc88e9..32935a597 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -326,21 +326,21 @@ func (g *Graph) nodeMaybe(obj interface{}) (*Node, bool) { return nil, false } -func (g *Graph) node(obj interface{}) *Node { +func (g *Graph) node(obj interface{}) (node *Node, new bool) { if t, ok := obj.(types.Type); ok { if v := g.TypeNodes.At(t); v != nil { - return v.(*Node) + return v.(*Node), false } node := g.newNode(obj) g.TypeNodes.Set(t, node) - return node + return node, true } if node, ok := g.Nodes[obj]; ok { - return node + return node, false } - node := g.newNode(obj) + node = g.newNode(obj) g.Nodes[obj] = node - return node + return node, true } func (g *Graph) newNode(obj interface{}) *Node { @@ -352,9 +352,13 @@ func (g *Graph) newNode(obj interface{}) *Node { } } -func (n *Node) use(node *Node) { +func (n *Node) use(node *Node) (new bool) { assert(node != nil) + if _, ok := n.used[node]; ok { + return false + } n.used[node] = struct{}{} + return true } func isIrrelevantType(obj interface{}) bool { @@ -373,6 +377,8 @@ func isIrrelevantType(obj interface{}) bool { return T.Recv() == nil && T.Params().Len() == 0 && T.Results().Len() == 0 case *types.Interface: return T.NumMethods() == 0 + default: + return false } } return false @@ -391,8 +397,8 @@ func (g *Graph) see(obj interface{}) { } // add new node to graph - node := g.node(obj) - if debug { + node, new := g.node(obj) + if debug && new { fmt.Printf("n%d [label=%q];\n", node.id, obj) } } @@ -419,18 +425,19 @@ func (g *Graph) use(used, by interface{}, reason string) { return } } - usedNode := g.node(used) + usedNode, _ := g.node(used) if by == nil { - g.Root.use(usedNode) - if debug { + new := g.Root.use(usedNode) + if debug && new { fmt.Printf("n%d -> n%d [label=%q];\n", g.Root.id, usedNode.id, reason) } } else { - byNode := g.node(by) - if debug { + byNode, _ := g.node(by) + new := byNode.use(usedNode) + if debug && new { fmt.Printf("n%d -> n%d [label=%q];\n", byNode.id, usedNode.id, reason) } - byNode.use(usedNode) + } } From 9294fc9bd2f6bba7042462cc6be5b249209d277c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 03:22:58 +0100 Subject: [PATCH 049/254] unused2: don't cull objects with nil package Some objects that really should have non-nil packages, such as variables in some tuples (potentially those 
synthesized by SSA?), have nil packages. We still want to track those, as they may make objects in the package reachable. --- unused2/unused.go | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 32935a597..827449a72 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -390,7 +390,7 @@ func (g *Graph) see(obj interface{}) { } assert(obj != nil) - if obj, ok := obj.(types.Object); ok { + if obj, ok := obj.(types.Object); ok && obj.Pkg() != nil { if obj.Pkg() != g.pkg.Pkg { return } @@ -415,25 +415,27 @@ func (g *Graph) use(used, by interface{}, reason string) { if _, ok := by.(*types.Func); ok { assert(g.pkg.Prog.FuncValue(by.(*types.Func)) == nil) } - if obj, ok := used.(types.Object); ok { + if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { if obj.Pkg() != g.pkg.Pkg { return } } - if obj, ok := by.(types.Object); ok { + if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { if obj.Pkg() != g.pkg.Pkg { return } } - usedNode, _ := g.node(used) + usedNode, new := g.node(used) + assert(!new) if by == nil { new := g.Root.use(usedNode) if debug && new { fmt.Printf("n%d -> n%d [label=%q];\n", g.Root.id, usedNode.id, reason) } } else { - byNode, _ := g.node(by) - new := byNode.use(usedNode) + byNode, new := g.node(by) + assert(!new) + new = byNode.use(usedNode) if debug && new { fmt.Printf("n%d -> n%d [label=%q];\n", byNode.id, usedNode.id, reason) } @@ -487,6 +489,7 @@ func (g *Graph) entry(tinfo *types.Info) { if fn.Pkg != g.pkg { continue } + g.see(fn) node := fn.Syntax() if node == nil { continue @@ -611,7 +614,7 @@ func (g *Graph) typ(t types.Type) { if g.seenTypes.At(t) != nil { return } - if t, ok := t.(*types.Named); ok { + if t, ok := t.(*types.Named); ok && t.Obj().Pkg() != nil { if t.Obj().Pkg() != g.pkg.Pkg { return } @@ -758,7 +761,6 @@ func (g *Graph) instructions(fn *ssa.Function) { if _, ok := v.(*ssa.Range); !ok { // See https://github.com/golang/go/issues/19670 - g.see(v.Type()) g.seeAndUse(v.Type(), fn, "instruction") g.typ(v.Type()) } From 0b0230b05df2f75a5c3104864e49c9a86b5afad4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 03:24:59 +0100 Subject: [PATCH 050/254] unused2: cull more objects --- unused2/unused.go | 30 ++++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 827449a72..0fdce8178 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -362,6 +362,14 @@ func (n *Node) use(node *Node) (new bool) { } func isIrrelevantType(obj interface{}) bool { + if obj, ok := obj.(types.Object); ok { + switch obj := obj.(type) { + case *types.Var: + return !obj.IsField() && isIrrelevantType(obj.Type()) + default: + return false + } + } if T, ok := obj.(types.Type); ok { T = lintdsl.Dereference(T) switch T := T.(type) { @@ -372,9 +380,27 @@ func isIrrelevantType(obj interface{}) bool { case *types.Basic: return true case *types.Tuple: - return T.Len() == 0 + for i := 0; i < T.Len(); i++ { + if !isIrrelevantType(T.At(i).Type()) { + return false + } + } + return true case *types.Signature: - return T.Recv() == nil && T.Params().Len() == 0 && T.Results().Len() == 0 + if T.Recv() != nil { + return false + } + for i := 0; i < T.Params().Len(); i++ { + if !isIrrelevantType(T.Params().At(i)) { + return false + } + } + for i := 0; i < T.Results().Len(); i++ { + if !isIrrelevantType(T.Results().At(i)) { + return false + } + } + return true case *types.Interface: 
return T.NumMethods() == 0 default: From 5897b2aa8ebc7d107cb0a756e3b4e89a86248c67 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 03:35:11 +0100 Subject: [PATCH 051/254] unused2: rename isIrrelevantType and document it --- unused2/unused.go | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 0fdce8178..52db31fae 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -361,11 +361,19 @@ func (n *Node) use(node *Node) (new bool) { return true } -func isIrrelevantType(obj interface{}) bool { +// isIrrelevant reports whether an object's presence in the graph is +// of any relevance. A lot of objects will never have outgoing edges, +// nor meaningful incoming ones. Examples are basic types and empty +// signatures, among many others. +// +// Dropping these objects should have no effect on correctness, but +// may improve performance. It also helps with debugging, as it +// greatly reduces the size of the graph. +func isIrrelevant(obj interface{}) bool { if obj, ok := obj.(types.Object); ok { switch obj := obj.(type) { case *types.Var: - return !obj.IsField() && isIrrelevantType(obj.Type()) + return !obj.IsField() && isIrrelevant(obj.Type()) default: return false } @@ -374,14 +382,14 @@ func isIrrelevantType(obj interface{}) bool { T = lintdsl.Dereference(T) switch T := T.(type) { case *types.Array: - return isIrrelevantType(T.Elem()) + return isIrrelevant(T.Elem()) case *types.Slice: - return isIrrelevantType(T.Elem()) + return isIrrelevant(T.Elem()) case *types.Basic: return true case *types.Tuple: for i := 0; i < T.Len(); i++ { - if !isIrrelevantType(T.At(i).Type()) { + if !isIrrelevant(T.At(i).Type()) { return false } } @@ -391,12 +399,12 @@ func isIrrelevantType(obj interface{}) bool { return false } for i := 0; i < T.Params().Len(); i++ { - if !isIrrelevantType(T.Params().At(i)) { + if !isIrrelevant(T.Params().At(i)) { return false } } for i := 0; i < T.Results().Len(); i++ { - if !isIrrelevantType(T.Results().At(i)) { + if !isIrrelevant(T.Results().At(i)) { return false } } @@ -411,7 +419,7 @@ func isIrrelevantType(obj interface{}) bool { } func (g *Graph) see(obj interface{}) { - if isIrrelevantType(obj) { + if isIrrelevant(obj) { return } @@ -430,7 +438,7 @@ func (g *Graph) see(obj interface{}) { } func (g *Graph) use(used, by interface{}, reason string) { - if isIrrelevantType(used) { + if isIrrelevant(used) { return } @@ -646,7 +654,7 @@ func (g *Graph) typ(t types.Type) { } } g.seenTypes.Set(t, struct{}{}) - if isIrrelevantType(t) { + if isIrrelevant(t) { return } From e033f169b57ba4aa660fd036f10b90cfaae3aa1e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 04:22:14 +0100 Subject: [PATCH 052/254] unused2: don't be over-eager in simplifying debug graph --- unused2/unused.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 52db31fae..7ce9b4077 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -307,7 +307,7 @@ func (g *Graph) color(root *Node) { type Node struct { obj interface{} id int - used map[*Node]struct{} + used map[*Node]string seen bool quiet bool @@ -348,16 +348,16 @@ func (g *Graph) newNode(obj interface{}) *Node { return &Node{ obj: obj, id: g.nodeCounter, - used: map[*Node]struct{}{}, + used: map[*Node]string{}, } } -func (n *Node) use(node *Node) (new bool) { +func (n *Node) use(node *Node, reason string) (new bool) { assert(node != nil) - if _, ok := 
n.used[node]; ok { + if s, ok := n.used[node]; ok && s == reason { return false } - n.used[node] = struct{}{} + n.used[node] = reason return true } @@ -462,14 +462,14 @@ func (g *Graph) use(used, by interface{}, reason string) { usedNode, new := g.node(used) assert(!new) if by == nil { - new := g.Root.use(usedNode) + new := g.Root.use(usedNode, reason) if debug && new { fmt.Printf("n%d -> n%d [label=%q];\n", g.Root.id, usedNode.id, reason) } } else { byNode, new := g.node(by) assert(!new) - new = byNode.use(usedNode) + new = byNode.use(usedNode, reason) if debug && new { fmt.Printf("n%d -> n%d [label=%q];\n", byNode.id, usedNode.id, reason) } From a2ac9027d589d6345dfbb345f10d9e573f946223 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 05:23:23 +0100 Subject: [PATCH 053/254] unused2: document code --- unused2/unused.go | 86 +++++++++++++++++++++++++++++++++++------------ 1 file changed, 64 insertions(+), 22 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 7ce9b4077..09b5f89af 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -33,55 +33,61 @@ TODO known reflect TODO error interface - packages use: - - exported named types - - exported functions + - (2525) exported named types + - (5252) exported functions - exported variables - - exported constants - - init functions + - (321) exported constants + - (6719) init functions - TODO functions exported to cgo + - (4644) the main function iff in the main package - named types use: - - exported methods + - (9728) exported methods - variables and constants use: - their types - functions use: - - all their arguments, return parameters and receivers - - anonymous functions defined beneath them - - functions they return. we assume that someone else will call the returned function - - functions/interface methods they call + - (1663) all their arguments, return parameters and receivers + - (9567) anonymous functions defined beneath them + - (2521) closures and bound methods. + this implements a simplified model where a function is used merely by being referenced, even if it is never called. + that way we don't have to keep track of closures escaping functions. + - (8103) functions they return. we assume that someone else will call the returned function + - (9681) functions/interface methods they call - types they instantiate or convert to - - fields they read or write - - fields whose addresses they return + - (1323) fields they access - types of all instructions - conversions use: - - when converting between two equivalent structs, the fields in + - (6885) when converting between two equivalent structs, the fields in either struct use each other. the fields are relevant for the conversion, but only if the fields are also accessed outside the conversion. - - when converting to or from unsafe.Pointer, mark all fields as used. + - (4029) when converting to or from unsafe.Pointer, mark all fields as used. - structs use: - - fields of type NoCopy sentinel - - exported fields + - (4946) fields of type NoCopy sentinel + - (2701) exported fields - embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) - - embedded fields that have exported methods (recursively) - - embedded structs that have exported fields (recursively) + - (6090) embedded fields that have exported methods (recursively) + - (8728) embedded structs that have exported fields (recursively) - field accesses use fields - interfaces use: - - all their methods. 
we really have no idea what is going on with interfaces. + - (3393) all their methods. we really have no idea what is going on with interfaces. - matching methods on types that implement this interface. we assume that types are meant to implement as many interfaces as possible. takes into consideration embedding into possibly unnamed types. -- interface calls use: - - the called interface method -- thunks and other generated wrappers call the real function +- Inherent uses: + - thunks and other generated wrappers call the real function + - (254) instructions use their types + - (5749) variables use their types + - (6108) types use their underlying and element types + - (853) conversions use the type they convert to - things named _ are used */ @@ -510,6 +516,7 @@ func (g *Graph) entry(tinfo *types.Info) { g.see(obj) fn := surroundingFunc(obj) if fn == nil && obj.Exported() { + // (321) packages use exported constants g.use(obj, nil, "exported constant") } g.typ(obj.Type()) @@ -566,13 +573,16 @@ func (g *Graph) entry(tinfo *types.Info) { case *ssa.Function: g.see(m) if m.Name() == "init" { + // (6719) packages use init functions g.use(m, nil, "init function") } // This branch catches top-level functions, not methods. if m.Object() != nil && m.Object().Exported() { + // (5252) packages use exported functions g.use(m, nil, "exported top-level function") } if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { + // (4644) packages use the main function iff in the main package g.use(m, nil, "main function") } g.function(m) @@ -580,6 +590,7 @@ func (g *Graph) entry(tinfo *types.Info) { if m.Object() != nil { g.see(m.Object()) if m.Object().Exported() { + // (2525) packages use exported named types g.use(m.Object(), nil, "exported top-level type") } } @@ -635,10 +646,12 @@ func (g *Graph) entry(tinfo *types.Info) { } func (g *Graph) function(fn *ssa.Function) { + // (1663) functions use all their arguments, return parameters and receivers g.seeAndUse(fn.Signature, fn, "function signature") g.signature(fn.Signature) g.instructions(fn) for _, anon := range fn.AnonFuncs { + // (9567) functions use anonymous functions defined beneath them g.seeAndUse(anon, fn, "anonymous function") g.function(anon) } @@ -664,8 +677,10 @@ func (g *Graph) typ(t types.Type) { for i := 0; i < t.NumFields(); i++ { g.see(t.Field(i)) if t.Field(i).Exported() { + // (2701) structs use exported fields g.use(t.Field(i), t, "exported struct field") } else if isNoCopyType(t.Field(i).Type()) { + // (4946) structs use fields of type NoCopy sentinel g.use(t.Field(i), t, "NoCopy sentinel") } if t.Field(i).Anonymous() { @@ -673,6 +688,7 @@ func (g *Graph) typ(t types.Type) { ms := g.msCache.MethodSet(t.Field(i).Type()) for j := 0; j < ms.Len(); j++ { if ms.At(j).Obj().Exported() { + // (6090) structs use embedded fields that have exported methods (recursively) g.use(t.Field(i), t, "extends exported method set") break } @@ -702,6 +718,7 @@ func (g *Graph) typ(t types.Type) { } // does the embedded field contribute exported fields? 
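// Editorial note (hypothetical example, not from this change): this is the case
// handled just below -- an embedded struct that contributes exported fields.
// Given, in some package:
//
//	type data struct{ Count int }
//
//	type Stats struct {
//		data // unexported embedded field
//	}
//
// Stats.Count is only reachable through the embedded data field, so the
// otherwise-unexported field has to be kept alive whenever Stats is used.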
if hasExportedField(t.Field(i).Type()) { + // (8728) structs use embedded structs that have exported fields (recursively) g.use(t.Field(i), t, "extends exported fields") } @@ -711,6 +728,7 @@ func (g *Graph) typ(t types.Type) { case *types.Basic: // Nothing to do case *types.Named: + // (6108) types use their underlying and element types g.seeAndUse(t.Underlying(), t, "underlying type") g.seeAndUse(t.Obj(), t, "type name") g.seeAndUse(t, t.Obj(), "named type") @@ -719,6 +737,7 @@ func (g *Graph) typ(t types.Type) { meth := g.pkg.Prog.FuncValue(t.Method(i)) g.see(meth) if meth.Object() != nil && meth.Object().Exported() { + // (9728) named types use exported methods g.use(meth, t, "exported method") } g.function(meth) @@ -726,10 +745,13 @@ func (g *Graph) typ(t types.Type) { g.typ(t.Underlying()) case *types.Slice: + // (6108) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Map: + // (6108) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") + // (6108) types use their underlying and element types g.seeAndUse(t.Key(), t, "key type") g.typ(t.Elem()) g.typ(t.Key()) @@ -738,21 +760,26 @@ func (g *Graph) typ(t types.Type) { case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) + // (3393) interfaces use all their methods. we really have no idea what is going on with interfaces. g.seeAndUse(m, t, "interface method") g.seeAndUse(m.Type().(*types.Signature), m, "signature") g.signature(m.Type().(*types.Signature)) } case *types.Array: + // (6108) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Pointer: + // (6108) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Chan: + // (6108) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Tuple: for i := 0; i < t.Len(); i++ { + // (6108) types use their underlying and element types g.seeAndUse(t.At(i), t, "tuple element") g.variable(t.At(i)) } @@ -762,6 +789,7 @@ func (g *Graph) typ(t types.Type) { } func (g *Graph) variable(v *types.Var) { + // (5749) variables use their types g.seeAndUse(v.Type(), v, "variable type") g.typ(v.Type()) } @@ -795,6 +823,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if _, ok := v.(*ssa.Range); !ok { // See https://github.com/golang/go/issues/19670 + // (254) instructions use their types g.seeAndUse(v.Type(), fn, "instruction") g.typ(v.Type()) } @@ -803,10 +832,12 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Field: st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) + // (1323) functions use fields they access g.seeAndUse(field, fn, "field access") case *ssa.FieldAddr: st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) + // (1323) functions use fields they access g.seeAndUse(field, fn, "field access") case *ssa.Store: case *ssa.Call: @@ -821,6 +852,7 @@ func (g *Graph) instructions(fn *ssa.Function) { seen[v] = struct{}{} switch v := v.(type) { case *ssa.Function: + // (9681) functions use functions/interface methods they call g.seeAndUse(v, fn, "function call") if obj := v.Object(); obj != nil { if cfn := g.pkg.Prog.FuncValue(obj.(*types.Func)); cfn != v { @@ -846,6 +878,7 @@ func (g *Graph) instructions(fn *ssa.Function) { // non-interface call useCall(c.Value) } else { + // 
(9681) functions use functions/interface methods they call g.seeAndUse(c.Method, fn, "interface call") } case *ssa.Return: @@ -858,9 +891,10 @@ func (g *Graph) instructions(fn *ssa.Function) { seen[v] = struct{}{} switch v := v.(type) { case *ssa.Function: + // (8103) functions use functions they return. we assume that someone else will call the returned function g.seeAndUse(v, fn, "returning function") case *ssa.MakeClosure: - g.seeAndUse(v.Fn, fn, "returning closure") + // nothing to do. 8103 doesn't apply because this case is covered by 2521. case *ssa.Phi: for _, e := range v.Edges { handleReturn(e) @@ -871,6 +905,7 @@ func (g *Graph) instructions(fn *ssa.Function) { handleReturn(v) } case *ssa.ChangeType: + // (853) conversions use the type they convert to g.seeAndUse(instr.Type(), fn, "conversion") g.typ(instr.Type()) @@ -886,6 +921,10 @@ func (g *Graph) instructions(fn *ssa.Function) { for i := 0; i < s1.NumFields(); i++ { g.see(s1.Field(i)) g.see(s2.Field(i)) + // (6885) when converting between two equivalent structs, the fields in + // either struct use each other. the fields are relevant for the + // conversion, but only if the fields are also accessed outside the + // conversion. g.seeAndUse(s1.Field(i), s2.Field(i), "struct conversion") g.seeAndUse(s2.Field(i), s1.Field(i), "struct conversion") } @@ -900,6 +939,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { + // (4029) when converting to or from unsafe.Pointer, mark all fields as used. g.seeAndUse(st.Field(i), fn, "unsafe conversion") } } @@ -910,6 +950,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { + // (4029) when converting to or from unsafe.Pointer, mark all fields as used. g.seeAndUse(st.Field(i), fn, "unsafe conversion") } } @@ -919,6 +960,7 @@ func (g *Graph) instructions(fn *ssa.Function) { g.seeAndUse(instr.AssertedType, fn, "type assert") g.typ(instr.AssertedType) case *ssa.MakeClosure: + // (2521) functions use closures and bound methods. g.seeAndUse(instr.Fn, fn, "make closure") v := instr.Fn.(*ssa.Function) if obj := v.Object(); obj != nil { From b04844d6a7638d30e076db5be7e857009d9c33c1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 06:33:08 +0100 Subject: [PATCH 054/254] unused2: improve handling of interfaces --- unused2/unused.go | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 09b5f89af..a6d81bb4c 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -69,18 +69,28 @@ TODO error interface - structs use: - (4946) fields of type NoCopy sentinel - (2701) exported fields - - embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) + - (7540) embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) - (6090) embedded fields that have exported methods (recursively) - (8728) embedded structs that have exported fields (recursively) - field accesses use fields -- interfaces use: - - (3393) all their methods. we really have no idea what is going on with interfaces. - - matching methods on types that implement this interface. 
- we assume that types are meant to implement as many interfaces as possible. - takes into consideration embedding into possibly unnamed types. +- (6020) How we handle interfaces: + - (8080) We do not technically care about interfaces that only consist of + exported methods. Exported methods on concrete types are always + marked as used. + - Any concrete type implements all known interfaces. Even if it isn't + assigned to any interfaces in our code, the user may receive a value + of the type and expect to pass it back to us through an interface. + Concrete types use their methods that implement interfaces. If the + type is used, it uses those methods. Otherwise, it doesn't. This + way, types aren't incorrectly marked reachable through the edge + from method to type. + + - (3393) All interface methods are marked as used, even if they never get + called. This is to accomodate sum types (unexported interface + method that must exist but never gets called.) - Inherent uses: - thunks and other generated wrappers call the real function @@ -606,6 +616,7 @@ func (g *Graph) entry(tinfo *types.Info) { g.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: + // OPT(dh): (8080) we only need interfaces that have unexported methods ifaces = append(ifaces, t) default: if _, ok := t.Underlying().(*types.Interface); !ok { @@ -614,6 +625,7 @@ func (g *Graph) entry(tinfo *types.Info) { } }) + // (6020) handle interfaces for _, iface := range ifaces { for _, t := range notIfaces { if g.implements(t, iface) { @@ -628,16 +640,17 @@ func (g *Graph) entry(tinfo *types.Info) { base := lintdsl.Dereference(t).Underlying().(*types.Struct) for _, idx := range path[:len(path)-1] { next := base.Field(idx) + // (7540) structs use embedded fields that help implement interfaces g.seeAndUse(next, base, "helps implement") base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) } } if fn := g.pkg.Prog.FuncValue(obj.(*types.Func)); fn != nil { // actual function - g.seeAndUse(fn, iface, "implements") + g.seeAndUse(fn, t, "implements") } else { // interface method - g.seeAndUse(obj, iface, "implements") + g.seeAndUse(obj, t, "implements") } } } @@ -760,7 +773,7 @@ func (g *Graph) typ(t types.Type) { case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) - // (3393) interfaces use all their methods. we really have no idea what is going on with interfaces. 
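The sum-type pattern referred to in the rewritten interface rule above (an unexported interface method that must exist but is never called) looks roughly like the sketch below. All identifiers are invented for illustration; the point is that nothing ever calls isExpr, yet every variant must define it, so the checker has to keep it.

package sketch

// expr is a closed set of types: only types in this package can satisfy
// it, because isExpr is unexported. The method body is never executed.
type expr interface{ isExpr() }

type lit struct{ value int }
type add struct{ left, right expr }

func (lit) isExpr() {}
func (add) isExpr() {}

func eval(e expr) int {
	switch e := e.(type) {
	case lit:
		return e.value
	case add:
		return eval(e.left) + eval(e.right)
	}
	return 0
}
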
+ // (3393) All interface methods are marked as used g.seeAndUse(m, t, "interface method") g.seeAndUse(m.Type().(*types.Signature), m, "signature") g.signature(m.Type().(*types.Signature)) From 6e62ea20ef5a6d1b023572eb44da72ec4bdcee4f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 06:40:52 +0100 Subject: [PATCH 055/254] unused2: replace random numbers with sequential numbers --- unused2/unused.go | 126 +++++++++++++++++++++++----------------------- 1 file changed, 63 insertions(+), 63 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index a6d81bb4c..1eff3c98d 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -33,50 +33,50 @@ TODO known reflect TODO error interface - packages use: - - (2525) exported named types - - (5252) exported functions + - (1.1) exported named types + - (1.2) exported functions - exported variables - - (321) exported constants - - (6719) init functions + - (1.4) exported constants + - (1.5) init functions - TODO functions exported to cgo - - (4644) the main function iff in the main package + - (1.7) the main function iff in the main package - named types use: - - (9728) exported methods + - (2.1) exported methods - variables and constants use: - their types - functions use: - - (1663) all their arguments, return parameters and receivers - - (9567) anonymous functions defined beneath them - - (2521) closures and bound methods. + - (4.1) all their arguments, return parameters and receivers + - (4.2) anonymous functions defined beneath them + - (4.3) closures and bound methods. this implements a simplified model where a function is used merely by being referenced, even if it is never called. that way we don't have to keep track of closures escaping functions. - - (8103) functions they return. we assume that someone else will call the returned function - - (9681) functions/interface methods they call + - (4.4) functions they return. we assume that someone else will call the returned function + - (4.5) functions/interface methods they call - types they instantiate or convert to - - (1323) fields they access + - (4.7) fields they access - types of all instructions - conversions use: - - (6885) when converting between two equivalent structs, the fields in + - (5.1) when converting between two equivalent structs, the fields in either struct use each other. the fields are relevant for the conversion, but only if the fields are also accessed outside the conversion. - - (4029) when converting to or from unsafe.Pointer, mark all fields as used. + - (5.2) when converting to or from unsafe.Pointer, mark all fields as used. 
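As a standalone illustration of the two conversion rules just renumbered (5.1 and 5.2) — struct and field names below are made up, this is only a sketch of the situations the rules describe:

package sketch

import "unsafe"

// Rule (5.1): local(w) only compiles because both structs have identical
// fields, so each field is linked to its counterpart. The fields still
// need some access outside the conversion to be considered used.
type wire struct {
	ID   int
	Name string
}

type local struct {
	ID   int
	Name string
}

func fromWire(w wire) local { return local(w) }

// Rule (5.2): a conversion from unsafe.Pointer may reach any field through
// memory, so every field of header gets marked as used.
type header struct {
	magic uint32
	size  uint32
}

func readHeader(p unsafe.Pointer) header { return *(*header)(p) }
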
- structs use: - - (4946) fields of type NoCopy sentinel - - (2701) exported fields - - (7540) embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) - - (6090) embedded fields that have exported methods (recursively) - - (8728) embedded structs that have exported fields (recursively) + - (6.1) fields of type NoCopy sentinel + - (6.2) exported fields + - (6.3) embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) + - (6.4) embedded fields that have exported methods (recursively) + - (6.5) embedded structs that have exported fields (recursively) - field accesses use fields -- (6020) How we handle interfaces: - - (8080) We do not technically care about interfaces that only consist of +- (8.0) How we handle interfaces: + - (8.1) We do not technically care about interfaces that only consist of exported methods. Exported methods on concrete types are always marked as used. - Any concrete type implements all known interfaces. Even if it isn't @@ -88,16 +88,16 @@ TODO error interface way, types aren't incorrectly marked reachable through the edge from method to type. - - (3393) All interface methods are marked as used, even if they never get + - (8.3) All interface methods are marked as used, even if they never get called. This is to accomodate sum types (unexported interface method that must exist but never gets called.) - Inherent uses: - thunks and other generated wrappers call the real function - - (254) instructions use their types - - (5749) variables use their types - - (6108) types use their underlying and element types - - (853) conversions use the type they convert to + - (9.2) instructions use their types + - (9.3) variables use their types + - (9.4) types use their underlying and element types + - (9.5) conversions use the type they convert to - things named _ are used */ @@ -526,7 +526,7 @@ func (g *Graph) entry(tinfo *types.Info) { g.see(obj) fn := surroundingFunc(obj) if fn == nil && obj.Exported() { - // (321) packages use exported constants + // (1.4) packages use exported constants g.use(obj, nil, "exported constant") } g.typ(obj.Type()) @@ -583,16 +583,16 @@ func (g *Graph) entry(tinfo *types.Info) { case *ssa.Function: g.see(m) if m.Name() == "init" { - // (6719) packages use init functions + // (1.5) packages use init functions g.use(m, nil, "init function") } // This branch catches top-level functions, not methods. 
if m.Object() != nil && m.Object().Exported() { - // (5252) packages use exported functions + // (1.2) packages use exported functions g.use(m, nil, "exported top-level function") } if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { - // (4644) packages use the main function iff in the main package + // (1.7) packages use the main function iff in the main package g.use(m, nil, "main function") } g.function(m) @@ -600,7 +600,7 @@ func (g *Graph) entry(tinfo *types.Info) { if m.Object() != nil { g.see(m.Object()) if m.Object().Exported() { - // (2525) packages use exported named types + // (1.1) packages use exported named types g.use(m.Object(), nil, "exported top-level type") } } @@ -616,7 +616,7 @@ func (g *Graph) entry(tinfo *types.Info) { g.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: - // OPT(dh): (8080) we only need interfaces that have unexported methods + // OPT(dh): (8.1) we only need interfaces that have unexported methods ifaces = append(ifaces, t) default: if _, ok := t.Underlying().(*types.Interface); !ok { @@ -625,7 +625,7 @@ func (g *Graph) entry(tinfo *types.Info) { } }) - // (6020) handle interfaces + // (8.0) handle interfaces for _, iface := range ifaces { for _, t := range notIfaces { if g.implements(t, iface) { @@ -640,7 +640,7 @@ func (g *Graph) entry(tinfo *types.Info) { base := lintdsl.Dereference(t).Underlying().(*types.Struct) for _, idx := range path[:len(path)-1] { next := base.Field(idx) - // (7540) structs use embedded fields that help implement interfaces + // (6.3) structs use embedded fields that help implement interfaces g.seeAndUse(next, base, "helps implement") base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) } @@ -659,12 +659,12 @@ func (g *Graph) entry(tinfo *types.Info) { } func (g *Graph) function(fn *ssa.Function) { - // (1663) functions use all their arguments, return parameters and receivers + // (4.1) functions use all their arguments, return parameters and receivers g.seeAndUse(fn.Signature, fn, "function signature") g.signature(fn.Signature) g.instructions(fn) for _, anon := range fn.AnonFuncs { - // (9567) functions use anonymous functions defined beneath them + // (4.2) functions use anonymous functions defined beneath them g.seeAndUse(anon, fn, "anonymous function") g.function(anon) } @@ -690,10 +690,10 @@ func (g *Graph) typ(t types.Type) { for i := 0; i < t.NumFields(); i++ { g.see(t.Field(i)) if t.Field(i).Exported() { - // (2701) structs use exported fields + // (6.2) structs use exported fields g.use(t.Field(i), t, "exported struct field") } else if isNoCopyType(t.Field(i).Type()) { - // (4946) structs use fields of type NoCopy sentinel + // (6.1) structs use fields of type NoCopy sentinel g.use(t.Field(i), t, "NoCopy sentinel") } if t.Field(i).Anonymous() { @@ -701,7 +701,7 @@ func (g *Graph) typ(t types.Type) { ms := g.msCache.MethodSet(t.Field(i).Type()) for j := 0; j < ms.Len(); j++ { if ms.At(j).Obj().Exported() { - // (6090) structs use embedded fields that have exported methods (recursively) + // (6.4) structs use embedded fields that have exported methods (recursively) g.use(t.Field(i), t, "extends exported method set") break } @@ -731,7 +731,7 @@ func (g *Graph) typ(t types.Type) { } // does the embedded field contribute exported fields? 
if hasExportedField(t.Field(i).Type()) { - // (8728) structs use embedded structs that have exported fields (recursively) + // (6.5) structs use embedded structs that have exported fields (recursively) g.use(t.Field(i), t, "extends exported fields") } @@ -741,7 +741,7 @@ func (g *Graph) typ(t types.Type) { case *types.Basic: // Nothing to do case *types.Named: - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Underlying(), t, "underlying type") g.seeAndUse(t.Obj(), t, "type name") g.seeAndUse(t, t.Obj(), "named type") @@ -750,7 +750,7 @@ func (g *Graph) typ(t types.Type) { meth := g.pkg.Prog.FuncValue(t.Method(i)) g.see(meth) if meth.Object() != nil && meth.Object().Exported() { - // (9728) named types use exported methods + // (2.1) named types use exported methods g.use(meth, t, "exported method") } g.function(meth) @@ -758,13 +758,13 @@ func (g *Graph) typ(t types.Type) { g.typ(t.Underlying()) case *types.Slice: - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Map: - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Key(), t, "key type") g.typ(t.Elem()) g.typ(t.Key()) @@ -773,26 +773,26 @@ func (g *Graph) typ(t types.Type) { case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) - // (3393) All interface methods are marked as used + // (8.3) All interface methods are marked as used g.seeAndUse(m, t, "interface method") g.seeAndUse(m.Type().(*types.Signature), m, "signature") g.signature(m.Type().(*types.Signature)) } case *types.Array: - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Pointer: - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Chan: - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Tuple: for i := 0; i < t.Len(); i++ { - // (6108) types use their underlying and element types + // (9.4) types use their underlying and element types g.seeAndUse(t.At(i), t, "tuple element") g.variable(t.At(i)) } @@ -802,7 +802,7 @@ func (g *Graph) typ(t types.Type) { } func (g *Graph) variable(v *types.Var) { - // (5749) variables use their types + // (9.3) variables use their types g.seeAndUse(v.Type(), v, "variable type") g.typ(v.Type()) } @@ -836,7 +836,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if _, ok := v.(*ssa.Range); !ok { // See https://github.com/golang/go/issues/19670 - // (254) instructions use their types + // (9.2) instructions use their types g.seeAndUse(v.Type(), fn, "instruction") g.typ(v.Type()) } @@ -845,12 +845,12 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Field: st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) - // (1323) functions use fields they access + // (4.7) functions use fields they access g.seeAndUse(field, fn, "field access") 
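For reference, the NoCopy sentinel covered by rule (6.1) above usually has roughly the following shape (names invented; the exact shape isNoCopyType accepts may be narrower): an empty struct with a no-op Lock method, kept as a field purely as a do-not-copy marker, never read or written.

package sketch

// noCopy exists only as a marker that values containing it must not be
// copied; the Lock method is never called.
type noCopy struct{}

func (*noCopy) Lock() {}

type Pool struct {
	noCopy noCopy // never accessed; rule (6.1) keeps it from being flagged
	items  []int
}
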
case *ssa.FieldAddr: st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) - // (1323) functions use fields they access + // (4.7) functions use fields they access g.seeAndUse(field, fn, "field access") case *ssa.Store: case *ssa.Call: @@ -865,7 +865,7 @@ func (g *Graph) instructions(fn *ssa.Function) { seen[v] = struct{}{} switch v := v.(type) { case *ssa.Function: - // (9681) functions use functions/interface methods they call + // (4.5) functions use functions/interface methods they call g.seeAndUse(v, fn, "function call") if obj := v.Object(); obj != nil { if cfn := g.pkg.Prog.FuncValue(obj.(*types.Func)); cfn != v { @@ -891,7 +891,7 @@ func (g *Graph) instructions(fn *ssa.Function) { // non-interface call useCall(c.Value) } else { - // (9681) functions use functions/interface methods they call + // (4.5) functions use functions/interface methods they call g.seeAndUse(c.Method, fn, "interface call") } case *ssa.Return: @@ -904,10 +904,10 @@ func (g *Graph) instructions(fn *ssa.Function) { seen[v] = struct{}{} switch v := v.(type) { case *ssa.Function: - // (8103) functions use functions they return. we assume that someone else will call the returned function + // (4.4) functions use functions they return. we assume that someone else will call the returned function g.seeAndUse(v, fn, "returning function") case *ssa.MakeClosure: - // nothing to do. 8103 doesn't apply because this case is covered by 2521. + // nothing to do. 4.4 doesn't apply because this case is covered by 4.3. case *ssa.Phi: for _, e := range v.Edges { handleReturn(e) @@ -918,7 +918,7 @@ func (g *Graph) instructions(fn *ssa.Function) { handleReturn(v) } case *ssa.ChangeType: - // (853) conversions use the type they convert to + // (9.5) conversions use the type they convert to g.seeAndUse(instr.Type(), fn, "conversion") g.typ(instr.Type()) @@ -934,7 +934,7 @@ func (g *Graph) instructions(fn *ssa.Function) { for i := 0; i < s1.NumFields(); i++ { g.see(s1.Field(i)) g.see(s2.Field(i)) - // (6885) when converting between two equivalent structs, the fields in + // (5.1) when converting between two equivalent structs, the fields in // either struct use each other. the fields are relevant for the // conversion, but only if the fields are also accessed outside the // conversion. @@ -952,7 +952,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { - // (4029) when converting to or from unsafe.Pointer, mark all fields as used. + // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. g.seeAndUse(st.Field(i), fn, "unsafe conversion") } } @@ -963,7 +963,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { - // (4029) when converting to or from unsafe.Pointer, mark all fields as used. + // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. g.seeAndUse(st.Field(i), fn, "unsafe conversion") } } @@ -973,7 +973,7 @@ func (g *Graph) instructions(fn *ssa.Function) { g.seeAndUse(instr.AssertedType, fn, "type assert") g.typ(instr.AssertedType) case *ssa.MakeClosure: - // (2521) functions use closures and bound methods. + // (4.3) functions use closures and bound methods. 
g.seeAndUse(instr.Fn, fn, "make closure") v := instr.Fn.(*ssa.Function) if obj := v.Object(); obj != nil { From 897f8204285af18fd62eba6da4432b3eb8fb8073 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 06:44:26 +0100 Subject: [PATCH 056/254] unused2: update TODOs --- unused2/unused.go | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 1eff3c98d..c24cdae04 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -12,14 +12,6 @@ import ( "honnef.co/go/tools/ssa" ) -// OPT(dh): optimize graph by not storing irrelevant nodes. storing -// basic types, empty signatures etc doesn't add any information to -// the graph. -// -// OPT(dh): deduplicate edges -// -// OPT(dh): don't track function calls into external packages - // TODO(dh): conversions between structs mark fields as used, but the // conversion itself isn't part of that subgraph. even if the function // containing the conversion is unused, the fields will be marked as @@ -29,9 +21,6 @@ const debug = false /* -TODO known reflect -TODO error interface - - packages use: - (1.1) exported named types - (1.2) exported functions @@ -99,7 +88,7 @@ TODO error interface - (9.4) types use their underlying and element types - (9.5) conversions use the type they convert to -- things named _ are used +- TODO things named _ are used */ func assert(b bool) { From c5094b897e332f3f2ed8eeac451f2ed2f24c1eea Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 19:59:47 +0100 Subject: [PATCH 057/254] unused2: certain exported objects aren't automatically used in package main --- unused2/unused.go | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index c24cdae04..ed1703ab1 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -22,10 +22,10 @@ const debug = false /* - packages use: - - (1.1) exported named types - - (1.2) exported functions - - exported variables - - (1.4) exported constants + - (1.1) exported named types (unless in package main) + - (1.2) exported functions (unless in package main) + - TODO exported variables (TODO unless in package main) + - (1.4) exported constants (unless in package main) - (1.5) init functions - TODO functions exported to cgo - (1.7) the main function iff in the main package @@ -514,8 +514,8 @@ func (g *Graph) entry(tinfo *types.Info) { case *types.Const: g.see(obj) fn := surroundingFunc(obj) - if fn == nil && obj.Exported() { - // (1.4) packages use exported constants + if fn == nil && obj.Exported() && g.pkg.Pkg.Name() != "main" { + // (1.4) packages use exported constants (unless in package main) g.use(obj, nil, "exported constant") } g.typ(obj.Type()) @@ -576,8 +576,8 @@ func (g *Graph) entry(tinfo *types.Info) { g.use(m, nil, "init function") } // This branch catches top-level functions, not methods. 
- if m.Object() != nil && m.Object().Exported() { - // (1.2) packages use exported functions + if m.Object() != nil && m.Object().Exported() && g.pkg.Pkg.Name() != "main" { + // (1.2) packages use exported functions (unless in package main) g.use(m, nil, "exported top-level function") } if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { @@ -588,8 +588,8 @@ func (g *Graph) entry(tinfo *types.Info) { case *ssa.Type: if m.Object() != nil { g.see(m.Object()) - if m.Object().Exported() { - // (1.1) packages use exported named types + if m.Object().Exported() && g.pkg.Pkg.Name() != "main" { + // (1.1) packages use exported named types (unless in package main) g.use(m.Object(), nil, "exported top-level type") } } From 3290ad0ba963d901becfccb3f0ee3674dc8a315d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 20:12:23 +0100 Subject: [PATCH 058/254] unused2: track global variables --- unused2/unused.go | 40 ++++++++++++++++++++++++++++------------ 1 file changed, 28 insertions(+), 12 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index ed1703ab1..d5327fea0 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -24,7 +24,7 @@ const debug = false - packages use: - (1.1) exported named types (unless in package main) - (1.2) exported functions (unless in package main) - - TODO exported variables (TODO unless in package main) + - (1.3) exported variables (unless in package main) - (1.4) exported constants (unless in package main) - (1.5) init functions - TODO functions exported to cgo @@ -46,7 +46,7 @@ const debug = false - (4.5) functions/interface methods they call - types they instantiate or convert to - (4.7) fields they access - - types of all instructions + - (4.8) types of all instructions - conversions use: - (5.1) when converting between two equivalent structs, the fields in @@ -88,6 +88,8 @@ const debug = false - (9.4) types use their underlying and element types - (9.5) conversions use the type they convert to +- (10.1) dereferences use variables + - TODO things named _ are used */ @@ -188,13 +190,6 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { return } switch obj := node.obj.(type) { - case *types.Var: - if !obj.IsField() { - // Only flag fields. Variables are either - // arguments or local variables, neither of which - // should ever be reported. 
- node.quiet = true - } case *types.Named: for i := 0; i < obj.NumMethods(); i++ { m := pkg.SSA.Prog.FuncValue(obj.Method(i)) @@ -378,7 +373,15 @@ func isIrrelevant(obj interface{}) bool { if obj, ok := obj.(types.Object); ok { switch obj := obj.(type) { case *types.Var: - return !obj.IsField() && isIrrelevant(obj.Type()) + if obj.IsField() { + // We need to track package fields + return false + } + if obj.Parent() == obj.Pkg().Scope() { + // We need to track package-level variables + return false + } + return isIrrelevant(obj.Type()) default: return false } @@ -568,7 +571,13 @@ func (g *Graph) entry(tinfo *types.Info) { case *ssa.NamedConst: // XXX case *ssa.Global: - // XXX + if m.Object() != nil { + g.see(m.Object()) + if m.Object().Exported() && g.pkg.Pkg.Name() != "main" { + // (1.3) packages use exported variables (unless in package main) + g.use(m.Object(), nil, "exported top-level variable") + } + } case *ssa.Function: g.see(m) if m.Name() == "init" { @@ -979,7 +988,14 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Alloc: // nothing to do case *ssa.UnOp: - // nothing to do + if instr.Op == token.MUL { + if v, ok := instr.X.(*ssa.Global); ok { + if v.Object() != nil { + // (10.1) dereferences use variables + g.seeAndUse(v.Object(), fn, "variable read") + } + } + } case *ssa.BinOp: // nothing to do case *ssa.If: From 0cc18f27277ee93036c172fb88417cc00c6999cb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 20:13:05 +0100 Subject: [PATCH 059/254] unused2: update documentation --- unused2/unused.go | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index d5327fea0..97a6d3af8 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -83,10 +83,9 @@ const debug = false - Inherent uses: - thunks and other generated wrappers call the real function - - (9.2) instructions use their types - - (9.3) variables use their types - - (9.4) types use their underlying and element types - - (9.5) conversions use the type they convert to + - (9.2) variables use their types + - (9.3) types use their underlying and element types + - (9.4) conversions use the type they convert to - (10.1) dereferences use variables @@ -739,7 +738,7 @@ func (g *Graph) typ(t types.Type) { case *types.Basic: // Nothing to do case *types.Named: - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Underlying(), t, "underlying type") g.seeAndUse(t.Obj(), t, "type name") g.seeAndUse(t, t.Obj(), "named type") @@ -756,13 +755,13 @@ func (g *Graph) typ(t types.Type) { g.typ(t.Underlying()) case *types.Slice: - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Map: - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Key(), t, "key type") g.typ(t.Elem()) g.typ(t.Key()) @@ -777,20 +776,20 @@ func (g *Graph) typ(t types.Type) { g.signature(m.Type().(*types.Signature)) } case *types.Array: - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Pointer: - // (9.4) types use 
their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Chan: - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") g.typ(t.Elem()) case *types.Tuple: for i := 0; i < t.Len(); i++ { - // (9.4) types use their underlying and element types + // (9.3) types use their underlying and element types g.seeAndUse(t.At(i), t, "tuple element") g.variable(t.At(i)) } @@ -800,7 +799,7 @@ func (g *Graph) typ(t types.Type) { } func (g *Graph) variable(v *types.Var) { - // (9.3) variables use their types + // (9.2) variables use their types g.seeAndUse(v.Type(), v, "variable type") g.typ(v.Type()) } @@ -834,7 +833,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if _, ok := v.(*ssa.Range); !ok { // See https://github.com/golang/go/issues/19670 - // (9.2) instructions use their types + // (4.8) instructions use their types g.seeAndUse(v.Type(), fn, "instruction") g.typ(v.Type()) } @@ -916,7 +915,7 @@ func (g *Graph) instructions(fn *ssa.Function) { handleReturn(v) } case *ssa.ChangeType: - // (9.5) conversions use the type they convert to + // (9.4) conversions use the type they convert to g.seeAndUse(instr.Type(), fn, "conversion") g.typ(instr.Type()) From 0fa93cdd95c47cb2c2b6d3a01e2bba923956f269 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:09:32 +0100 Subject: [PATCH 060/254] unused2: don't crash due to variables without a package --- unused2/unused.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 97a6d3af8..b3c5864d5 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -86,8 +86,7 @@ const debug = false - (9.2) variables use their types - (9.3) types use their underlying and element types - (9.4) conversions use the type they convert to - -- (10.1) dereferences use variables + - (9.5) dereferences use variables - TODO things named _ are used */ @@ -376,7 +375,7 @@ func isIrrelevant(obj interface{}) bool { // We need to track package fields return false } - if obj.Parent() == obj.Pkg().Scope() { + if obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() { // We need to track package-level variables return false } @@ -990,7 +989,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if instr.Op == token.MUL { if v, ok := instr.X.(*ssa.Global); ok { if v.Object() != nil { - // (10.1) dereferences use variables + // (9.5) dereferences use variables g.seeAndUse(v.Object(), fn, "variable read") } } From 71211c2824fecf4fbd61b3fdda8f4c6a0d45b013 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:10:28 +0100 Subject: [PATCH 061/254] unused2: don't flag unused receivers/params/results of unused functions --- unused2/unused.go | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/unused2/unused.go b/unused2/unused.go index b3c5864d5..fb6655577 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -183,11 +183,32 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { graph.entry(pkg.TypesInfo) graph.color(graph.Root) + // if a node is unused, don't report any of the node's + // children as unused. for example, if a function is unused, + // don't flag its receiver. if a named type is unused, don't + // flag its methods. 
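In terms of output, the quieting described in the comment above means that only the outermost unused object is reported. A sketch with an invented type:

package sketch

type parser struct{} // the only diagnostic: parser is unused

// parse is not flagged on its own, and neither are its receiver,
// parameter or result: they are quieted because parser itself is unused.
func (p parser) parse(input string) (ok bool) {
	_ = input
	return false
}
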
quieten := func(node *Node) { if node.seen { return } switch obj := node.obj.(type) { + case *ssa.Function: + sig := obj.Type().(*types.Signature) + if sig.Recv() != nil { + if node, ok := graph.nodeMaybe(sig.Recv()); ok { + node.quiet = true + } + } + for i := 0; i < sig.Params().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { + node.quiet = true + } + } + for i := 0; i < sig.Results().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { + node.quiet = true + } + } case *types.Named: for i := 0; i < obj.NumMethods(); i++ { m := pkg.SSA.Prog.FuncValue(obj.Method(i)) From 157af158d9c413fda7ac715bf57199742f38ced1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:15:44 +0100 Subject: [PATCH 062/254] unused2: don't report unnamed variables --- unused2/unused.go | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/unused2/unused.go b/unused2/unused.go index fb6655577..c92d9e742 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -245,6 +245,17 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { fmt.Printf("n%d [color=red];\n", node.id) } switch obj := node.obj.(type) { + case *types.Var: + // don't report unnamed variables (receivers, interface embedding) + if obj.Name() != "" || obj.IsField() { + if obj.Pkg() == pkg.Package.Types { + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj, + Position: pos, + }) + } + } case types.Object: if obj.Pkg() == pkg.Package.Types { pos := prog.Fset().Position(obj.Pos()) From 93c26fdd14ba8992de11bc53b88872b6902ea606 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:22:47 +0100 Subject: [PATCH 063/254] unused2: use types used in blank stores --- unused2/unused.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unused2/unused.go b/unused2/unused.go index c92d9e742..ba5d3c9b7 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -1041,7 +1041,8 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.DebugRef: // nothing to do case *ssa.BlankStore: - // nothing to do + // catch `_ = typedConst` + g.seeAndUse(instr.Val.Type(), fn, "blank store") case *ssa.Phi: // nothing to do case *ssa.MakeMap: From 47c5e6cb0e9a04308f183d113a6b43271666040b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:23:46 +0100 Subject: [PATCH 064/254] unused2: improve tests --- unused2/testdata/src/blank/blank.go | 2 ++ .../src/exported_fields/exported_fields.go | 2 +- unused2/testdata/src/functions/functions.go | 2 +- unused2/testdata/src/interfaces/interfaces.go | 35 +++++++++++++++++-- 4 files changed, 37 insertions(+), 4 deletions(-) diff --git a/unused2/testdata/src/blank/blank.go b/unused2/testdata/src/blank/blank.go index d220bdff3..994e5a402 100644 --- a/unused2/testdata/src/blank/blank.go +++ b/unused2/testdata/src/blank/blank.go @@ -5,6 +5,7 @@ import _ "fmt" type t1 struct{} // MATCH /t1 is unused/ type t2 struct{} type t3 struct{} +type t4 struct{} var _ = t2{} @@ -15,6 +16,7 @@ func fn1() { // MATCH /fn1 is unused/ func fn2() { _ = t3{} + var _ t4 } func init() { diff --git a/unused2/testdata/src/exported_fields/exported_fields.go b/unused2/testdata/src/exported_fields/exported_fields.go index 64686ccab..2c909a8c4 100644 --- a/unused2/testdata/src/exported_fields/exported_fields.go +++ b/unused2/testdata/src/exported_fields/exported_fields.go @@ -32,5 +32,5 @@ type codeNode struct { } func init() { - var _ codeResponse + _ = codeResponse{} } diff --git 
a/unused2/testdata/src/functions/functions.go b/unused2/testdata/src/functions/functions.go index 80b5b5d0b..8434e8587 100644 --- a/unused2/testdata/src/functions/functions.go +++ b/unused2/testdata/src/functions/functions.go @@ -26,7 +26,7 @@ func fn4() { type bar int _ = x - var _ bar + _ = bar(0) } func init() { diff --git a/unused2/testdata/src/interfaces/interfaces.go b/unused2/testdata/src/interfaces/interfaces.go index e810549a7..cb507a07e 100644 --- a/unused2/testdata/src/interfaces/interfaces.go +++ b/unused2/testdata/src/interfaces/interfaces.go @@ -10,6 +10,37 @@ func (t) fn1() {} func (t) fn2() {} // MATCH /fn2 is unused/ func init() { - var _ I - var _ t + _ = t{} +} + +type I1 interface { + Foo() +} + +type I2 interface { + Foo() + bar() +} + +type t1 struct{} +type t2 struct{} +type t3 struct{} +type t4 struct{ t3 } + +func (t1) Foo() {} +func (t2) Foo() {} +func (t2) bar() {} +func (t3) Foo() {} +func (t3) bar() {} + +func Fn() { + var v1 t1 + var v2 t2 + var v3 t3 + var v4 t4 + _ = v1 + _ = v2 + _ = v3 + var x interface{} = v4 + _ = x.(I2) } From f663574cf042a770340c5593bb1e4d5ad481aa54 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:24:30 +0100 Subject: [PATCH 065/254] unused2: don't flag objects called _ --- unused2/unused.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index ba5d3c9b7..0e6bffd0c 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -17,7 +17,7 @@ import ( // containing the conversion is unused, the fields will be marked as // used. -const debug = false +const debug = true /* @@ -257,7 +257,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } } case types.Object: - if obj.Pkg() == pkg.Package.Types { + if obj.Pkg() == pkg.Package.Types && obj.Name() != "_" { pos := prog.Fset().Position(obj.Pos()) out = append(out, Unused{ Obj: obj, From 0a29bb55d29752c9db7b779bc06cb5d7e048c223 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 18 Mar 2019 23:45:51 +0100 Subject: [PATCH 066/254] unused2: use cgo exported functions --- unused2/unused.go | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/unused2/unused.go b/unused2/unused.go index 0e6bffd0c..6321a41aa 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -5,6 +5,7 @@ import ( "go/ast" "go/token" "go/types" + "strings" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/lint" @@ -27,7 +28,7 @@ const debug = true - (1.3) exported variables (unless in package main) - (1.4) exported constants (unless in package main) - (1.5) init functions - - TODO functions exported to cgo + - (1.6) functions exported to cgo - (1.7) the main function iff in the main package - named types use: @@ -623,6 +624,17 @@ func (g *Graph) entry(tinfo *types.Info) { // (1.7) packages use the main function iff in the main package g.use(m, nil, "main function") } + if m.Syntax() != nil { + doc := m.Syntax().(*ast.FuncDecl).Doc + if doc != nil { + for _, cmt := range doc.List { + if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { + // (1.6) packages use functions exported to cgo + g.use(m, nil, "cgo exported") + } + } + } + } g.function(m) case *ssa.Type: if m.Object() != nil { From 0717c8be849848e934ba4f5329cdcd87bbd767c2 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 00:30:36 +0100 Subject: [PATCH 067/254] unused2: make sure we catch all uses of global variables --- unused2/testdata/src/variables/variables.go | 9 +++ 
unused2/unused.go | 65 +++++++++++++++++---- 2 files changed, 63 insertions(+), 11 deletions(-) create mode 100644 unused2/testdata/src/variables/variables.go diff --git a/unused2/testdata/src/variables/variables.go b/unused2/testdata/src/variables/variables.go new file mode 100644 index 000000000..65c44b2e1 --- /dev/null +++ b/unused2/testdata/src/variables/variables.go @@ -0,0 +1,9 @@ +package pkg + +var a byte +var b [16]byte + +func Fn() { + println(a) + _ = b[:] +} diff --git a/unused2/unused.go b/unused2/unused.go index 6321a41aa..cfc38fefe 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -87,7 +87,9 @@ const debug = true - (9.2) variables use their types - (9.3) types use their underlying and element types - (9.4) conversions use the type they convert to - - (9.5) dereferences use variables + - (9.5) instructions use their operands + - (9.6) instructions use their operands' types + - (9.7) variable _reads_ use variables, writes do not - TODO things named _ are used */ @@ -872,6 +874,28 @@ func (g *Graph) instructions(fn *ssa.Function) { for _, b := range fn.Blocks { for _, instr := range b.Instrs { + ops := instr.Operands(nil) + switch instr.(type) { + case *ssa.Store: + // (9.7) variable _reads_ use variables, writes do not + ops = ops[1:] + case *ssa.DebugRef: + ops = nil + } + for _, arg := range ops { + walkPhi(*arg, func(v ssa.Value) { + switch v := v.(type) { + case *ssa.Const: + // (9.6) instructions use their operands' types + g.seeAndUse(v.Type(), fn, "constant's type") + case *ssa.Global: + if v.Object() != nil { + // (9.5) instructions use their operands + g.seeAndUse(v.Object(), fn, "instruction operand") + } + } + }) + } if v, ok := instr.(ssa.Value); ok { if _, ok := v.(*ssa.Range); !ok { // See https://github.com/golang/go/issues/19670 @@ -983,7 +1007,9 @@ func (g *Graph) instructions(fn *ssa.Function) { } } case *ssa.MakeInterface: + // nothing to doX case *ssa.Slice: + // nothing to do case *ssa.RunDefers: // XXX use deferred functions case *ssa.Convert: @@ -1030,14 +1056,7 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Alloc: // nothing to do case *ssa.UnOp: - if instr.Op == token.MUL { - if v, ok := instr.X.(*ssa.Global); ok { - if v.Object() != nil { - // (9.5) dereferences use variables - g.seeAndUse(v.Object(), fn, "variable read") - } - } - } + // nothing to do case *ssa.BinOp: // nothing to do case *ssa.If: @@ -1053,8 +1072,7 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.DebugRef: // nothing to do case *ssa.BlankStore: - // catch `_ = typedConst` - g.seeAndUse(instr.Val.Type(), fn, "blank store") + // nothing to do case *ssa.Phi: // nothing to do case *ssa.MakeMap: @@ -1122,3 +1140,28 @@ func isNoCopyType(typ types.Type) bool { } return true } + +func walkPhi(v ssa.Value, fn func(v ssa.Value)) { + phi, ok := v.(*ssa.Phi) + if !ok { + fn(v) + return + } + + seen := map[ssa.Value]struct{}{} + var impl func(v *ssa.Phi) + impl = func(v *ssa.Phi) { + if _, ok := seen[v]; ok { + return + } + seen[v] = struct{}{} + for _, e := range v.Edges { + if ev, ok := e.(*ssa.Phi); ok { + impl(ev) + } else { + fn(e) + } + } + } + impl(phi) +} From d116c56a00f35fccc49f42d6368582e8967f9e22 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 02:19:48 +0100 Subject: [PATCH 068/254] unused2: handle more instructions generically --- unused2/unused.go | 96 ++++++++++------------------------------------- 1 file changed, 19 insertions(+), 77 deletions(-) diff --git a/unused2/unused.go 
b/unused2/unused.go index cfc38fefe..0c211974a 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -885,6 +885,13 @@ func (g *Graph) instructions(fn *ssa.Function) { for _, arg := range ops { walkPhi(*arg, func(v ssa.Value) { switch v := v.(type) { + case *ssa.Function: + // (4.3) functions use closures and bound methods. + // (4.5) functions use functions they call + // (9.5) instructions use their operands + // (4.4) functions use functions they return. we assume that someone else will call the returned function + g.seeAndUse(v, fn, "instruction operand") + g.function(v) case *ssa.Const: // (9.6) instructions use their operands' types g.seeAndUse(v.Type(), fn, "constant's type") @@ -901,6 +908,7 @@ func (g *Graph) instructions(fn *ssa.Function) { // See https://github.com/golang/go/issues/19670 // (4.8) instructions use their types + // (9.4) conversions use the type they convert to g.seeAndUse(v.Type(), fn, "instruction") g.typ(v.Type()) } @@ -917,74 +925,19 @@ func (g *Graph) instructions(fn *ssa.Function) { // (4.7) functions use fields they access g.seeAndUse(field, fn, "field access") case *ssa.Store: + // nothing to do, handled generically by operands case *ssa.Call: c := instr.Common() if !c.IsInvoke() { - seen := map[ssa.Value]struct{}{} - var useCall func(v ssa.Value) - useCall = func(v ssa.Value) { - if _, ok := seen[v]; ok { - return - } - seen[v] = struct{}{} - switch v := v.(type) { - case *ssa.Function: - // (4.5) functions use functions/interface methods they call - g.seeAndUse(v, fn, "function call") - if obj := v.Object(); obj != nil { - if cfn := g.pkg.Prog.FuncValue(obj.(*types.Func)); cfn != v { - // The called function is a thunk (or similar), - // process its instructions to get the call to the real function. - // Alternatively, we could mark the function as used by the thunk. - // - // We can detect the thunk because ssa.Function -> types.Object -> ssa.Function - // leads from the thunk to the real function. - g.instructions(v) - } - } - case *ssa.MakeClosure: - useCall(v.Fn) - case *ssa.Builtin: - // nothing to do - case *ssa.Phi: - for _, e := range v.Edges { - useCall(e) - } - } - } - // non-interface call - useCall(c.Value) + // handled generically as an instruction operand } else { // (4.5) functions use functions/interface methods they call g.seeAndUse(c.Method, fn, "interface call") } case *ssa.Return: - seen := map[ssa.Value]struct{}{} - var handleReturn func(v ssa.Value) - handleReturn = func(v ssa.Value) { - if _, ok := seen[v]; ok { - return - } - seen[v] = struct{}{} - switch v := v.(type) { - case *ssa.Function: - // (4.4) functions use functions they return. we assume that someone else will call the returned function - g.seeAndUse(v, fn, "returning function") - case *ssa.MakeClosure: - // nothing to do. 4.4 doesn't apply because this case is covered by 4.3. 
- case *ssa.Phi: - for _, e := range v.Edges { - handleReturn(e) - } - } - } - for _, v := range instr.Results { - handleReturn(v) - } + // nothing to do, handled generically by operands case *ssa.ChangeType: - // (9.4) conversions use the type they convert to - g.seeAndUse(instr.Type(), fn, "conversion") - g.typ(instr.Type()) + // conversion type handled generically s1, ok1 := lintdsl.Dereference(instr.Type()).Underlying().(*types.Struct) s2, ok2 := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) @@ -1007,9 +960,9 @@ func (g *Graph) instructions(fn *ssa.Function) { } } case *ssa.MakeInterface: - // nothing to doX + // nothing to do, handled generically by operands case *ssa.Slice: - // nothing to do + // nothing to do, handled generically by operands case *ssa.RunDefers: // XXX use deferred functions case *ssa.Convert: @@ -1036,23 +989,12 @@ func (g *Graph) instructions(fn *ssa.Function) { } } case *ssa.TypeAssert: - g.seeAndUse(instr.AssertedType, fn, "type assert") - g.typ(instr.AssertedType) + // nothing to do, handled generically by instruction + // type (possibly a tuple, which contains the asserted + // to type). redundantly handled by the type of + // ssa.Extract, too case *ssa.MakeClosure: - // (4.3) functions use closures and bound methods. - g.seeAndUse(instr.Fn, fn, "make closure") - v := instr.Fn.(*ssa.Function) - if obj := v.Object(); obj != nil { - if cfn := g.pkg.Prog.FuncValue(obj.(*types.Func)); cfn != v { - // The called function is a $bound (or similar), - // process its instructions to get the call to the real function. - // Alternatively, we could mark the function as used by the $bound. - // - // We can detect the $bound because ssa.Function -> types.Object -> ssa.Function - // leads from the thunk to the real function. 
- g.instructions(v) - } - } + // nothing to do, handled generically by operands case *ssa.Alloc: // nothing to do case *ssa.UnOp: From b3336c956ddc55e5f142756769e5282d64629fcb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 02:46:28 +0100 Subject: [PATCH 069/254] unused2: don't process functions in other packages, do less work --- unused2/unused.go | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 0c211974a..e8c74429f 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -701,6 +701,14 @@ func (g *Graph) entry(tinfo *types.Info) { } func (g *Graph) function(fn *ssa.Function) { + if fn.Package() != nil && fn.Package() != g.pkg { + return + } + if _, ok := g.seenFns[fn]; ok { + return + } + g.seenFns[fn] = struct{}{} + // (4.1) functions use all their arguments, return parameters and receivers g.seeAndUse(fn.Signature, fn, "function signature") g.signature(fn.Signature) @@ -867,11 +875,6 @@ func (g *Graph) signature(sig *types.Signature) { } func (g *Graph) instructions(fn *ssa.Function) { - if _, ok := g.seenFns[fn]; ok { - return - } - g.seenFns[fn] = struct{}{} - for _, b := range fn.Blocks { for _, instr := range b.Instrs { ops := instr.Operands(nil) From 17d56bb1a87418800970f1922858b243619cc8cb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 03:16:53 +0100 Subject: [PATCH 070/254] unused2: don't report unused methods of unused interface --- unused2/testdata/src/nested/nested.go | 3 +-- unused2/testdata/src/quiet/quiet.go | 21 +++++++++++++++++++++ unused2/unused.go | 7 +++++++ 3 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 unused2/testdata/src/quiet/quiet.go diff --git a/unused2/testdata/src/nested/nested.go b/unused2/testdata/src/nested/nested.go index ade2c0dbb..518172830 100644 --- a/unused2/testdata/src/nested/nested.go +++ b/unused2/testdata/src/nested/nested.go @@ -7,9 +7,8 @@ func (t) fragment() {} func fn() bool { // MATCH /fn is unused/ var v interface{} = t{} switch obj := v.(type) { - // XXX it shouldn't report fragment(), because fn is unused case interface { - fragment() // MATCH /fragment is unused/ + fragment() }: obj.fragment() } diff --git a/unused2/testdata/src/quiet/quiet.go b/unused2/testdata/src/quiet/quiet.go new file mode 100644 index 000000000..dbdf47d1e --- /dev/null +++ b/unused2/testdata/src/quiet/quiet.go @@ -0,0 +1,21 @@ +package pkg + +type iface interface { // MATCH "type iface is unused" + foo() +} + +type t1 struct{} // MATCH "type t1 is unused" +func (t1) foo() {} + +type t2 struct{} + +func (t t2) bar(arg int) (ret int) { return 0 } // MATCH "func t2.bar is unused" + +func init() { + _ = t2{} +} + +type t3 struct { // MATCH "type t3 is unused" + a int + b int +} diff --git a/unused2/unused.go b/unused2/unused.go index e8c74429f..7c8af6b44 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -225,6 +225,13 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { node.quiet = true } } + case *types.Interface: + for i := 0; i < obj.NumExplicitMethods(); i++ { + m := obj.ExplicitMethod(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } + } } } for _, node := range graph.Nodes { From c06a69779b3fd3f786216c055493216972c6ff67 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 06:09:35 +0100 Subject: [PATCH 071/254] unused2: delete only_in_test test case Code that is only used by the test variant should be defined in the test variant. 
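A sketch of the layout the commit message points to (mirroring the deleted testdata): the helper is defined in the test variant, next to its only caller, instead of sitting in pkg.go and relying on the checker to treat test-only code as used.

// pkg_test.go
package pkg

import "testing"

// fn is only needed by the test, so it lives in the test variant rather
// than in pkg.go.
func fn() {}

func TestPkg(t *testing.T) {
	fn()
}
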
--- unused2/testdata/src/only_in_test/pkg.go | 3 --- unused2/testdata/src/only_in_test/pkg_test.go | 7 ------- 2 files changed, 10 deletions(-) delete mode 100644 unused2/testdata/src/only_in_test/pkg.go delete mode 100644 unused2/testdata/src/only_in_test/pkg_test.go diff --git a/unused2/testdata/src/only_in_test/pkg.go b/unused2/testdata/src/only_in_test/pkg.go deleted file mode 100644 index ca2d5b3cd..000000000 --- a/unused2/testdata/src/only_in_test/pkg.go +++ /dev/null @@ -1,3 +0,0 @@ -package pkg - -func fn() {} diff --git a/unused2/testdata/src/only_in_test/pkg_test.go b/unused2/testdata/src/only_in_test/pkg_test.go deleted file mode 100644 index 99fdfd753..000000000 --- a/unused2/testdata/src/only_in_test/pkg_test.go +++ /dev/null @@ -1,7 +0,0 @@ -package pkg - -import "testing" - -func TestPkg(t *testing.T) { - fn() -} From 44b561d161686bc3726147ab33a461d6c976dce7 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 06:25:26 +0100 Subject: [PATCH 072/254] unused2: update TODOs --- unused2/testdata/src/defer/defer.go | 16 ++++++++++++++++ unused2/unused.go | 10 +++++----- 2 files changed, 21 insertions(+), 5 deletions(-) create mode 100644 unused2/testdata/src/defer/defer.go diff --git a/unused2/testdata/src/defer/defer.go b/unused2/testdata/src/defer/defer.go new file mode 100644 index 000000000..a6cfdee73 --- /dev/null +++ b/unused2/testdata/src/defer/defer.go @@ -0,0 +1,16 @@ +package pkg + +type t struct{} + +func (t) fn1() {} +func (t) fn2() {} +func fn1() {} +func fn2() {} + +func Fn() { + var v t + defer fn1() + defer v.fn1() + go fn2() + go v.fn2() +} diff --git a/unused2/unused.go b/unused2/unused.go index 7c8af6b44..2aa2853a8 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -609,7 +609,7 @@ func (g *Graph) entry(tinfo *types.Info) { for _, m := range g.pkg.Members { switch m := m.(type) { case *ssa.NamedConst: - // XXX + // nothing to do, we collect all constants from Defs case *ssa.Global: if m.Object() != nil { g.see(m.Object()) @@ -974,7 +974,7 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Slice: // nothing to do, handled generically by operands case *ssa.RunDefers: - // XXX use deferred functions + // nothing to do, the deferred functions are already marked use by defering them. case *ssa.Convert: // to unsafe.Pointer if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { @@ -1048,11 +1048,11 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Select: // nothing to do case *ssa.ChangeInterface: - // XXX + // nothing to do case *ssa.Go: - // XXX + // nothing to do, handled generically by operands case *ssa.Defer: - // XXX + // nothing to do, handled generically by operands default: panic(fmt.Sprintf("unreachable: %T", instr)) } From 3e913d42fb6282d2f42e40eda15c3a2ee564d3f8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 19 Mar 2019 09:00:38 +0100 Subject: [PATCH 073/254] ssa: explicitly initialize blank variables with implicit zero value --- ssa/builder.go | 37 +++++++++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/ssa/builder.go b/ssa/builder.go index 032819a2a..9684f4044 100644 --- a/ssa/builder.go +++ b/ssa/builder.go @@ -995,10 +995,39 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { // e.g. var x, y int // Locals are implicitly zero-initialized. 
for _, id := range spec.Names { - if !isBlankIdent(id) { - lhs := fn.addLocalForIdent(id) - if fn.debugInfo() { - emitDebugRef(fn, id, lhs, true) + lhs := fn.addLocalForIdent(id) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) + } + // create explicit zero initialisation for blank + // identifiers so that we get BlankStores and the locals + // don't get optimized away + if isBlankIdent(id) { + // We don't need explicit code for structs and arrays, + // their locals don't get optimized away. + T := fn.Pkg.typeOf(spec.Type) + switch U := T.Underlying().(type) { + case *types.Pointer, *types.Slice, *types.Map, *types.Chan, *types.Signature, *types.Interface: + lval := b.addr(fn, id, false) + lval.store(fn, NewConst(nil, T)) + case *types.Basic: + n := U.Info() + + var k exact.Value + switch { + case (n & types.IsBoolean) != 0: + k = exact.MakeBool(false) + case (n & types.IsInteger) != 0: + k = exact.MakeInt64(0) + case (n & types.IsFloat) != 0: + k = exact.MakeFloat64(0) + case (n & types.IsComplex) != 0: + k = exact.MakeImag(exact.MakeInt64(0)) + case (n & types.IsString) != 0: + k = exact.MakeString("") + } + lval := b.addr(fn, id, false) + lval.store(fn, NewConst(k, T)) } } } From 3e9d2ef278bd09efe1224e968fdac5fe54ba563f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 20 Mar 2019 22:29:01 +0100 Subject: [PATCH 074/254] unused2: add support for whole program mode --- lint/lint.go | 3 + unused2/implements.go | 21 +-- unused2/unused.go | 341 ++++++++++++++++++++++++++++++++---------- 3 files changed, 279 insertions(+), 86 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 5a9c66fa2..5d806131c 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -473,6 +473,9 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration}) } for _, p := range j.problems { + if p.Package == nil { + panic(fmt.Sprintf("internal error: problem at position %s has nil package", p.Position)) + } allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks) if l.ignore(p) { diff --git a/unused2/implements.go b/unused2/implements.go index 7a5579d73..f3b134503 100644 --- a/unused2/implements.go +++ b/unused2/implements.go @@ -37,10 +37,10 @@ func sameId(obj types.Object, pkg *types.Package, name string) bool { return pkg.Path() == obj.Pkg().Path() } -func (g *Graph) implements(V types.Type, T *types.Interface) bool { +func (g *Graph) implements(V types.Type, T *types.Interface) ([]*types.Selection, bool) { // fast path for common case if T.Empty() { - return true + return nil, true } if ityp, _ := V.Underlying().(*types.Interface); ityp != nil { @@ -49,31 +49,34 @@ func (g *Graph) implements(V types.Type, T *types.Interface) bool { _, obj := lookupMethod(ityp, m.Pkg(), m.Name()) switch { case obj == nil: - return false + return nil, false case !types.Identical(obj.Type(), m.Type()): - return false + return nil, false } } - return true + return nil, true } // A concrete type implements T if it implements all methods of T. 
ms := g.msCache.MethodSet(V) + var sels []*types.Selection for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) sel := ms.Lookup(m.Pkg(), m.Name()) if sel == nil { - return false + return nil, false } f, _ := sel.Obj().(*types.Func) if f == nil { - return false + return nil, false } if !types.Identical(f.Type(), m.Type()) { - return false + return nil, false } + + sels = append(sels, sel) } - return true + return sels, true } diff --git a/unused2/unused.go b/unused2/unused.go index 2aa2853a8..e3a6202c2 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -18,7 +18,7 @@ import ( // containing the conversion is unused, the fields will be marked as // used. -const debug = true +const debug = false /* @@ -91,7 +91,15 @@ const debug = true - (9.6) instructions use their operands' types - (9.7) variable _reads_ use variables, writes do not -- TODO things named _ are used + +- Differences in whole program mode: + - (e1) all packages share a single graph + - (e2) types aim to implement all exported interfaces from all packages + - (e3) exported identifiers aren't automatically used. for fields and + methods this poses extra issues due to reflection. We assume + that all exported fields are used. We also maintain a list of + known reflection-based method callers. + */ func assert(b bool) { @@ -167,7 +175,9 @@ func NewChecker() *Checker { return &Checker{} } -type Checker struct{} +type Checker struct { + WholeProgram bool +} func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { scopes := map[*types.Scope]*ssa.Function{} @@ -179,11 +189,51 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } var out []Unused - for _, pkg := range prog.InitialPackages { - graph := NewGraph(pkg.SSA) + processPkgs := func(pkgs ...*lint.Pkg) { + graph := NewGraph() + graph.wholeProgram = c.WholeProgram graph.job = j graph.scopes = scopes - graph.entry(pkg.TypesInfo) + + for _, pkg := range pkgs { + if pkg.PkgPath == "unsafe" { + continue + } + graph.entry(pkg.SSA, pkg.TypesInfo) + } + + if c.WholeProgram { + var ifaces []*types.Interface + var notIfaces []types.Type + + // implement as many interfaces as possible + graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + switch t := t.(type) { + case *types.Interface: + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } + } + }) + + for _, pkg := range prog.AllPackages { + ifaces = append(ifaces, interfacesFromExportData(pkg.Types)...) + } + + // (8.0) handle interfaces + // (e2) types aim to implement all exported interfaces from all packages + for _, iface := range ifaces { + for _, t := range notIfaces { + if sels, ok := graph.implements(t, iface); ok { + for _, sel := range sels { + graph.useMethod(t, sel, t, "implements") + } + } + } + } + } graph.color(graph.Root) // if a node is unused, don't report any of the node's @@ -214,7 +264,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } case *types.Named: for i := 0; i < obj.NumMethods(); i++ { - m := pkg.SSA.Prog.FuncValue(obj.Method(i)) + m := pkgs[0].SSA.Prog.FuncValue(obj.Method(i)) if node, ok := graph.nodeMaybe(m); ok { node.quiet = true } @@ -251,6 +301,44 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } return } + + type packager1 interface { + Pkg() *types.Package + } + type packager2 interface { + Package() *ssa.Package + } + + // do not report objects from packages we aren't checking. 
+ checkPkg: + switch obj := node.obj.(type) { + case packager1: + for _, pkg := range pkgs { + if pkg.Types == obj.Pkg() { + break checkPkg + } + } + if debug { + fmt.Printf("n%d [color=yellow];\n", node.id) + } + return + case packager2: + // This happens to filter $bound and $thunk, which + // should be fine, since we wouldn't want to report + // them, anyway. Remember that this filtering is only + // for the output, it doesn't affect the reachability + // of nodes in the graph. + for _, pkg := range pkgs { + if pkg.SSA == obj.Package() { + break checkPkg + } + } + if debug { + fmt.Printf("n%d [color=yellow];\n", node.id) + } + return + } + if debug { fmt.Printf("n%d [color=red];\n", node.id) } @@ -258,16 +346,14 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { case *types.Var: // don't report unnamed variables (receivers, interface embedding) if obj.Name() != "" || obj.IsField() { - if obj.Pkg() == pkg.Package.Types { - pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj, - Position: pos, - }) - } + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj, + Position: pos, + }) } case types.Object: - if obj.Pkg() == pkg.Package.Types && obj.Name() != "_" { + if obj.Name() != "_" { pos := prog.Fset().Position(obj.Pos()) out = append(out, Unused{ Obj: obj, @@ -279,15 +365,15 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { // TODO(dh): how does this happen? return } - - // OPT(dh): objects in other packages should never make it into the graph - if obj.Object() != nil && obj.Object().Pkg() == pkg.Types { - pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj.Object(), - Position: pos, - }) + if obj.Object() == nil { + // Closures + return } + pos := prog.Fset().Position(obj.Pos()) + out = append(out, Unused{ + Obj: obj.Object(), + Position: pos, + }) default: if debug { fmt.Printf("n%d [color=gray];\n", node.id) @@ -301,6 +387,15 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { report(value.(*Node)) }) } + + if c.WholeProgram { + // (e1) all packages share a single graph + processPkgs(prog.InitialPackages...) 
+ } else { + for _, pkg := range prog.InitialPackages { + processPkgs(pkg) + } + } return out } @@ -310,6 +405,8 @@ type Graph struct { msCache typeutil.MethodSetCache scopes map[*types.Scope]*ssa.Function + wholeProgram bool + nodeCounter int Root *Node @@ -320,9 +417,8 @@ type Graph struct { seenFns map[*ssa.Function]struct{} } -func NewGraph(pkg *ssa.Package) *Graph { +func NewGraph() *Graph { g := &Graph{ - pkg: pkg, Nodes: map[interface{}]*Node{}, seenFns: map[*ssa.Function]struct{}{}, } @@ -530,8 +626,9 @@ func (g *Graph) seeAndUse(used, by interface{}, reason string) { g.use(used, by, reason) } -func (g *Graph) entry(tinfo *types.Info) { +func (g *Graph) entry(pkg *ssa.Package, tinfo *types.Info) { // TODO rename Entry + g.pkg = pkg surroundingFunc := func(obj types.Object) *ssa.Function { scope := obj.Parent() @@ -557,7 +654,7 @@ func (g *Graph) entry(tinfo *types.Info) { case *types.Const: g.see(obj) fn := surroundingFunc(obj) - if fn == nil && obj.Exported() && g.pkg.Pkg.Name() != "main" { + if fn == nil && obj.Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { // (1.4) packages use exported constants (unless in package main) g.use(obj, nil, "exported constant") } @@ -613,7 +710,7 @@ func (g *Graph) entry(tinfo *types.Info) { case *ssa.Global: if m.Object() != nil { g.see(m.Object()) - if m.Object().Exported() && g.pkg.Pkg.Name() != "main" { + if m.Object().Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { // (1.3) packages use exported variables (unless in package main) g.use(m.Object(), nil, "exported top-level variable") } @@ -625,7 +722,7 @@ func (g *Graph) entry(tinfo *types.Info) { g.use(m, nil, "init function") } // This branch catches top-level functions, not methods. - if m.Object() != nil && m.Object().Exported() && g.pkg.Pkg.Name() != "main" { + if m.Object() != nil && m.Object().Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { // (1.2) packages use exported functions (unless in package main) g.use(m, nil, "exported top-level function") } @@ -648,7 +745,7 @@ func (g *Graph) entry(tinfo *types.Info) { case *ssa.Type: if m.Object() != nil { g.see(m.Object()) - if m.Object().Exported() && g.pkg.Pkg.Name() != "main" { + if m.Object().Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { // (1.1) packages use exported named types (unless in package main) g.use(m.Object(), nil, "exported top-level type") } @@ -659,47 +756,36 @@ func (g *Graph) entry(tinfo *types.Info) { } } - var ifaces []*types.Interface - var notIfaces []types.Type + if !g.wholeProgram { + // When not in whole program mode we process one package per + // graph, which means g.seenTypes only contains types of + // interest to us. In whole program mode, we're better off + // processing all interfaces at once, globally, both for + // performance reasons and because in whole program mode we + // actually care about all interfaces, not just the subset + // that has unexported methods. 
- g.seenTypes.Iterate(func(t types.Type, _ interface{}) { - switch t := t.(type) { - case *types.Interface: - // OPT(dh): (8.1) we only need interfaces that have unexported methods - ifaces = append(ifaces, t) - default: - if _, ok := t.Underlying().(*types.Interface); !ok { - notIfaces = append(notIfaces, t) + var ifaces []*types.Interface + var notIfaces []types.Type + + g.seenTypes.Iterate(func(t types.Type, _ interface{}) { + switch t := t.(type) { + case *types.Interface: + // OPT(dh): (8.1) we only need interfaces that have unexported methods + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } } - } - }) - - // (8.0) handle interfaces - for _, iface := range ifaces { - for _, t := range notIfaces { - if g.implements(t, iface) { - for i := 0; i < iface.NumMethods(); i++ { - // get the chain of embedded types that lead to the function implementing the interface - ms := g.msCache.MethodSet(t) - sel := ms.Lookup(g.pkg.Pkg, iface.Method(i).Name()) - obj := sel.Obj() - path := sel.Index() - assert(obj != nil) - if len(path) > 1 { - base := lintdsl.Dereference(t).Underlying().(*types.Struct) - for _, idx := range path[:len(path)-1] { - next := base.Field(idx) - // (6.3) structs use embedded fields that help implement interfaces - g.seeAndUse(next, base, "helps implement") - base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) - } - } - if fn := g.pkg.Prog.FuncValue(obj.(*types.Func)); fn != nil { - // actual function - g.seeAndUse(fn, t, "implements") - } else { - // interface method - g.seeAndUse(obj, t, "implements") + }) + + // (8.0) handle interfaces + for _, iface := range ifaces { + for _, t := range notIfaces { + if sels, ok := g.implements(t, iface); ok { + for _, sel := range sels { + g.useMethod(t, sel, t, "implements") } } } @@ -707,6 +793,28 @@ func (g *Graph) entry(tinfo *types.Info) { } } +func (g *Graph) useMethod(t types.Type, sel *types.Selection, by interface{}, reason string) { + obj := sel.Obj() + path := sel.Index() + assert(obj != nil) + if len(path) > 1 { + base := lintdsl.Dereference(t).Underlying().(*types.Struct) + for _, idx := range path[:len(path)-1] { + next := base.Field(idx) + // (6.3) structs use embedded fields that help implement interfaces + g.seeAndUse(next, base, "provides method") + base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) + } + } + if fn := g.pkg.Prog.FuncValue(obj.(*types.Func)); fn != nil { + // actual function + g.seeAndUse(fn, by, reason) + } else { + // interface method + g.seeAndUse(obj, by, reason) + } +} + func (g *Graph) function(fn *ssa.Function) { if fn.Package() != nil && fn.Package() != g.pkg { return @@ -754,13 +862,16 @@ func (g *Graph) typ(t types.Type) { g.use(t.Field(i), t, "NoCopy sentinel") } if t.Field(i).Anonymous() { - // does the embedded field contribute exported methods to the method set? - ms := g.msCache.MethodSet(t.Field(i).Type()) - for j := 0; j < ms.Len(); j++ { - if ms.At(j).Obj().Exported() { - // (6.4) structs use embedded fields that have exported methods (recursively) - g.use(t.Field(i), t, "extends exported method set") - break + // (e3) exported identifiers aren't automatically used. + if !g.wholeProgram { + // does the embedded field contribute exported methods to the method set? 
+ ms := g.msCache.MethodSet(t.Field(i).Type()) + for j := 0; j < ms.Len(); j++ { + if ms.At(j).Obj().Exported() { + // (6.4) structs use embedded fields that have exported methods (recursively) + g.use(t.Field(i), t, "extends exported method set") + break + } } } @@ -806,7 +917,7 @@ func (g *Graph) typ(t types.Type) { for i := 0; i < t.NumMethods(); i++ { meth := g.pkg.Prog.FuncValue(t.Method(i)) g.see(meth) - if meth.Object() != nil && meth.Object().Exported() { + if meth.Object() != nil && meth.Object().Exported() && !g.wholeProgram { // (2.1) named types use exported methods g.use(meth, t, "exported method") } @@ -940,6 +1051,36 @@ func (g *Graph) instructions(fn *ssa.Function) { c := instr.Common() if !c.IsInvoke() { // handled generically as an instruction operand + + if g.wholeProgram { + // (e3) special case known reflection-based method callers + switch lintdsl.CallName(c) { + case "net/rpc.Register", "net/rpc.RegisterName", "(*net/rpc.Server).Register", "(*net/rpc.Server).RegisterName": + var arg ssa.Value + switch lintdsl.CallName(c) { + case "net/rpc.Register": + arg = c.Args[0] + case "net/rpc.RegisterName": + arg = c.Args[1] + case "(*net/rpc.Server).Register": + arg = c.Args[1] + case "(*net/rpc.Server).RegisterName": + arg = c.Args[2] + } + walkPhi(arg, func(v ssa.Value) { + if v, ok := v.(*ssa.MakeInterface); ok { + walkPhi(v.X, func(vv ssa.Value) { + ms := g.msCache.MethodSet(vv.Type()) + for i := 0; i < ms.Len(); i++ { + if ms.At(i).Obj().Exported() { + g.useMethod(vv.Type(), ms.At(i), fn, "net/rpc.Register") + } + } + }) + } + }) + } + } } else { // (4.5) functions use functions/interface methods they call g.seeAndUse(c.Method, fn, "interface call") @@ -1117,3 +1258,49 @@ func walkPhi(v ssa.Value, fn func(v ssa.Value)) { } impl(phi) } + +func interfacesFromExportData(pkg *types.Package) []*types.Interface { + var out []*types.Interface + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + out = append(out, interfacesFromObject(obj)...) + } + return out +} + +func interfacesFromObject(obj types.Object) []*types.Interface { + var out []*types.Interface + switch obj := obj.(type) { + case *types.Func: + sig := obj.Type().(*types.Signature) + for i := 0; i < sig.Results().Len(); i++ { + out = append(out, interfacesFromObject(sig.Results().At(i))...) + } + for i := 0; i < sig.Params().Len(); i++ { + out = append(out, interfacesFromObject(sig.Params().At(i))...) + } + case *types.TypeName: + if named, ok := obj.Type().(*types.Named); ok { + for i := 0; i < named.NumMethods(); i++ { + out = append(out, interfacesFromObject(named.Method(i))...) + } + + if iface, ok := named.Underlying().(*types.Interface); ok { + out = append(out, iface) + } + } + case *types.Var: + // No call to Underlying here. We want unnamed interfaces + // only. Named interfaces are gotten directly from the + // package's scope. + if iface, ok := obj.Type().(*types.Interface); ok { + out = append(out, iface) + } + case *types.Const: + case *types.Builtin: + default: + panic(fmt.Sprintf("unhandled type: %T", obj)) + } + return out +} From 76e51227bc1f3fce1e37fa26fc3e2022f46300c8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 20 Mar 2019 23:10:45 +0100 Subject: [PATCH 075/254] Revert "ssa: explicitly initialize blank variables with implicit zero value" We made this change so that code like `var _ B` would generate code for unused to pick up on and consider B used. 
However, the change made doesn't work for `var _ *T = nil`, because that generates a blank store of an untyped nil. We could change this to store a typed nil, but at that point we'd effectively be lying about the code. Instead, we'll add AST-based code to unused to handle blank variable declarations. This reverts commit 3e913d42fb6282d2f42e40eda15c3a2ee564d3f8. --- ssa/builder.go | 37 ++++--------------------------------- 1 file changed, 4 insertions(+), 33 deletions(-) diff --git a/ssa/builder.go b/ssa/builder.go index 9684f4044..032819a2a 100644 --- a/ssa/builder.go +++ b/ssa/builder.go @@ -995,39 +995,10 @@ func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { // e.g. var x, y int // Locals are implicitly zero-initialized. for _, id := range spec.Names { - lhs := fn.addLocalForIdent(id) - if fn.debugInfo() { - emitDebugRef(fn, id, lhs, true) - } - // create explicit zero initialisation for blank - // identifiers so that we get BlankStores and the locals - // don't get optimized away - if isBlankIdent(id) { - // We don't need explicit code for structs and arrays, - // their locals don't get optimized away. - T := fn.Pkg.typeOf(spec.Type) - switch U := T.Underlying().(type) { - case *types.Pointer, *types.Slice, *types.Map, *types.Chan, *types.Signature, *types.Interface: - lval := b.addr(fn, id, false) - lval.store(fn, NewConst(nil, T)) - case *types.Basic: - n := U.Info() - - var k exact.Value - switch { - case (n & types.IsBoolean) != 0: - k = exact.MakeBool(false) - case (n & types.IsInteger) != 0: - k = exact.MakeInt64(0) - case (n & types.IsFloat) != 0: - k = exact.MakeFloat64(0) - case (n & types.IsComplex) != 0: - k = exact.MakeImag(exact.MakeInt64(0)) - case (n & types.IsString) != 0: - k = exact.MakeString("") - } - lval := b.addr(fn, id, false) - lval.store(fn, NewConst(k, T)) + if !isBlankIdent(id) { + lhs := fn.addLocalForIdent(id) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) } } } From b85b256325aecd44eaffcc804c354039d9af641a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 00:23:49 +0100 Subject: [PATCH 076/254] unused2: restructure API --- unused2/unused.go | 37 +++++++++++-------------------------- unused2/unused_test.go | 5 ++--- 2 files changed, 13 insertions(+), 29 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index e3a6202c2..82c7ac237 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -108,22 +108,20 @@ func assert(b bool) { } } -func NewLintChecker(c *Checker) *LintChecker { - l := &LintChecker{ - c: c, - } - return l +type Unused struct { + Obj types.Object + Position token.Position } -type LintChecker struct { - c *Checker +type Checker struct { + WholeProgram bool } -func (*LintChecker) Name() string { return "unused" } -func (*LintChecker) Prefix() string { return "U" } +func (*Checker) Name() string { return "unused" } +func (*Checker) Prefix() string { return "U" } -func (l *LintChecker) Init(*lint.Program) {} -func (l *LintChecker) Checks() []lint.Check { +func (l *Checker) Init(*lint.Program) {} +func (l *Checker) Checks() []lint.Check { return []lint.Check{ {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, } @@ -147,8 +145,8 @@ func typString(obj types.Object) string { } } -func (l *LintChecker) Lint(j *lint.Job) { - unused := l.c.Check(j.Program, j) +func (c *Checker) Lint(j *lint.Job) { + unused := c.Check(j.Program, j) for _, u := range unused { name := u.Obj.Name() if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { @@ -166,19 +164,6 @@ func (l *LintChecker) 
Lint(j *lint.Job) { } } -type Unused struct { - Obj types.Object - Position token.Position -} - -func NewChecker() *Checker { - return &Checker{} -} - -type Checker struct { - WholeProgram bool -} - func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { scopes := map[*types.Scope]*ssa.Function{} for _, fn := range j.Program.InitialFunctions { diff --git a/unused2/unused_test.go b/unused2/unused_test.go index 4ed2c8c08..73c9c0cf1 100644 --- a/unused2/unused_test.go +++ b/unused2/unused_test.go @@ -13,7 +13,6 @@ import ( ) func TestAll(t *testing.T) { - checker := NewChecker() - l := NewLintChecker(checker) - testutil.TestAll(t, l, "") + c := &Checker{} + testutil.TestAll(t, c, "") } From f142c6107c91f9addd12346849cbf87269841033 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 00:31:58 +0100 Subject: [PATCH 077/254] lint: create one Inspector per package --- lint/lint.go | 6 ++-- lint/lintdsl/lintdsl.go | 12 ++++++++ simple/lint.go | 64 ++++++++++++++++++++--------------------- staticcheck/lint.go | 62 +++++++++++++++++++-------------------- stylecheck/lint.go | 8 +++--- 5 files changed, 82 insertions(+), 70 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 5d806131c..038048899 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -113,7 +113,6 @@ type Program struct { AllPackages []*packages.Package AllFunctions []*ssa.Function Files []*ast.File - Inspector *inspector.Inspector GoVersion int tokenFileMap map[*token.File]*ast.File @@ -297,6 +296,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { Package: pkg, Config: cfg, } + pkg.Inspector = inspector.New(pkg.Syntax) pkgMap[ssapkg] = pkg pkgs = append(pkgs, pkg) } @@ -336,7 +336,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { prog.astFileMap[f] = pkgMap[ssapkg] } } - prog.Inspector = inspector.New(prog.Files) for _, pkg := range allPkgs { for _, f := range pkg.Syntax { @@ -623,7 +622,8 @@ func (prog *Program) Package(path string) *packages.Package { type Pkg struct { SSA *ssa.Package *packages.Package - Config config.Config + Config config.Config + Inspector *inspector.Inspector } type Positioner interface { diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 2f614c9b7..2fa5708fa 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -377,3 +377,15 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel } return out } + +func InspectPreorder(j *lint.Job, types []ast.Node, fn func(ast.Node)) { + for _, pkg := range j.Program.InitialPackages { + pkg.Inspector.Preorder(types, fn) + } +} + +func InspectNodes(j *lint.Job, types []ast.Node, fn func(node ast.Node, push bool) (prune bool)) { + for _, pkg := range j.Program.InitialPackages { + pkg.Inspector.Nodes(types, fn) + } +} diff --git a/simple/lint.go b/simple/lint.go index 606b0505f..6376a8c12 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -103,7 +103,7 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) } func (c *Checker) LintLoopCopy(j *lint.Job) { @@ -178,7 +178,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { } j.Errorf(loop, "should use copy() instead of a loop") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + 
InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintIfBoolCmp(j *lint.Job) { @@ -220,7 +220,7 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { } j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesBufferConversions(j *lint.Job) { @@ -247,7 +247,7 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintStringsContains(j *lint.Job) { @@ -312,7 +312,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { } j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesCompare(j *lint.Job) { @@ -339,7 +339,7 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { } j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintForTrue(j *lint.Job) { @@ -353,7 +353,7 @@ func (c *Checker) LintForTrue(j *lint.Job) { } j.Errorf(loop, "should use for {} instead of for true {}") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintRegexpRaw(j *lint.Job) { @@ -410,7 +410,7 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintIfReturn(j *lint.Job) { @@ -471,7 +471,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { } j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) } // LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: @@ -597,7 +597,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { } j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintSlicing(j *lint.Job) { @@ -627,7 +627,7 @@ func (c *Checker) LintSlicing(j *lint.Job) { } j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.SliceExpr)(nil)}, fn) } func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { @@ -712,7 +712,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintTimeSince(j *lint.Job) { @@ -730,7 +730,7 @@ func (c 
*Checker) LintTimeSince(j *lint.Job) { } j.Errorf(call, "should use time.Since instead of time.Now().Sub") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintTimeUntil(j *lint.Job) { @@ -747,7 +747,7 @@ func (c *Checker) LintTimeUntil(j *lint.Job) { } j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { @@ -812,10 +812,10 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) - j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) + InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn1) + InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn2) if IsGoVersion(j, 4) { - j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) + InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn3) } } @@ -938,7 +938,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) } func (c *Checker) LintTrim(j *lint.Job) { @@ -1139,7 +1139,7 @@ func (c *Checker) LintTrim(j *lint.Job) { j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintLoopSlide(j *lint.Job) { @@ -1241,7 +1241,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintMakeLenCap(j *lint.Job) { @@ -1269,7 +1269,7 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintAssertNotNil(j *lint.Job) { @@ -1378,8 +1378,8 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) - j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) + InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn1) + InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn2) } func (c *Checker) LintDeclareAssign(j *lint.Job) { @@ -1450,7 +1450,7 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { j.Errorf(decl, "should merge variable declaration with assignment on next line") } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) } func (c *Checker) LintRedundantBreak(j *lint.Job) { @@ -1492,8 +1492,8 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { // checked x.Type.Results to be nil. 
j.Errorf(rst, "redundant return statement") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) - j.Program.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) + InspectPreorder(j, []ast.Node{(*ast.CaseClause)(nil)}, fn1) + InspectPreorder(j, []ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) } func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { @@ -1553,7 +1553,7 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { @@ -1567,7 +1567,7 @@ func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { } j.Errorf(node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintRangeStringRunes(j *lint.Job) { @@ -1606,7 +1606,7 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { j.Errorf(node, "unnecessary nil check around range") } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) } func isPermissibleSort(j *lint.Job, node ast.Node) bool { @@ -1687,7 +1687,7 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { } return } - j.Program.Inspector.Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) } func (c *Checker) LintGuardedDelete(j *lint.Job) { @@ -1749,7 +1749,7 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { } j.Errorf(stmt, "unnecessary guard around call to delete") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { @@ -1818,5 +1818,5 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index f1f7c853e..42076d96c 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -629,7 +629,7 @@ func (c *Checker) CheckUntrappableSignal(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTemplate(j *lint.Job) { @@ -670,7 +670,7 @@ func (c *Checker) CheckTemplate(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { @@ -700,7 +700,7 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { j.Errorf(call.Args[Arg("time.Sleep.d")], "sleeping for %d nanoseconds is probably a bug. 
Be explicit if it isn't: %s", n, recommendation) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { @@ -734,7 +734,7 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { Render(j, stmt)) } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.GoStmt)(nil)}, fn) } func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { @@ -777,7 +777,7 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { } j.Errorf(loop, "this loop will spin, using 100%% CPU") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { @@ -816,7 +816,7 @@ func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { j.Errorf(stmt, "defers in this infinite loop will never run") } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { @@ -839,7 +839,7 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { } ast.Inspect(loop.Body, fn2) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckTestMainExit(j *lint.Job) { @@ -887,7 +887,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { j.Errorf(node, "TestMain should call os.Exit to set exit code") } } - j.Program.Inspector.Preorder(nil, fn) + InspectPreorder(j, nil, fn) } func isTestMain(j *lint.Job, node ast.Node) bool { @@ -924,7 +924,7 @@ func (c *Checker) CheckExec(j *lint.Job) { j.Errorf(call.Args[Arg("os/exec.Command.name")], "first argument to exec.Command looks like a shell command, but a program name or path are expected") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { @@ -943,7 +943,7 @@ func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { @@ -970,7 +970,7 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { } j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckScopedBreak(j *lint.Job) { @@ -1028,7 +1028,7 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckUnsafePrintf(j *lint.Job) { @@ -1053,7 +1053,7 @@ func (c *Checker) CheckUnsafePrintf(j *lint.Job) { j.Errorf(call.Args[arg], "printf-style function with dynamic format string and no further arguments should use print-style function instead") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckEarlyDefer(j *lint.Job) { @@ -1121,7 +1121,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { j.Errorf(def, "should check 
returned error before deferring %s", Render(j, def.Call)) } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) } func selectorX(sel *ast.SelectorExpr) ast.Node { @@ -1189,7 +1189,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) } // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't @@ -1213,7 +1213,7 @@ func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) } func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { @@ -1281,7 +1281,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } - j.Program.Inspector.Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) + InspectNodes(j, []ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) } func (c *Checker) CheckBenchmarkN(j *lint.Job) { @@ -1302,7 +1302,7 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) { } j.Errorf(assign, "should not assign to %s", Render(j, sel)) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { @@ -1519,7 +1519,7 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { @@ -1802,7 +1802,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { return true }) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) } func (c *Checker) CheckNilContext(j *lint.Job) { @@ -1827,7 +1827,7 @@ func (c *Checker) CheckNilContext(j *lint.Job) { j.Errorf(call.Args[0], "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckSeeker(j *lint.Job) { @@ -1861,7 +1861,7 @@ func (c *Checker) CheckSeeker(j *lint.Job) { } j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { @@ -2204,7 +2204,7 @@ func (c *Checker) CheckDoubleNegation(j *lint.Job) { } j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.UnaryExpr)(nil)}, fn) } func hasSideEffects(node ast.Node) bool { @@ -2263,7 +2263,7 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c 
*Checker) CheckSillyBitwiseOps(j *lint.Job) { @@ -2338,7 +2338,7 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckPureFunctions(j *lint.Job) { @@ -2464,7 +2464,7 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { }) } } - j.Program.Inspector.Nodes(nil, fn) + InspectNodes(j, nil, fn) } func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { @@ -2711,7 +2711,7 @@ func (c *Checker) CheckSelfAssignment(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn) } func buildTagsIdentical(s1, s2 []string) bool { @@ -2829,7 +2829,7 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { j.Errorf(group[0], "only the first constant in this group has an explicit type") } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.GenDecl)(nil)}, fn) } func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { @@ -2930,7 +2930,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { @@ -2997,7 +2997,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } func (c *Checker) CheckSingleArgAppend(j *lint.Job) { @@ -3011,5 +3011,5 @@ func (c *Checker) CheckSingleArgAppend(j *lint.Job) { } j.Errorf(call, "x = append(y) is equivalent to x = y") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } diff --git a/stylecheck/lint.go b/stylecheck/lint.go index ae5039439..107a276d2 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -198,7 +198,7 @@ func (c *Checker) CheckIncDec(j *lint.Job) { j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) } - j.Program.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckErrorReturn(j *lint.Job) { @@ -611,7 +611,7 @@ func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.SwitchStmt)(nil)}, fn) } func (c *Checker) CheckYodaConditions(j *lint.Job) { @@ -629,7 +629,7 @@ func (c *Checker) CheckYodaConditions(j *lint.Job) { } j.Errorf(cond, "don't use Yoda conditions") } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { @@ -646,5 +646,5 @@ func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { } } } - j.Program.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) + InspectPreorder(j, []ast.Node{(*ast.BasicLit)(nil)}, fn) } From 11b0a13d36863a9af8cdea17a8b6f4ea45a51007 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 00:48:51 +0100 Subject: [PATCH 078/254] unused2: handle 
blank variables once more --- unused2/testdata/src/blank/blank.go | 2 ++ unused2/unused.go | 42 ++++++++++++++++++++++++----- 2 files changed, 38 insertions(+), 6 deletions(-) diff --git a/unused2/testdata/src/blank/blank.go b/unused2/testdata/src/blank/blank.go index 994e5a402..2e3d9357e 100644 --- a/unused2/testdata/src/blank/blank.go +++ b/unused2/testdata/src/blank/blank.go @@ -6,6 +6,7 @@ type t1 struct{} // MATCH /t1 is unused/ type t2 struct{} type t3 struct{} type t4 struct{} +type t5 struct{} var _ = t2{} @@ -17,6 +18,7 @@ func fn1() { // MATCH /fn1 is unused/ func fn2() { _ = t3{} var _ t4 + var _ *t5 = nil } func init() { diff --git a/unused2/unused.go b/unused2/unused.go index 82c7ac237..1ab07d10d 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -184,7 +184,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { if pkg.PkgPath == "unsafe" { continue } - graph.entry(pkg.SSA, pkg.TypesInfo) + graph.entry(pkg) } if c.WholeProgram { @@ -611,9 +611,9 @@ func (g *Graph) seeAndUse(used, by interface{}, reason string) { g.use(used, by, reason) } -func (g *Graph) entry(pkg *ssa.Package, tinfo *types.Info) { +func (g *Graph) entry(pkg *lint.Pkg) { // TODO rename Entry - g.pkg = pkg + g.pkg = pkg.SSA surroundingFunc := func(obj types.Object) *ssa.Function { scope := obj.Parent() @@ -631,7 +631,7 @@ func (g *Graph) entry(pkg *ssa.Package, tinfo *types.Info) { // // SSA form also won't tell us about constants; use Defs and Uses // to determine which constants exist and which are being used. - for _, obj := range tinfo.Defs { + for _, obj := range pkg.TypesInfo.Defs { switch obj := obj.(type) { case *types.TypeName: g.see(obj) @@ -665,7 +665,7 @@ func (g *Graph) entry(pkg *ssa.Package, tinfo *types.Info) { return true } - obj, ok := tinfo.Uses[ident] + obj, ok := pkg.TypesInfo.Uses[ident] if !ok { return true } @@ -677,7 +677,7 @@ func (g *Graph) entry(pkg *ssa.Package, tinfo *types.Info) { }) } // Find constants being used in non-function contexts - for ident, obj := range tinfo.Uses { + for ident, obj := range pkg.TypesInfo.Uses { _, ok := obj.(*types.Const) if !ok { continue @@ -688,6 +688,36 @@ func (g *Graph) entry(pkg *ssa.Package, tinfo *types.Info) { g.seeAndUse(obj, nil, "used constant") } + var fn *ssa.Function + pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)}, func(n ast.Node) { + switch n := n.(type) { + case *ast.FuncDecl: + fn = pkg.SSA.Prog.FuncValue(lintdsl.ObjectOf(g.job, n.Name).(*types.Func)) + if fn != nil { + g.see(fn) + } + case *ast.GenDecl: + if n.Tok != token.VAR { + return + } + for _, spec := range n.Specs { + v := spec.(*ast.ValueSpec) + if v.Type == nil { + continue + } + T := lintdsl.TypeOf(g.job, v.Type) + if fn != nil { + g.seeAndUse(T, fn, "var decl") + } else { + g.seeAndUse(T, nil, "var decl") + } + g.typ(T) + } + default: + panic(fmt.Sprintf("unreachable: %T", n)) + } + }) + for _, m := range g.pkg.Members { switch m := m.(type) { case *ssa.NamedConst: From 6609a5b7dbc39f682afd043583590069eaf9010c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 01:12:14 +0100 Subject: [PATCH 079/254] unused2: consolidate debug printing --- unused2/unused.go | 63 ++++++++++++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 25 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 1ab07d10d..f43f5cf8f 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -220,6 +220,37 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } } + if debug { + 
fmt.Println("digraph{") + fmt.Printf("n%d [label=\"Root\"];\n", graph.Root.id) + for used, reasons := range graph.Root.used { + for _, reason := range reasons { + fmt.Printf("n%d -> n%d [label=%q];\n", graph.Root.id, used.id, reason) + } + } + for _, node := range graph.Nodes { + fmt.Printf("n%d [label=%q];\n", node.id, node.obj) + + for used, reasons := range node.used { + for _, reason := range reasons { + fmt.Printf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) + } + } + } + + graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + node := value.(*Node) + fmt.Printf("n%d [label=%q];\n", node.id, node.obj) + + for used, reasons := range node.used { + for _, reason := range reasons { + fmt.Printf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) + } + } + }) + fmt.Println("}") + } + graph.color(graph.Root) // if a node is unused, don't report any of the node's // children as unused. for example, if a function is unused, @@ -408,10 +439,6 @@ func NewGraph() *Graph { seenFns: map[*ssa.Function]struct{}{}, } g.Root = g.newNode(nil) - if debug { - fmt.Printf("n%d [label=\"Root\"];\n", g.Root.id) - } - return g } @@ -428,7 +455,7 @@ func (g *Graph) color(root *Node) { type Node struct { obj interface{} id int - used map[*Node]string + used map[*Node][]string seen bool quiet bool @@ -469,17 +496,13 @@ func (g *Graph) newNode(obj interface{}) *Node { return &Node{ obj: obj, id: g.nodeCounter, - used: map[*Node]string{}, + used: map[*Node][]string{}, } } -func (n *Node) use(node *Node, reason string) (new bool) { +func (n *Node) use(node *Node, reason string) { assert(node != nil) - if s, ok := n.used[node]; ok && s == reason { - return false - } - n.used[node] = reason - return true + n.used[node] = append(n.used[node], reason) } // isIrrelevant reports whether an object's presence in the graph is @@ -560,10 +583,7 @@ func (g *Graph) see(obj interface{}) { } // add new node to graph - node, new := g.node(obj) - if debug && new { - fmt.Printf("n%d [label=%q];\n", node.id, obj) - } + g.node(obj) } func (g *Graph) use(used, by interface{}, reason string) { @@ -591,18 +611,11 @@ func (g *Graph) use(used, by interface{}, reason string) { usedNode, new := g.node(used) assert(!new) if by == nil { - new := g.Root.use(usedNode, reason) - if debug && new { - fmt.Printf("n%d -> n%d [label=%q];\n", g.Root.id, usedNode.id, reason) - } + g.Root.use(usedNode, reason) } else { byNode, new := g.node(by) assert(!new) - new = byNode.use(usedNode, reason) - if debug && new { - fmt.Printf("n%d -> n%d [label=%q];\n", byNode.id, usedNode.id, reason) - } - + byNode.use(usedNode, reason) } } From 3b4c379a6f477c2190e2895b9f0a797aaefc8013 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 01:16:45 +0100 Subject: [PATCH 080/254] unused2: dynamic debugging --- unused2/unused.go | 48 ++++++++++++++++++++++------------------------- 1 file changed, 22 insertions(+), 26 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index f43f5cf8f..3e12707b3 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -5,6 +5,7 @@ import ( "go/ast" "go/token" "go/types" + "io" "strings" "honnef.co/go/tools/go/types/typeutil" @@ -18,8 +19,6 @@ import ( // containing the conversion is unused, the fields will be marked as // used. 
-const debug = false - /* - packages use: @@ -115,6 +114,7 @@ type Unused struct { type Checker struct { WholeProgram bool + Debug io.Writer } func (*Checker) Name() string { return "unused" } @@ -164,6 +164,12 @@ func (c *Checker) Lint(j *lint.Job) { } } +func (c *Checker) debugf(f string, v ...interface{}) { + if c.Debug != nil { + fmt.Fprintf(c.Debug, f, v...) + } +} + func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { scopes := map[*types.Scope]*ssa.Function{} for _, fn := range j.Program.InitialFunctions { @@ -220,35 +226,35 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } } - if debug { - fmt.Println("digraph{") - fmt.Printf("n%d [label=\"Root\"];\n", graph.Root.id) + if c.Debug != nil { + c.debugf("digraph{\n") + c.debugf("n%d [label=\"Root\"];\n", graph.Root.id) for used, reasons := range graph.Root.used { for _, reason := range reasons { - fmt.Printf("n%d -> n%d [label=%q];\n", graph.Root.id, used.id, reason) + c.debugf("n%d -> n%d [label=%q];\n", graph.Root.id, used.id, reason) } } for _, node := range graph.Nodes { - fmt.Printf("n%d [label=%q];\n", node.id, node.obj) + c.debugf("n%d [label=%q];\n", node.id, node.obj) for used, reasons := range node.used { for _, reason := range reasons { - fmt.Printf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) + c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) } } } graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { node := value.(*Node) - fmt.Printf("n%d [label=%q];\n", node.id, node.obj) + c.debugf("n%d [label=%q];\n", node.id, node.obj) for used, reasons := range node.used { for _, reason := range reasons { - fmt.Printf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) + c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) } } }) - fmt.Println("}") + c.debugf("}\n") } graph.color(graph.Root) @@ -312,9 +318,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { return } if node.quiet { - if debug { - fmt.Printf("n%d [color=purple];\n", node.id) - } + c.debugf("n%d [color=purple];\n", node.id) return } @@ -334,9 +338,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { break checkPkg } } - if debug { - fmt.Printf("n%d [color=yellow];\n", node.id) - } + c.debugf("n%d [color=yellow];\n", node.id) return case packager2: // This happens to filter $bound and $thunk, which @@ -349,15 +351,11 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { break checkPkg } } - if debug { - fmt.Printf("n%d [color=yellow];\n", node.id) - } + c.debugf("n%d [color=yellow];\n", node.id) return } - if debug { - fmt.Printf("n%d [color=red];\n", node.id) - } + c.debugf("n%d [color=red];\n", node.id) switch obj := node.obj.(type) { case *types.Var: // don't report unnamed variables (receivers, interface embedding) @@ -391,9 +389,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { Position: pos, }) default: - if debug { - fmt.Printf("n%d [color=gray];\n", node.id) - } + c.debugf("n%d [color=gray];\n", node.id) } } for _, node := range graph.Nodes { From faa934cab8359f01165d076cd2e1fe5679ae210f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 01:19:33 +0100 Subject: [PATCH 081/254] unused2: simplify debugging code --- unused2/unused.go | 28 +++++++++++----------------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 3e12707b3..b9e3f2018 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -227,16 +227,12 @@ func (c 
*Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } if c.Debug != nil { - c.debugf("digraph{\n") - c.debugf("n%d [label=\"Root\"];\n", graph.Root.id) - for used, reasons := range graph.Root.used { - for _, reason := range reasons { - c.debugf("n%d -> n%d [label=%q];\n", graph.Root.id, used.id, reason) + debugNode := func(node *Node) { + if node.obj == nil { + c.debugf("n%d [label=\"Root\"];\n", node.id) + } else { + c.debugf("n%d [label=%q];\n", node.id, node.obj) } - } - for _, node := range graph.Nodes { - c.debugf("n%d [label=%q];\n", node.id, node.obj) - for used, reasons := range node.used { for _, reason := range reasons { c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) @@ -244,15 +240,13 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } } + c.debugf("digraph{\n") + debugNode(graph.Root) + for _, node := range graph.Nodes { + debugNode(node) + } graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { - node := value.(*Node) - c.debugf("n%d [label=%q];\n", node.id, node.obj) - - for used, reasons := range node.used { - for _, reason := range reasons { - c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) - } - } + debugNode(value.(*Node)) }) c.debugf("}\n") } From d36bf90409063c0fe4fecdc07cc71e25b59a4050 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 4 Apr 2019 06:17:11 +0200 Subject: [PATCH 082/254] staticcheck: don't flag 0 == 0 generated by cgo Closes gh-430 --- staticcheck/lint.go | 15 +++++++++++++++ .../CheckLhsRhsIdentical/CheckLhsRhsIdentical.go | 3 +++ .../testdata/src/CheckLhsRhsIdentical/cgo.go | 16 ++++++++++++++++ 3 files changed, 34 insertions(+) create mode 100644 staticcheck/testdata/src/CheckLhsRhsIdentical/cgo.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 42076d96c..c1dfed2b6 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -968,6 +968,21 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { if Render(j, op.X) != Render(j, op.Y) { return } + l1, ok1 := op.X.(*ast.BasicLit) + l2, ok2 := op.Y.(*ast.BasicLit) + if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(j.File(l1)) { + // cgo generates the following function call: + // _cgoCheckPointer(_cgoBase0, 0 == 0) – it uses 0 == 0 + // instead of true in case the user shadowed the + // identifier. Ideally we'd restrict this exception to + // calls of _cgoCheckPointer, but it's not worth the + // hassle of keeping track of the stack. + // are very rare to begin with, and we're mostly checking + // for them to catch typos such as 1 == 1 where the user + // meant to type i == 1. The odds of a false negative for + // 0 == 0 are slim. 
+ return + } j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) } InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) diff --git a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go index e914080a2..be682350f 100644 --- a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go +++ b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go @@ -3,6 +3,9 @@ package pkg type Float float64 func fn(a int, s []int, f1 float64, f2 Float) { + if 0 == 0 { // MATCH /identical expressions/ + println() + } if 1 == 1 { // MATCH /identical expressions/ println() } diff --git a/staticcheck/testdata/src/CheckLhsRhsIdentical/cgo.go b/staticcheck/testdata/src/CheckLhsRhsIdentical/cgo.go new file mode 100644 index 000000000..7c1c194ef --- /dev/null +++ b/staticcheck/testdata/src/CheckLhsRhsIdentical/cgo.go @@ -0,0 +1,16 @@ +package pkg + +// void foo(void **p) {} +import "C" +import "unsafe" + +func Foo() { + var p unsafe.Pointer + + C.foo(&p) + if 0 == 0 { + // We don't currently flag this instance of 0 == 0 because of + // our cgo-specific exception. + println() + } +} From 6f064a0523996bd6ed588f06d5ff2e5dbec3ebb4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 01:27:05 +0100 Subject: [PATCH 083/254] cmd/unused, cmd/staticcheck: switch over to new unused --- cmd/staticcheck/staticcheck.go | 7 +++---- cmd/unused/main.go | 30 +++++------------------------- 2 files changed, 8 insertions(+), 29 deletions(-) diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 3c8d96475..91426819d 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -9,7 +9,7 @@ import ( "honnef.co/go/tools/simple" "honnef.co/go/tools/staticcheck" "honnef.co/go/tools/stylecheck" - "honnef.co/go/tools/unused" + "honnef.co/go/tools/unused2" ) func main() { @@ -22,9 +22,8 @@ func main() { stylecheck.NewChecker(), } - uc := unused.NewChecker(unused.CheckAll) - uc.ConsiderReflection = true - checkers = append(checkers, unused.NewLintChecker(uc)) + uc := &unused.Checker{} + checkers = append(checkers, uc) lintutil.ProcessFlagSet(checkers, fs) } diff --git a/cmd/unused/main.go b/cmd/unused/main.go index 89bcb6c6b..836f33981 100644 --- a/cmd/unused/main.go +++ b/cmd/unused/main.go @@ -9,7 +9,7 @@ import ( "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/unused" + "honnef.co/go/tools/unused2" ) var ( @@ -23,9 +23,8 @@ var ( fReflection bool ) -func newChecker(mode unused.CheckMode) *unused.Checker { - checker := unused.NewChecker(mode) - +func newChecker() *unused.Checker { + checker := &unused.Checker{} if fDebug != "" { debug, err := os.Create(fDebug) if err != nil { @@ -35,7 +34,6 @@ func newChecker(mode unused.CheckMode) *unused.Checker { } checker.WholeProgram = fWholeProgram - checker.ConsiderReflection = fReflection return checker } @@ -54,24 +52,6 @@ func main() { fs.BoolVar(&fReflection, "reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") fs.Parse(os.Args[1:]) - var mode unused.CheckMode - if fConstants { - mode |= unused.CheckConstants - } - if fFields { - mode |= unused.CheckFields - } - if fFunctions { - mode |= unused.CheckFunctions - } - if fTypes { - mode |= unused.CheckTypes - } - if fVariables { - mode |= unused.CheckVariables - } - - checker := newChecker(mode) - l := unused.NewLintChecker(checker) - 
lintutil.ProcessFlagSet([]lint.Checker{l}, fs) + c := newChecker() + lintutil.ProcessFlagSet([]lint.Checker{c}, fs) } From 838dc8ac070ef0c997a37f67be6634695ac66ca3 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 02:36:49 +0100 Subject: [PATCH 084/254] unused2: use blank fields --- unused2/testdata/src/blank/blank.go | 4 +++- unused2/unused.go | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/unused2/testdata/src/blank/blank.go b/unused2/testdata/src/blank/blank.go index 2e3d9357e..b43ab1078 100644 --- a/unused2/testdata/src/blank/blank.go +++ b/unused2/testdata/src/blank/blank.go @@ -3,7 +3,9 @@ package pkg import _ "fmt" type t1 struct{} // MATCH /t1 is unused/ -type t2 struct{} +type t2 struct { + _ int +} type t3 struct{} type t4 struct{} type t5 struct{} diff --git a/unused2/unused.go b/unused2/unused.go index b9e3f2018..84a0fc99e 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -875,6 +875,8 @@ func (g *Graph) typ(t types.Type) { if t.Field(i).Exported() { // (6.2) structs use exported fields g.use(t.Field(i), t, "exported struct field") + } else if t.Field(i).Name() == "_" { + g.use(t.Field(i), t, "blank field") } else if isNoCopyType(t.Field(i).Type()) { // (6.1) structs use fields of type NoCopy sentinel g.use(t.Field(i), t, "NoCopy sentinel") From 2b3ff21478ad9ee20ea1744f30b1be201dc9cb34 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 21 Mar 2019 03:24:59 +0100 Subject: [PATCH 085/254] unused2: use the correct method set for embedded types --- unused2/testdata/src/embedding/embedding.go | 8 ++++++++ unused2/unused.go | 8 +++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/unused2/testdata/src/embedding/embedding.go b/unused2/testdata/src/embedding/embedding.go index 31bb43520..b45b3fc72 100644 --- a/unused2/testdata/src/embedding/embedding.go +++ b/unused2/testdata/src/embedding/embedding.go @@ -62,3 +62,11 @@ type t8 struct{ t7 } type t9 struct{ t8 } var _ = t9{} + +type t10 struct{} + +func (*t10) Foo() {} + +type t11 struct{ t10 } + +var _ = t11{} diff --git a/unused2/unused.go b/unused2/unused.go index 84a0fc99e..46b4e3299 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -885,7 +885,13 @@ func (g *Graph) typ(t types.Type) { // (e3) exported identifiers aren't automatically used. if !g.wholeProgram { // does the embedded field contribute exported methods to the method set? 
- ms := g.msCache.MethodSet(t.Field(i).Type()) + T := t.Field(i).Type() + if _, ok := T.Underlying().(*types.Pointer); !ok { + // An embedded field is addressable, so check + // the pointer type to get the full method set + T = types.NewPointer(T) + } + ms := g.msCache.MethodSet(T) for j := 0; j < ms.Len(); j++ { if ms.At(j).Obj().Exported() { // (6.4) structs use embedded fields that have exported methods (recursively) From 607aae208e6fcd05bd623403c90d6fa451460c23 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 27 Mar 2019 10:49:32 +0100 Subject: [PATCH 086/254] unused2: handle go:linkname, benchmark sinks, runtime functions --- .../exported_method_test.go | 10 + unused2/testdata/src/linkname/linkname.go | 6 + unused2/unused.go | 335 +++++++++++++++++- 3 files changed, 334 insertions(+), 17 deletions(-) create mode 100644 unused2/testdata/src/linkname/linkname.go diff --git a/unused2/testdata/src/exported_method_test/exported_method_test.go b/unused2/testdata/src/exported_method_test/exported_method_test.go index d59c2f52a..346056d68 100644 --- a/unused2/testdata/src/exported_method_test/exported_method_test.go +++ b/unused2/testdata/src/exported_method_test/exported_method_test.go @@ -26,3 +26,13 @@ func TestFoo(t *testing.T) { t.Errorf("got %d, want 13", cr.N) } } + +var sink int + +func BenchmarkFoo(b *testing.B) { + for i := 0; i < b.N; i++ { + sink = fn() + } +} + +func fn() int { return 0 } diff --git a/unused2/testdata/src/linkname/linkname.go b/unused2/testdata/src/linkname/linkname.go new file mode 100644 index 000000000..b5268faf5 --- /dev/null +++ b/unused2/testdata/src/linkname/linkname.go @@ -0,0 +1,6 @@ +package pkg + +import _ "unsafe" + +//go:linkname foo bar +func foo() {} diff --git a/unused2/unused.go b/unused2/unused.go index 46b4e3299..3b9b97a02 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -19,6 +19,8 @@ import ( // containing the conversion is unused, the fields will be marked as // used. +// TODO(dh): we cannot observe function calls in assembly files. + /* - packages use: @@ -29,6 +31,8 @@ import ( - (1.5) init functions - (1.6) functions exported to cgo - (1.7) the main function iff in the main package + - (1.8) symbols linked via go:linkname + - named types use: - (2.1) exported methods @@ -47,6 +51,7 @@ import ( - types they instantiate or convert to - (4.7) fields they access - (4.8) types of all instructions + - (4.9) package-level variables they assign to iff in tests (sinks for benchmarks) - conversions use: - (5.1) when converting between two equivalent structs, the fields in @@ -88,7 +93,8 @@ import ( - (9.4) conversions use the type they convert to - (9.5) instructions use their operands - (9.6) instructions use their operands' types - - (9.7) variable _reads_ use variables, writes do not + - (9.7) variable _reads_ use variables, writes do not, except in tests + - (9.8) runtime functions that may be called from user code via the compiler - Differences in whole program mode: @@ -145,6 +151,248 @@ func typString(obj types.Object) string { } } +// /usr/lib/go/src/runtime/proc.go:433:6: func badmorestackg0 is unused (U1000) + +// Functions defined in the Go runtime that may be called through +// compiler magic or via assembly. 
+var runtimeFuncs = map[string]bool{ + // The first part of the list is copied from + // cmd/compile/internal/gc/builtin.go, var runtimeDecls + "newobject": true, + "panicindex": true, + "panicslice": true, + "panicdivide": true, + "panicmakeslicelen": true, + "throwinit": true, + "panicwrap": true, + "gopanic": true, + "gorecover": true, + "goschedguarded": true, + "printbool": true, + "printfloat": true, + "printint": true, + "printhex": true, + "printuint": true, + "printcomplex": true, + "printstring": true, + "printpointer": true, + "printiface": true, + "printeface": true, + "printslice": true, + "printnl": true, + "printsp": true, + "printlock": true, + "printunlock": true, + "concatstring2": true, + "concatstring3": true, + "concatstring4": true, + "concatstring5": true, + "concatstrings": true, + "cmpstring": true, + "intstring": true, + "slicebytetostring": true, + "slicebytetostringtmp": true, + "slicerunetostring": true, + "stringtoslicebyte": true, + "stringtoslicerune": true, + "slicecopy": true, + "slicestringcopy": true, + "decoderune": true, + "countrunes": true, + "convI2I": true, + "convT16": true, + "convT32": true, + "convT64": true, + "convTstring": true, + "convTslice": true, + "convT2E": true, + "convT2Enoptr": true, + "convT2I": true, + "convT2Inoptr": true, + "assertE2I": true, + "assertE2I2": true, + "assertI2I": true, + "assertI2I2": true, + "panicdottypeE": true, + "panicdottypeI": true, + "panicnildottype": true, + "ifaceeq": true, + "efaceeq": true, + "fastrand": true, + "makemap64": true, + "makemap": true, + "makemap_small": true, + "mapaccess1": true, + "mapaccess1_fast32": true, + "mapaccess1_fast64": true, + "mapaccess1_faststr": true, + "mapaccess1_fat": true, + "mapaccess2": true, + "mapaccess2_fast32": true, + "mapaccess2_fast64": true, + "mapaccess2_faststr": true, + "mapaccess2_fat": true, + "mapassign": true, + "mapassign_fast32": true, + "mapassign_fast32ptr": true, + "mapassign_fast64": true, + "mapassign_fast64ptr": true, + "mapassign_faststr": true, + "mapiterinit": true, + "mapdelete": true, + "mapdelete_fast32": true, + "mapdelete_fast64": true, + "mapdelete_faststr": true, + "mapiternext": true, + "mapclear": true, + "makechan64": true, + "makechan": true, + "chanrecv1": true, + "chanrecv2": true, + "chansend1": true, + "closechan": true, + "writeBarrier": true, + "typedmemmove": true, + "typedmemclr": true, + "typedslicecopy": true, + "selectnbsend": true, + "selectnbrecv": true, + "selectnbrecv2": true, + "selectsetpc": true, + "selectgo": true, + "block": true, + "makeslice": true, + "makeslice64": true, + "growslice": true, + "memmove": true, + "memclrNoHeapPointers": true, + "memclrHasPointers": true, + "memequal": true, + "memequal8": true, + "memequal16": true, + "memequal32": true, + "memequal64": true, + "memequal128": true, + "int64div": true, + "uint64div": true, + "int64mod": true, + "uint64mod": true, + "float64toint64": true, + "float64touint64": true, + "float64touint32": true, + "int64tofloat64": true, + "uint64tofloat64": true, + "uint32tofloat64": true, + "complex128div": true, + "racefuncenter": true, + "racefuncenterfp": true, + "racefuncexit": true, + "raceread": true, + "racewrite": true, + "racereadrange": true, + "racewriterange": true, + "msanread": true, + "msanwrite": true, + "x86HasPOPCNT": true, + "x86HasSSE41": true, + "arm64HasATOMICS": true, + + // The second part of the list is extracted from assembly code in + // the standard library, with the exception of the runtime package itself + "abort": true, + 
"aeshashbody": true, + "args": true, + "asminit": true, + "badctxt": true, + "badmcall2": true, + "badmcall": true, + "badmorestackg0": true, + "badmorestackgsignal": true, + "badsignal2": true, + "callbackasm1": true, + "callCfunction": true, + "cgocallback_gofunc": true, + "cgocallbackg": true, + "checkgoarm": true, + "check": true, + "debugCallCheck": true, + "debugCallWrap": true, + "emptyfunc": true, + "entersyscall": true, + "exit": true, + "exits": true, + "exitsyscall": true, + "externalthreadhandler": true, + "findnull": true, + "goexit1": true, + "gostring": true, + "i386_set_ldt": true, + "_initcgo": true, + "init_thread_tls": true, + "ldt0setup": true, + "libpreinit": true, + "load_g": true, + "morestack": true, + "mstart": true, + "nacl_sysinfo": true, + "nanotimeQPC": true, + "nanotime": true, + "newosproc0": true, + "newproc": true, + "newstack": true, + "noted": true, + "nowQPC": true, + "osinit": true, + "printf": true, + "racecallback": true, + "reflectcallmove": true, + "reginit": true, + "rt0_go": true, + "save_g": true, + "schedinit": true, + "setldt": true, + "settls": true, + "sighandler": true, + "sigprofNonGo": true, + "sigtrampgo": true, + "_sigtramp": true, + "sigtramp": true, + "stackcheck": true, + "syscall_chdir": true, + "syscall_chroot": true, + "syscall_close": true, + "syscall_dup2": true, + "syscall_execve": true, + "syscall_exit": true, + "syscall_fcntl": true, + "syscall_forkx": true, + "syscall_gethostname": true, + "syscall_getpid": true, + "syscall_ioctl": true, + "syscall_pipe": true, + "syscall_rawsyscall6": true, + "syscall_rawSyscall6": true, + "syscall_rawsyscall": true, + "syscall_RawSyscall": true, + "syscall_rawsysvicall6": true, + "syscall_setgid": true, + "syscall_setgroups": true, + "syscall_setpgid": true, + "syscall_setsid": true, + "syscall_setuid": true, + "syscall_syscall6": true, + "syscall_syscall": true, + "syscall_Syscall": true, + "syscall_sysvicall6": true, + "syscall_wait4": true, + "syscall_write": true, + "traceback": true, + "tstart": true, + "usplitR0": true, + "wbBufFlush": true, + "write": true, +} + func (c *Checker) Lint(j *lint.Job) { unused := c.Check(j.Program, j) for _, u := range unused { @@ -614,6 +862,29 @@ func (g *Graph) seeAndUse(used, by interface{}, reason string) { g.use(used, by, reason) } +func (g *Graph) trackExportedIdentifier(obj types.Object) bool { + if !obj.Exported() { + // object isn't exported, the question is moot + return false + } + if g.wholeProgram { + // whole program mode tracks exported identifiers accurately + return false + } + if g.pkg.Pkg.Name() == "main" { + // exported identifiers in package main can't be imported + return false + } + + // at one point we only considered exported identifiers in + // *_test.go files if they were Benchmark, Example or Test + // functions. However, this doesn't work when we look at one + // package at a time, because objects exported in a test variant + // of a package may be used by the xtest package. 
The only + // solution would be to look at multiple packages at once + return true +} + func (g *Graph) entry(pkg *lint.Pkg) { // TODO rename Entry g.pkg = pkg.SSA @@ -642,7 +913,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { case *types.Const: g.see(obj) fn := surroundingFunc(obj) - if fn == nil && obj.Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { + if fn == nil && g.trackExportedIdentifier(obj) { // (1.4) packages use exported constants (unless in package main) g.use(obj, nil, "exported constant") } @@ -651,7 +922,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { } } - // Find constants being used inside functions + // Find constants being used inside functions, find sinks in tests handledConsts := map[*ast.Ident]struct{}{} for _, fn := range g.job.Program.InitialFunctions { if fn.Pkg != g.pkg { @@ -663,19 +934,40 @@ func (g *Graph) entry(pkg *lint.Pkg) { continue } ast.Inspect(node, func(node ast.Node) bool { - ident, ok := node.(*ast.Ident) - if !ok { - return true + switch node := node.(type) { + case *ast.Ident: + obj, ok := pkg.TypesInfo.Uses[node] + if !ok { + return true + } + switch obj := obj.(type) { + case *types.Const: + g.seeAndUse(obj, fn, "used constant") + } + case *ast.AssignStmt: + for _, expr := range node.Lhs { + ident, ok := expr.(*ast.Ident) + if !ok { + continue + } + obj := pkg.TypesInfo.ObjectOf(ident) + if obj == nil { + continue + } + path := g.pkg.Prog.Fset.File(obj.Pos()).Name() + if strings.HasSuffix(path, "_test.go") { + if obj.Parent() != nil && obj.Parent().Parent() != nil && obj.Parent().Parent().Parent() == nil { + // object's scope is the package, whose + // parent is the file, whose parent is nil + + // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) + // (9.7) variable _reads_ use variables, writes do not, except in tests + g.seeAndUse(obj, fn, "test sink") + } + } + } } - obj, ok := pkg.TypesInfo.Uses[ident] - if !ok { - return true - } - switch obj := obj.(type) { - case *types.Const: - g.seeAndUse(obj, fn, "used constant") - } return true }) } @@ -728,7 +1020,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { case *ssa.Global: if m.Object() != nil { g.see(m.Object()) - if m.Object().Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { + if g.trackExportedIdentifier(m.Object()) { // (1.3) packages use exported variables (unless in package main) g.use(m.Object(), nil, "exported top-level variable") } @@ -740,7 +1032,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { g.use(m, nil, "init function") } // This branch catches top-level functions, not methods. 
- if m.Object() != nil && m.Object().Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { + if m.Object() != nil && g.trackExportedIdentifier(m.Object()) { // (1.2) packages use exported functions (unless in package main) g.use(m, nil, "exported top-level function") } @@ -748,6 +1040,10 @@ func (g *Graph) entry(pkg *lint.Pkg) { // (1.7) packages use the main function iff in the main package g.use(m, nil, "main function") } + if g.pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { + // (9.8) runtime functions that may be called from user code via the compiler + g.use(m, nil, "runtime function") + } if m.Syntax() != nil { doc := m.Syntax().(*ast.FuncDecl).Doc if doc != nil { @@ -755,6 +1051,9 @@ func (g *Graph) entry(pkg *lint.Pkg) { if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { // (1.6) packages use functions exported to cgo g.use(m, nil, "cgo exported") + } else if strings.HasPrefix(cmt.Text, "//go:linkname ") { + // (1.8) packages use symbols linked via go:linkname + g.use(m, nil, "go:linkname") } } } @@ -763,7 +1062,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { case *ssa.Type: if m.Object() != nil { g.see(m.Object()) - if m.Object().Exported() && g.pkg.Pkg.Name() != "main" && !g.wholeProgram { + if g.trackExportedIdentifier(m.Object()) { // (1.1) packages use exported named types (unless in package main) g.use(m.Object(), nil, "exported top-level type") } @@ -943,6 +1242,8 @@ func (g *Graph) typ(t types.Type) { for i := 0; i < t.NumMethods(); i++ { meth := g.pkg.Prog.FuncValue(t.Method(i)) g.see(meth) + // don't use trackExportedIdentifier here, we care about + // all exported methods, even in package main or in tests. if meth.Object() != nil && meth.Object().Exported() && !g.wholeProgram { // (2.1) named types use exported methods g.use(meth, t, "exported method") From 95e22af8c8a613ab3fdbd0e8a0b127a56e0764e4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 9 Apr 2019 15:45:16 +0200 Subject: [PATCH 087/254] Mark entire constant groups as used --- unused2/testdata/src/consts/consts.go | 12 ++++++ unused2/unused.go | 58 ++++++++++++++++++++------- 2 files changed, 56 insertions(+), 14 deletions(-) diff --git a/unused2/testdata/src/consts/consts.go b/unused2/testdata/src/consts/consts.go index f458cb011..8c1076cbd 100644 --- a/unused2/testdata/src/consts/consts.go +++ b/unused2/testdata/src/consts/consts.go @@ -7,6 +7,16 @@ const c3 = 1 const c4 = 1 const C5 = 1 +const ( + c6 = 0 + c7 + c8 + + c9 // MATCH "c9 is unused" + c10 // MATCH "c10 is unused" + c11 // MATCH "c11 is unused" +) + var _ = []int{c3: 1} type T1 struct { @@ -16,6 +26,8 @@ type T1 struct { func init() { _ = []int{c2: 1} var _ [c4]int + + _ = c7 } func Fn() { diff --git a/unused2/unused.go b/unused2/unused.go index 3b9b97a02..6169f175d 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -33,7 +33,6 @@ import ( - (1.7) the main function iff in the main package - (1.8) symbols linked via go:linkname - - named types use: - (2.1) exported methods @@ -97,6 +96,14 @@ import ( - (9.8) runtime functions that may be called from user code via the compiler +- const groups: + (10.1) if one constant out of a block of constants is used, mark all + of them used. a lot of the time, unused constants exist for the sake + of completeness. 
See also + https://github.com/dominikh/go-tools/issues/365 + + + - Differences in whole program mode: - (e1) all packages share a single graph - (e2) types aim to implement all exported interfaces from all packages @@ -690,6 +697,13 @@ func (g *Graph) color(root *Node) { } } +type ConstGroup struct { + // give the struct a size to get unique pointers + _ byte +} + +func (ConstGroup) String() string { return "const group" } + type Node struct { obj interface{} id int @@ -992,21 +1006,37 @@ func (g *Graph) entry(pkg *lint.Pkg) { g.see(fn) } case *ast.GenDecl: - if n.Tok != token.VAR { - return - } - for _, spec := range n.Specs { - v := spec.(*ast.ValueSpec) - if v.Type == nil { - continue + switch n.Tok { + case token.CONST: + groups := lintdsl.GroupSpecs(g.job, n.Specs) + for _, specs := range groups { + if len(specs) > 1 { + cg := &ConstGroup{} + g.see(cg) + for _, spec := range specs { + for _, name := range spec.(*ast.ValueSpec).Names { + obj := pkg.TypesInfo.ObjectOf(name) + // (10.1) const groups + g.seeAndUse(obj, cg, "const group") + g.use(cg, obj, "const group") + } + } + } } - T := lintdsl.TypeOf(g.job, v.Type) - if fn != nil { - g.seeAndUse(T, fn, "var decl") - } else { - g.seeAndUse(T, nil, "var decl") + case token.VAR: + for _, spec := range n.Specs { + v := spec.(*ast.ValueSpec) + if v.Type == nil { + continue + } + T := lintdsl.TypeOf(g.job, v.Type) + if fn != nil { + g.seeAndUse(T, fn, "var decl") + } else { + g.seeAndUse(T, nil, "var decl") + } + g.typ(T) } - g.typ(T) } default: panic(fmt.Sprintf("unreachable: %T", n)) From 5693068c323a10858f5863901fae78f1c4c75336 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 11:38:36 +0200 Subject: [PATCH 088/254] unused2: use 't1' in 'type t2 t1' --- unused2/testdata/src/named/named.go | 4 ++++ unused2/unused.go | 21 +++++++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 unused2/testdata/src/named/named.go diff --git a/unused2/testdata/src/named/named.go b/unused2/testdata/src/named/named.go new file mode 100644 index 000000000..7105f0a0e --- /dev/null +++ b/unused2/testdata/src/named/named.go @@ -0,0 +1,4 @@ +package pkg + +type t1 struct{} +type T2 t1 diff --git a/unused2/unused.go b/unused2/unused.go index 6169f175d..03cca3fbe 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -35,6 +35,7 @@ import ( - named types use: - (2.1) exported methods + - (2.2) the type they're based on - variables and constants use: - their types @@ -922,8 +923,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { for _, obj := range pkg.TypesInfo.Defs { switch obj := obj.(type) { case *types.TypeName: - g.see(obj) - g.typ(obj.Type()) + // types are being handled by walking the AST case *types.Const: g.see(obj) fn := surroundingFunc(obj) @@ -1037,6 +1037,23 @@ func (g *Graph) entry(pkg *lint.Pkg) { } g.typ(T) } + case token.TYPE: + for _, spec := range n.Specs { + // go/types doesn't provide a way to go from a + // types.Named to the named type it was based on + // (the t1 in type t2 t1). Therefore we walk the + // AST and process GenDecls. 
+ // + // (2.2) named types use the type they're based on + v := spec.(*ast.TypeSpec) + T := pkg.TypesInfo.TypeOf(v.Type) + obj := pkg.TypesInfo.ObjectOf(v.Name) + g.see(obj) + g.see(T) + g.use(T, obj, "type") + g.typ(obj.Type()) + g.typ(T) + } } default: panic(fmt.Sprintf("unreachable: %T", n)) From 00fcae904e7911b66f5deefd68959fe81bf813dc Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 12:06:06 +0200 Subject: [PATCH 089/254] unused2: compute method sets less often For unused -exported std, this results in a 40% runtime reduction. --- unused2/implements.go | 5 ++--- unused2/unused.go | 14 ++++++++------ 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/unused2/implements.go b/unused2/implements.go index f3b134503..0457c5760 100644 --- a/unused2/implements.go +++ b/unused2/implements.go @@ -37,7 +37,7 @@ func sameId(obj types.Object, pkg *types.Package, name string) bool { return pkg.Path() == obj.Pkg().Path() } -func (g *Graph) implements(V types.Type, T *types.Interface) ([]*types.Selection, bool) { +func (g *Graph) implements(V types.Type, T *types.Interface, msV *types.MethodSet) ([]*types.Selection, bool) { // fast path for common case if T.Empty() { return nil, true @@ -58,11 +58,10 @@ func (g *Graph) implements(V types.Type, T *types.Interface) ([]*types.Selection } // A concrete type implements T if it implements all methods of T. - ms := g.msCache.MethodSet(V) var sels []*types.Selection for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) - sel := ms.Lookup(m.Pkg(), m.Name()) + sel := msV.Lookup(m.Pkg(), m.Name()) if sel == nil { return nil, false } diff --git a/unused2/unused.go b/unused2/unused.go index 03cca3fbe..9040185a5 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -471,9 +471,10 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages - for _, iface := range ifaces { - for _, t := range notIfaces { - if sels, ok := graph.implements(t, iface); ok { + for _, t := range notIfaces { + ms := graph.msCache.MethodSet(t) + for _, iface := range ifaces { + if sels, ok := graph.implements(t, iface, ms); ok { for _, sel := range sels { graph.useMethod(t, sel, t, "implements") } @@ -1145,9 +1146,10 @@ func (g *Graph) entry(pkg *lint.Pkg) { }) // (8.0) handle interfaces - for _, iface := range ifaces { - for _, t := range notIfaces { - if sels, ok := g.implements(t, iface); ok { + for _, t := range notIfaces { + ms := g.msCache.MethodSet(t) + for _, iface := range ifaces { + if sels, ok := g.implements(t, iface, ms); ok { for _, sel := range sels { g.useMethod(t, sel, t, "implements") } From 21eb5d75c7158b8517b38472504332dc92989b01 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 13:56:43 +0200 Subject: [PATCH 090/254] unused2: track used identifiers across files A single file can be part of multiple packages, for example non-test and test variants. If an identifier is used in one of the packages but not the others, we shouldn't report it as unused. This most often occurs with functions defined in a non-test file but only used in tests. In the future we will want to add a flag to disable this, so that code only used by tests can optionally be flagged. 
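A minimal sketch of the scenario described above (hypothetical names; the tests.go/tests_test.go fixtures added below exercise the same case). helper is compiled into both the plain package and its test variant, but only the test variant calls it, so it must not be reported as unused:

// helper.go – part of both "pkg" and the test variant of "pkg"
package pkg

func helper() int { return 42 }

// helper_test.go – part of the test variant only
package pkg

import "testing"

func TestHelper(t *testing.T) {
	if helper() != 42 {
		t.Fatal("unexpected value")
	}
}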
--- unused2/implements.go | 1 + unused2/testdata/src/tests/tests.go | 3 ++ unused2/testdata/src/tests/tests_test.go | 7 +++ unused2/unused.go | 56 +++++++++++++++++------- 4 files changed, 51 insertions(+), 16 deletions(-) create mode 100644 unused2/testdata/src/tests/tests.go create mode 100644 unused2/testdata/src/tests/tests_test.go diff --git a/unused2/implements.go b/unused2/implements.go index 0457c5760..835baac69 100644 --- a/unused2/implements.go +++ b/unused2/implements.go @@ -44,6 +44,7 @@ func (g *Graph) implements(V types.Type, T *types.Interface, msV *types.MethodSe } if ityp, _ := V.Underlying().(*types.Interface); ityp != nil { + // TODO(dh): is this code reachable? for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) _, obj := lookupMethod(ityp, m.Pkg(), m.Name()) diff --git a/unused2/testdata/src/tests/tests.go b/unused2/testdata/src/tests/tests.go new file mode 100644 index 000000000..253de73aa --- /dev/null +++ b/unused2/testdata/src/tests/tests.go @@ -0,0 +1,3 @@ +package pkg + +func fn(){} diff --git a/unused2/testdata/src/tests/tests_test.go b/unused2/testdata/src/tests/tests_test.go new file mode 100644 index 000000000..4025030d5 --- /dev/null +++ b/unused2/testdata/src/tests/tests_test.go @@ -0,0 +1,7 @@ +package pkg + +import "testing" + +func TestFn(t *testing.T) { + fn() +} diff --git a/unused2/unused.go b/unused2/unused.go index 9040185a5..46e479744 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -121,9 +121,10 @@ func assert(b bool) { } } -type Unused struct { +type Object struct { Obj types.Object Position token.Position + seen bool } type Checker struct { @@ -426,7 +427,7 @@ func (c *Checker) debugf(f string, v ...interface{}) { } } -func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { +func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { scopes := map[*types.Scope]*ssa.Function{} for _, fn := range j.Program.InitialFunctions { if fn.Object() != nil { @@ -435,7 +436,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } } - var out []Unused + out := map[token.Position]Object{} processPkgs := func(pkgs ...*lint.Pkg) { graph := NewGraph() graph.wholeProgram = c.WholeProgram @@ -566,6 +567,17 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { report := func(node *Node) { if node.seen { + var pos token.Pos + switch obj := node.obj.(type) { + case types.Object: + pos = obj.Pos() + case *ssa.Function: + pos = obj.Pos() + } + + if pos != 0 { + out[prog.Fset().Position(pos)] = Object{seen: true} + } return } if node.quiet { @@ -612,18 +624,22 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { // don't report unnamed variables (receivers, interface embedding) if obj.Name() != "" || obj.IsField() { pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj, - Position: pos, - }) + if _, ok := out[pos]; !ok { + out[pos] = Object{ + Obj: obj, + Position: pos, + } + } } case types.Object: if obj.Name() != "_" { pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj, - Position: pos, - }) + if _, ok := out[pos]; !ok { + out[pos] = Object{ + Obj: obj, + Position: pos, + } + } } case *ssa.Function: if obj == nil { @@ -635,10 +651,12 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { return } pos := prog.Fset().Position(obj.Pos()) - out = append(out, Unused{ - Obj: obj.Object(), - Position: pos, - }) + if _, ok := out[pos]; !ok { + out[pos] = Object{ + Obj: obj.Object(), + Position: pos, + } + } default: 
c.debugf("n%d [color=gray];\n", node.id) } @@ -659,7 +677,13 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { processPkgs(pkg) } } - return out + out2 := make([]Object, 0, len(out)) + for _, v := range out { + if !v.seen { + out2 = append(out2, v) + } + } + return out2 } type Graph struct { From 01f6a5687f668c267681f83eaa9c06ab9febc1eb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 14:03:02 +0200 Subject: [PATCH 091/254] unused2: remove unused Object.Position field --- unused2/unused.go | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 46e479744..37d2d76f7 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -122,9 +122,8 @@ func assert(b bool) { } type Object struct { - Obj types.Object - Position token.Position - seen bool + Obj types.Object + seen bool } type Checker struct { @@ -626,8 +625,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { pos := prog.Fset().Position(obj.Pos()) if _, ok := out[pos]; !ok { out[pos] = Object{ - Obj: obj, - Position: pos, + Obj: obj, } } } @@ -636,8 +634,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { pos := prog.Fset().Position(obj.Pos()) if _, ok := out[pos]; !ok { out[pos] = Object{ - Obj: obj, - Position: pos, + Obj: obj, } } } @@ -653,8 +650,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { pos := prog.Fset().Position(obj.Pos()) if _, ok := out[pos]; !ok { out[pos] = Object{ - Obj: obj.Object(), - Position: pos, + Obj: obj.Object(), } } default: From b9c4172a9aca45a14aaa2a0045596d135b60f206 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 14:07:33 +0200 Subject: [PATCH 092/254] unused2: simplify tracking of objects seen in files --- unused2/unused.go | 43 +++++++++++++++++-------------------------- 1 file changed, 17 insertions(+), 26 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 37d2d76f7..4affaed0c 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -121,9 +121,8 @@ func assert(b bool) { } } -type Object struct { - Obj types.Object - seen bool +type Unused struct { + Obj types.Object } type Checker struct { @@ -426,7 +425,7 @@ func (c *Checker) debugf(f string, v ...interface{}) { } } -func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { +func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { scopes := map[*types.Scope]*ssa.Function{} for _, fn := range j.Program.InitialFunctions { if fn.Object() != nil { @@ -435,7 +434,8 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { } } - out := map[token.Position]Object{} + seen := map[token.Position]struct{}{} + var out []Unused processPkgs := func(pkgs ...*lint.Pkg) { graph := NewGraph() graph.wholeProgram = c.WholeProgram @@ -575,7 +575,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { } if pos != 0 { - out[prog.Fset().Position(pos)] = Object{seen: true} + seen[prog.Fset().Position(pos)] = struct{}{} } return } @@ -622,21 +622,15 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { case *types.Var: // don't report unnamed variables (receivers, interface embedding) if obj.Name() != "" || obj.IsField() { - pos := prog.Fset().Position(obj.Pos()) - if _, ok := out[pos]; !ok { - out[pos] = Object{ - Obj: obj, - } - } + out = append(out, Unused{ + Obj: obj, + }) } case types.Object: if obj.Name() != "_" { - pos := prog.Fset().Position(obj.Pos()) - if _, ok := out[pos]; !ok { - 
out[pos] = Object{ - Obj: obj, - } - } + out = append(out, Unused{ + Obj: obj, + }) } case *ssa.Function: if obj == nil { @@ -647,12 +641,9 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { // Closures return } - pos := prog.Fset().Position(obj.Pos()) - if _, ok := out[pos]; !ok { - out[pos] = Object{ - Obj: obj.Object(), - } - } + out = append(out, Unused{ + Obj: obj.Object(), + }) default: c.debugf("n%d [color=gray];\n", node.id) } @@ -673,9 +664,9 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Object { processPkgs(pkg) } } - out2 := make([]Object, 0, len(out)) + out2 := make([]Unused, 0, len(out)) for _, v := range out { - if !v.seen { + if _, ok := seen[prog.Fset().Position(v.Obj.Pos())]; !ok { out2 = append(out2, v) } } From 1cc80a7a71ad0c1894bc503ab5ee753331aac85f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 14:34:49 +0200 Subject: [PATCH 093/254] unused2: remove Unused type --- unused2/unused.go | 34 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/unused2/unused.go b/unused2/unused.go index 4affaed0c..bf417356c 100644 --- a/unused2/unused.go +++ b/unused2/unused.go @@ -121,10 +121,6 @@ func assert(b bool) { } } -type Unused struct { - Obj types.Object -} - type Checker struct { WholeProgram bool Debug io.Writer @@ -403,19 +399,19 @@ var runtimeFuncs = map[string]bool{ func (c *Checker) Lint(j *lint.Job) { unused := c.Check(j.Program, j) for _, u := range unused { - name := u.Obj.Name() - if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { + name := u.Name() + if sig, ok := u.Type().(*types.Signature); ok && sig.Recv() != nil { switch sig.Recv().Type().(type) { case *types.Named, *types.Pointer: typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name()) + name = fmt.Sprintf("(%s).%s", typ, u.Name()) } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, u.Obj.Name()) + name = fmt.Sprintf("%s.%s", typ, u.Name()) } } } - j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name) + j.Errorf(u, "%s %s is unused", typString(u), name) } } @@ -425,7 +421,7 @@ func (c *Checker) debugf(f string, v ...interface{}) { } } -func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { +func (c *Checker) Check(prog *lint.Program, j *lint.Job) []types.Object { scopes := map[*types.Scope]*ssa.Function{} for _, fn := range j.Program.InitialFunctions { if fn.Object() != nil { @@ -435,7 +431,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { } seen := map[token.Position]struct{}{} - var out []Unused + var out []types.Object processPkgs := func(pkgs ...*lint.Pkg) { graph := NewGraph() graph.wholeProgram = c.WholeProgram @@ -622,15 +618,11 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { case *types.Var: // don't report unnamed variables (receivers, interface embedding) if obj.Name() != "" || obj.IsField() { - out = append(out, Unused{ - Obj: obj, - }) + out = append(out, obj) } case types.Object: if obj.Name() != "_" { - out = append(out, Unused{ - Obj: obj, - }) + out = append(out, obj) } case *ssa.Function: if obj == nil { @@ -641,9 +633,7 @@ func (c *Checker) Check(prog *lint.Program, j *lint.Job) []Unused { // Closures return } - out = append(out, Unused{ - Obj: obj.Object(), - }) + out = append(out, obj.Object()) default: c.debugf("n%d [color=gray];\n", node.id) } @@ -664,9 +654,9 @@ func (c *Checker) 
Check(prog *lint.Program, j *lint.Job) []Unused { processPkgs(pkg) } } - out2 := make([]Unused, 0, len(out)) + out2 := make([]types.Object, 0, len(out)) for _, v := range out { - if _, ok := seen[prog.Fset().Position(v.Obj.Pos())]; !ok { + if _, ok := seen[prog.Fset().Position(v.Pos())]; !ok { out2 = append(out2, v) } } From f15110a4cedf8a2493d429226df561941043c2b1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 14:40:11 +0200 Subject: [PATCH 094/254] unused: delete --- cmd/megacheck/megacheck.go | 23 +- unused/implements.go | 79 -- unused/testdata/src/anonymous/anonymous.go | 18 - unused/testdata/src/blank/blank.go | 26 - unused/testdata/src/cgo/cgo.go | 6 - unused/testdata/src/consts/consts.go | 18 - unused/testdata/src/conversion/conversion.go | 92 -- unused/testdata/src/cyclic/cyclic.go | 9 - unused/testdata/src/elem/elem.go | 16 - .../src/embedded_call/embedded_call.go | 21 - unused/testdata/src/embedding/embedding.go | 58 - .../src/exported_fields/exported_fields.go | 36 - .../exported_fields_main.go | 14 - .../exported_method_test/exported_method.go | 1 - .../exported_method_test.go | 28 - unused/testdata/src/fields/fields.go | 68 - unused/testdata/src/functions/functions.go | 36 - unused/testdata/src/generated1/generated1.go | 5 - unused/testdata/src/generated2/generated2.go | 5 - unused/testdata/src/interfaces/interfaces.go | 15 - unused/testdata/src/main/main.go | 14 - unused/testdata/src/mapslice/mapslice.go | 8 - unused/testdata/src/methods/methods.go | 14 - unused/testdata/src/nested/nested.go | 17 - .../testdata/src/nocopy-main/nocopy-main.go | 24 - unused/testdata/src/nocopy/nocopy.go | 20 - unused/testdata/src/only_in_test/pkg.go | 3 - unused/testdata/src/only_in_test/pkg_test.go | 7 - .../pointer-type-embedding.go | 17 - unused/testdata/src/selectors/selectors.go | 14 - .../src/switch_interface/switch_interface.go | 19 - .../src/unused-argument/unused-argument.go | 10 - .../testdata/src/unused_type/unused_type.go | 17 - unused/unused.go | 1101 ----------------- unused/unused_test.go | 19 - 35 files changed, 3 insertions(+), 1875 deletions(-) delete mode 100644 unused/implements.go delete mode 100644 unused/testdata/src/anonymous/anonymous.go delete mode 100644 unused/testdata/src/blank/blank.go delete mode 100644 unused/testdata/src/cgo/cgo.go delete mode 100644 unused/testdata/src/consts/consts.go delete mode 100644 unused/testdata/src/conversion/conversion.go delete mode 100644 unused/testdata/src/cyclic/cyclic.go delete mode 100644 unused/testdata/src/elem/elem.go delete mode 100644 unused/testdata/src/embedded_call/embedded_call.go delete mode 100644 unused/testdata/src/embedding/embedding.go delete mode 100644 unused/testdata/src/exported_fields/exported_fields.go delete mode 100644 unused/testdata/src/exported_fields_main/exported_fields_main.go delete mode 100644 unused/testdata/src/exported_method_test/exported_method.go delete mode 100644 unused/testdata/src/exported_method_test/exported_method_test.go delete mode 100644 unused/testdata/src/fields/fields.go delete mode 100644 unused/testdata/src/functions/functions.go delete mode 100644 unused/testdata/src/generated1/generated1.go delete mode 100644 unused/testdata/src/generated2/generated2.go delete mode 100644 unused/testdata/src/interfaces/interfaces.go delete mode 100644 unused/testdata/src/main/main.go delete mode 100644 unused/testdata/src/mapslice/mapslice.go delete mode 100644 unused/testdata/src/methods/methods.go delete mode 100644 unused/testdata/src/nested/nested.go delete mode 
100644 unused/testdata/src/nocopy-main/nocopy-main.go delete mode 100644 unused/testdata/src/nocopy/nocopy.go delete mode 100644 unused/testdata/src/only_in_test/pkg.go delete mode 100644 unused/testdata/src/only_in_test/pkg_test.go delete mode 100644 unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go delete mode 100644 unused/testdata/src/selectors/selectors.go delete mode 100644 unused/testdata/src/switch_interface/switch_interface.go delete mode 100644 unused/testdata/src/unused-argument/unused-argument.go delete mode 100644 unused/testdata/src/unused_type/unused_type.go delete mode 100644 unused/unused.go delete mode 100644 unused/unused_test.go diff --git a/cmd/megacheck/megacheck.go b/cmd/megacheck/megacheck.go index f037dd83a..8bd00b978 100644 --- a/cmd/megacheck/megacheck.go +++ b/cmd/megacheck/megacheck.go @@ -9,7 +9,7 @@ import ( "honnef.co/go/tools/lint/lintutil" "honnef.co/go/tools/simple" "honnef.co/go/tools/staticcheck" - "honnef.co/go/tools/unused" + "honnef.co/go/tools/unused2" ) func main() { @@ -84,26 +84,9 @@ func main() { } if flags.unused.enabled { - var mode unused.CheckMode - if flags.unused.constants { - mode |= unused.CheckConstants - } - if flags.unused.fields { - mode |= unused.CheckFields - } - if flags.unused.functions { - mode |= unused.CheckFunctions - } - if flags.unused.types { - mode |= unused.CheckTypes - } - if flags.unused.variables { - mode |= unused.CheckVariables - } - uc := unused.NewChecker(mode) + uc := &unused.Checker{} uc.WholeProgram = flags.unused.wholeProgram - uc.ConsiderReflection = flags.unused.reflection - checkers = append(checkers, unused.NewLintChecker(uc)) + checkers = append(checkers, uc) } lintutil.ProcessFlagSet(checkers, fs) diff --git a/unused/implements.go b/unused/implements.go deleted file mode 100644 index 78a545639..000000000 --- a/unused/implements.go +++ /dev/null @@ -1,79 +0,0 @@ -package unused - -import "go/types" - -// lookupMethod returns the index of and method with matching package and name, or (-1, nil). -func lookupMethod(T *types.Interface, pkg *types.Package, name string) (int, *types.Func) { - if name != "_" { - for i := 0; i < T.NumMethods(); i++ { - m := T.Method(i) - if sameId(m, pkg, name) { - return i, m - } - } - } - return -1, nil -} - -func sameId(obj types.Object, pkg *types.Package, name string) bool { - // spec: - // "Two identifiers are different if they are spelled differently, - // or if they appear in different packages and are not exported. - // Otherwise, they are the same." - if name != obj.Name() { - return false - } - // obj.Name == name - if obj.Exported() { - return true - } - // not exported, so packages must be the same (pkg == nil for - // fields in Universe scope; this can only happen for types - // introduced via Eval) - if pkg == nil || obj.Pkg() == nil { - return pkg == obj.Pkg() - } - // pkg != nil && obj.pkg != nil - return pkg.Path() == obj.Pkg().Path() -} - -func (c *Checker) implements(V types.Type, T *types.Interface) bool { - // fast path for common case - if T.Empty() { - return true - } - - if ityp, _ := V.Underlying().(*types.Interface); ityp != nil { - for i := 0; i < T.NumMethods(); i++ { - m := T.Method(i) - _, obj := lookupMethod(ityp, m.Pkg(), m.Name()) - switch { - case obj == nil: - return false - case !types.Identical(obj.Type(), m.Type()): - return false - } - } - return true - } - - // A concrete type implements T if it implements all methods of T. 
- ms := c.msCache.MethodSet(V) - for i := 0; i < T.NumMethods(); i++ { - m := T.Method(i) - sel := ms.Lookup(m.Pkg(), m.Name()) - if sel == nil { - return false - } - - f, _ := sel.Obj().(*types.Func) - if f == nil { - return false - } - - if !types.Identical(f.Type(), m.Type()) { - return false - } - } - return true -} diff --git a/unused/testdata/src/anonymous/anonymous.go b/unused/testdata/src/anonymous/anonymous.go deleted file mode 100644 index d0577a737..000000000 --- a/unused/testdata/src/anonymous/anonymous.go +++ /dev/null @@ -1,18 +0,0 @@ -package pkg - -import "fmt" - -type Node interface { - position() int -} - -type noder struct{} - -func (noder) position() int { panic("unreachable") } - -func Fn() { - nodes := []Node{struct { - noder - }{}} - fmt.Println(nodes) -} diff --git a/unused/testdata/src/blank/blank.go b/unused/testdata/src/blank/blank.go deleted file mode 100644 index d220bdff3..000000000 --- a/unused/testdata/src/blank/blank.go +++ /dev/null @@ -1,26 +0,0 @@ -package pkg - -import _ "fmt" - -type t1 struct{} // MATCH /t1 is unused/ -type t2 struct{} -type t3 struct{} - -var _ = t2{} - -func fn1() { // MATCH /fn1 is unused/ - _ = t1{} - var _ = t1{} -} - -func fn2() { - _ = t3{} -} - -func init() { - fn2() -} - -func _() {} - -type _ struct{} diff --git a/unused/testdata/src/cgo/cgo.go b/unused/testdata/src/cgo/cgo.go deleted file mode 100644 index 6b484f820..000000000 --- a/unused/testdata/src/cgo/cgo.go +++ /dev/null @@ -1,6 +0,0 @@ -package pkg - -//go:cgo_export_dynamic -func foo() {} - -func bar() {} // MATCH /bar is unused/ diff --git a/unused/testdata/src/consts/consts.go b/unused/testdata/src/consts/consts.go deleted file mode 100644 index 6906f1e38..000000000 --- a/unused/testdata/src/consts/consts.go +++ /dev/null @@ -1,18 +0,0 @@ -package pkg - -const c1 = 1 - -const c2 = 1 -const c3 = 1 -const c4 = 1 - -var _ = []int{c3: 1} - -type T1 struct { - F1 [c1]int -} - -func init() { - _ = []int{c2: 1} - var _ [c4]int -} diff --git a/unused/testdata/src/conversion/conversion.go b/unused/testdata/src/conversion/conversion.go deleted file mode 100644 index afeb1f7a1..000000000 --- a/unused/testdata/src/conversion/conversion.go +++ /dev/null @@ -1,92 +0,0 @@ -package pkg - -import ( - "compress/flate" - "unsafe" -) - -type t1 struct { - a int - b int -} - -type t2 struct { - a int - b int -} - -type t3 struct { - a int - b int // MATCH /b is unused/ -} - -type t4 struct { - a int - b int // MATCH /b is unused/ -} - -type t5 struct { - a int - b int -} - -type t6 struct { - a int - b int -} - -type t7 struct { - a int - b int -} - -type t8 struct { - a int - b int -} - -type t9 struct { - Offset int64 - Err error -} - -type t10 struct { - a int - b int -} - -func fn() { - // All fields in t2 used because they're initialised in t1 - v1 := t1{0, 1} - v2 := t2(v1) - _ = v2 - - // Field b isn't used by anyone - v3 := t3{} - v4 := t4(v3) - println(v3.a) - _ = v4 - - // Both fields are used - v5 := t5{} - v6 := t6(v5) - println(v5.a) - println(v6.b) - - v7 := &t7{} - println(v7.a) - println(v7.b) - v8 := (*t8)(v7) - _ = v8 - - vb := flate.ReadError{} - v9 := t9(vb) - _ = v9 - - // All fields are used because this is an unsafe conversion - var b []byte - v10 := (*t10)(unsafe.Pointer(&b[0])) - _ = v10 -} - -func init() { fn() } diff --git a/unused/testdata/src/cyclic/cyclic.go b/unused/testdata/src/cyclic/cyclic.go deleted file mode 100644 index 8601c24f2..000000000 --- a/unused/testdata/src/cyclic/cyclic.go +++ /dev/null @@ -1,9 +0,0 @@ -package pkg - -func a() { // MATCH 
/a is unused/ - b() -} - -func b() { // MATCH /b is unused/ - a() -} diff --git a/unused/testdata/src/elem/elem.go b/unused/testdata/src/elem/elem.go deleted file mode 100644 index 24cbf03cc..000000000 --- a/unused/testdata/src/elem/elem.go +++ /dev/null @@ -1,16 +0,0 @@ -// Test of field usage detection - -package pkg - -type t15 struct{ f151 int } -type a2 [1]t15 - -type t16 struct{} -type a3 [1][1]t16 - -func foo() { - _ = a2{0: {1}} - _ = a3{{{}}} -} - -func init() { foo() } diff --git a/unused/testdata/src/embedded_call/embedded_call.go b/unused/testdata/src/embedded_call/embedded_call.go deleted file mode 100644 index 196ac0dec..000000000 --- a/unused/testdata/src/embedded_call/embedded_call.go +++ /dev/null @@ -1,21 +0,0 @@ -package pkg - -var t1 struct { - t2 - t3 - t4 -} - -type t2 struct{} -type t3 struct{} -type t4 struct{ t5 } -type t5 struct{} - -func (t2) foo() {} -func (t3) bar() {} -func (t5) baz() {} -func init() { - t1.foo() - _ = t1.bar - t1.baz() -} diff --git a/unused/testdata/src/embedding/embedding.go b/unused/testdata/src/embedding/embedding.go deleted file mode 100644 index af495c8b1..000000000 --- a/unused/testdata/src/embedding/embedding.go +++ /dev/null @@ -1,58 +0,0 @@ -package pkg - -type I interface { - f1() - f2() -} - -func init() { - var _ I -} - -type t1 struct{} -type T2 struct{ t1 } - -func (t1) f1() {} -func (T2) f2() {} - -func Fn() { - var v T2 - _ = v.t1 -} - -type I2 interface { - f3() - f4() -} - -type t3 struct{} -type t4 struct { - x int // MATCH /x is unused/ - y int // MATCH /y is unused/ - t3 -} - -func (*t3) f3() {} -func (*t4) f4() {} - -func init() { - var i I2 = &t4{} - i.f3() - i.f4() -} - -type i3 interface { - F() -} - -type I4 interface { - i3 -} - -type T5 struct { - t6 -} - -type t6 struct { - F int -} diff --git a/unused/testdata/src/exported_fields/exported_fields.go b/unused/testdata/src/exported_fields/exported_fields.go deleted file mode 100644 index 64686ccab..000000000 --- a/unused/testdata/src/exported_fields/exported_fields.go +++ /dev/null @@ -1,36 +0,0 @@ -package pkg - -type t1 struct { - F1 int -} - -type T2 struct { - F2 int -} - -var v struct { - T3 -} - -type T3 struct{} - -func (T3) Foo() {} - -func init() { - v.Foo() -} - -func init() { - _ = t1{} -} - -type codeResponse struct { - Tree *codeNode `json:"tree"` -} - -type codeNode struct { -} - -func init() { - var _ codeResponse -} diff --git a/unused/testdata/src/exported_fields_main/exported_fields_main.go b/unused/testdata/src/exported_fields_main/exported_fields_main.go deleted file mode 100644 index ffb99d990..000000000 --- a/unused/testdata/src/exported_fields_main/exported_fields_main.go +++ /dev/null @@ -1,14 +0,0 @@ -package main - -type t1 struct { - F1 int -} - -type T2 struct { - F2 int -} - -func init() { - _ = t1{} - _ = T2{} -} diff --git a/unused/testdata/src/exported_method_test/exported_method.go b/unused/testdata/src/exported_method_test/exported_method.go deleted file mode 100644 index c1caffeb1..000000000 --- a/unused/testdata/src/exported_method_test/exported_method.go +++ /dev/null @@ -1 +0,0 @@ -package pkg diff --git a/unused/testdata/src/exported_method_test/exported_method_test.go b/unused/testdata/src/exported_method_test/exported_method_test.go deleted file mode 100644 index d59c2f52a..000000000 --- a/unused/testdata/src/exported_method_test/exported_method_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package pkg - -import ( - "bytes" - "io" - "io/ioutil" - "testing" -) - -type countReadSeeker struct { - io.ReadSeeker - N int64 -} - -func 
(rs *countReadSeeker) Read(buf []byte) (int, error) { - n, err := rs.ReadSeeker.Read(buf) - rs.N += int64(n) - return n, err -} - -func TestFoo(t *testing.T) { - r := bytes.NewReader([]byte("Hello, world!")) - cr := &countReadSeeker{ReadSeeker: r} - ioutil.ReadAll(cr) - if cr.N != 13 { - t.Errorf("got %d, want 13", cr.N) - } -} diff --git a/unused/testdata/src/fields/fields.go b/unused/testdata/src/fields/fields.go deleted file mode 100644 index feb5ea022..000000000 --- a/unused/testdata/src/fields/fields.go +++ /dev/null @@ -1,68 +0,0 @@ -// Test of field usage detection - -package pkg - -type t1 struct{ f11, f12 int } -type t2 struct{ f21, f22 int } -type t3 struct{ f31 t4 } -type t4 struct{ f41 int } -type t5 struct{ f51 int } -type t6 struct{ f61 int } -type t7 struct{ f71 int } -type m1 map[string]t7 -type t8 struct{ f81 int } -type t9 struct{ f91 int } -type t10 struct{ f101 int } -type t11 struct{ f111 int } -type s1 []t11 -type t12 struct{ f121 int } -type s2 []t12 -type t13 struct{ f131 int } -type t14 struct{ f141 int } -type a1 [1]t14 -type t15 struct{ f151 int } -type a2 [1]t15 -type t16 struct{ f161 int } -type t17 struct{ f171, f172 int } // MATCH /t17 is unused/ -// MATCH:28 /f183 is unused/ -type t18 struct{ f181, f182, f183 int } // MATCH /f182 is unused/ - -type t19 struct{ f191 int } -type m2 map[string]t19 - -type t20 struct{ f201 int } -type m3 map[string]t20 - -type t21 struct{ f211, f212 int } // MATCH /f211 is unused/ - -func foo() { - _ = t10{1} - _ = t21{f212: 1} - _ = []t1{{1, 2}} - _ = t2{1, 2} - _ = []struct{ a int }{{1}} - - // XXX - // _ = []struct{ foo struct{ bar int } }{{struct{ bar int }{1}}} - - _ = []t1{t1{1, 2}} - _ = []t3{{t4{1}}} - _ = map[string]t5{"a": {1}} - _ = map[t6]string{{1}: "a"} - _ = m1{"a": {1}} - _ = map[t8]t8{{}: {1}} - _ = map[t9]t9{{1}: {}} - _ = s1{{1}} - _ = s2{2: {1}} - _ = [...]t13{{1}} - _ = a1{{1}} - _ = a2{0: {1}} - _ = map[[1]t16]int{{{1}}: 1} - y := struct{ x int }{} // MATCH /x is unused/ - _ = y - _ = t18{f181: 1} - _ = []m2{{"a": {1}}} - _ = [][]m3{{{"a": {1}}}} -} - -func init() { foo() } diff --git a/unused/testdata/src/functions/functions.go b/unused/testdata/src/functions/functions.go deleted file mode 100644 index 80b5b5d0b..000000000 --- a/unused/testdata/src/functions/functions.go +++ /dev/null @@ -1,36 +0,0 @@ -package main - -type state func() state - -func a() state { - return a -} - -func main() { - st := a - _ = st() -} - -type t1 struct{} // MATCH /t1 is unused/ -type t2 struct{} -type t3 struct{} - -func fn1() t1 { return t1{} } // MATCH /fn1 is unused/ -func fn2() (x t2) { return } -func fn3() *t3 { return nil } - -func fn4() { - const x = 1 - const y = 2 // MATCH /y is unused/ - type foo int // MATCH /foo is unused/ - type bar int - - _ = x - var _ bar -} - -func init() { - fn2() - fn3() - fn4() -} diff --git a/unused/testdata/src/generated1/generated1.go b/unused/testdata/src/generated1/generated1.go deleted file mode 100644 index 1a8ca55f6..000000000 --- a/unused/testdata/src/generated1/generated1.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by a clever monkey; DO NOT EDIT. - -package pkg - -type t struct{} diff --git a/unused/testdata/src/generated2/generated2.go b/unused/testdata/src/generated2/generated2.go deleted file mode 100644 index 17d736ee1..000000000 --- a/unused/testdata/src/generated2/generated2.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by a bunch of monkeys with typewriters and RSI, DO NOT EDIT. 
- -package pkg - -type t struct{} diff --git a/unused/testdata/src/interfaces/interfaces.go b/unused/testdata/src/interfaces/interfaces.go deleted file mode 100644 index e810549a7..000000000 --- a/unused/testdata/src/interfaces/interfaces.go +++ /dev/null @@ -1,15 +0,0 @@ -package pkg - -type I interface { - fn1() -} - -type t struct{} - -func (t) fn1() {} -func (t) fn2() {} // MATCH /fn2 is unused/ - -func init() { - var _ I - var _ t -} diff --git a/unused/testdata/src/main/main.go b/unused/testdata/src/main/main.go deleted file mode 100644 index ab000fc79..000000000 --- a/unused/testdata/src/main/main.go +++ /dev/null @@ -1,14 +0,0 @@ -package main - -func Fn1() {} -func Fn2() {} // MATCH /Fn2 is unused/ - -const X = 1 // MATCH /X is unused/ - -var Y = 2 // MATCH /Y is unused/ - -type Z struct{} // MATCH /Z is unused/ - -func main() { - Fn1() -} diff --git a/unused/testdata/src/mapslice/mapslice.go b/unused/testdata/src/mapslice/mapslice.go deleted file mode 100644 index 2769b2c21..000000000 --- a/unused/testdata/src/mapslice/mapslice.go +++ /dev/null @@ -1,8 +0,0 @@ -package pkg - -type M map[int]int - -func Fn() { - var n M - _ = []M{n} -} diff --git a/unused/testdata/src/methods/methods.go b/unused/testdata/src/methods/methods.go deleted file mode 100644 index 17673addd..000000000 --- a/unused/testdata/src/methods/methods.go +++ /dev/null @@ -1,14 +0,0 @@ -package pkg - -type t1 struct{} -type t2 struct{ t3 } -type t3 struct{} - -func (t1) Foo() {} -func (t3) Foo() {} -func (t3) foo() {} // MATCH /foo is unused/ - -func init() { - _ = t1{} - _ = t2{} -} diff --git a/unused/testdata/src/nested/nested.go b/unused/testdata/src/nested/nested.go deleted file mode 100644 index ade2c0dbb..000000000 --- a/unused/testdata/src/nested/nested.go +++ /dev/null @@ -1,17 +0,0 @@ -package pkg - -type t struct{} // MATCH /t is unused/ - -func (t) fragment() {} - -func fn() bool { // MATCH /fn is unused/ - var v interface{} = t{} - switch obj := v.(type) { - // XXX it shouldn't report fragment(), because fn is unused - case interface { - fragment() // MATCH /fragment is unused/ - }: - obj.fragment() - } - return false -} diff --git a/unused/testdata/src/nocopy-main/nocopy-main.go b/unused/testdata/src/nocopy-main/nocopy-main.go deleted file mode 100644 index 4fefb5071..000000000 --- a/unused/testdata/src/nocopy-main/nocopy-main.go +++ /dev/null @@ -1,24 +0,0 @@ -package main - -type myNoCopy1 struct{} -type myNoCopy2 struct{} -type locker struct{} // MATCH "locker is unused" -type someStruct struct{ x int } // MATCH "someStruct is unused" - -func (myNoCopy1) Lock() {} -func (recv myNoCopy2) Lock() {} -func (locker) Lock() {} -func (locker) Unlock() {} -func (someStruct) Lock() {} - -type T struct { - noCopy1 myNoCopy1 - noCopy2 myNoCopy2 - field1 someStruct // MATCH "field1 is unused" - field2 locker // MATCH "field2 is unused" - field3 int // MATCH "field3 is unused" -} - -func main() { - _ = T{} -} diff --git a/unused/testdata/src/nocopy/nocopy.go b/unused/testdata/src/nocopy/nocopy.go deleted file mode 100644 index 156edf50c..000000000 --- a/unused/testdata/src/nocopy/nocopy.go +++ /dev/null @@ -1,20 +0,0 @@ -package bar - -type myNoCopy1 struct{} -type myNoCopy2 struct{} -type locker struct{} // MATCH "locker is unused" -type someStruct struct{ x int } // MATCH "someStruct is unused" - -func (myNoCopy1) Lock() {} -func (recv myNoCopy2) Lock() {} -func (locker) Lock() {} -func (locker) Unlock() {} -func (someStruct) Lock() {} - -type T struct { - noCopy1 myNoCopy1 - noCopy2 myNoCopy2 - field1 
someStruct // MATCH "field1 is unused" - field2 locker // MATCH "field2 is unused" - field3 int // MATCH "field3 is unused" -} diff --git a/unused/testdata/src/only_in_test/pkg.go b/unused/testdata/src/only_in_test/pkg.go deleted file mode 100644 index ca2d5b3cd..000000000 --- a/unused/testdata/src/only_in_test/pkg.go +++ /dev/null @@ -1,3 +0,0 @@ -package pkg - -func fn() {} diff --git a/unused/testdata/src/only_in_test/pkg_test.go b/unused/testdata/src/only_in_test/pkg_test.go deleted file mode 100644 index 99fdfd753..000000000 --- a/unused/testdata/src/only_in_test/pkg_test.go +++ /dev/null @@ -1,7 +0,0 @@ -package pkg - -import "testing" - -func TestPkg(t *testing.T) { - fn() -} diff --git a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go deleted file mode 100644 index 648b7d6f0..000000000 --- a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go +++ /dev/null @@ -1,17 +0,0 @@ -package pkg - -func init() { - var p P - _ = p.n -} - -type T0 struct { - m int // MATCH /m is unused/ - n int -} - -type T1 struct { - T0 -} - -type P *T1 diff --git a/unused/testdata/src/selectors/selectors.go b/unused/testdata/src/selectors/selectors.go deleted file mode 100644 index 9ab337888..000000000 --- a/unused/testdata/src/selectors/selectors.go +++ /dev/null @@ -1,14 +0,0 @@ -package pkg - -type t struct { - f int -} - -func fn(v *t) { - println(v.f) -} - -func init() { - var v t - fn(&v) -} diff --git a/unused/testdata/src/switch_interface/switch_interface.go b/unused/testdata/src/switch_interface/switch_interface.go deleted file mode 100644 index 99c2ce858..000000000 --- a/unused/testdata/src/switch_interface/switch_interface.go +++ /dev/null @@ -1,19 +0,0 @@ -package pkg - -type t struct{} - -func (t) fragment() {} - -func fn() bool { - var v interface{} = t{} - switch obj := v.(type) { - case interface { - fragment() - }: - obj.fragment() - } - return false -} - -var x = fn() -var _ = x diff --git a/unused/testdata/src/unused-argument/unused-argument.go b/unused/testdata/src/unused-argument/unused-argument.go deleted file mode 100644 index 423592692..000000000 --- a/unused/testdata/src/unused-argument/unused-argument.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -type t1 struct{} -type t2 struct{} - -func (t1) foo(arg *t2) {} - -func init() { - t1{}.foo(nil) -} diff --git a/unused/testdata/src/unused_type/unused_type.go b/unused/testdata/src/unused_type/unused_type.go deleted file mode 100644 index eabfce4b5..000000000 --- a/unused/testdata/src/unused_type/unused_type.go +++ /dev/null @@ -1,17 +0,0 @@ -package pkg - -type t1 struct{} // MATCH /t1 is unused/ - -func (t1) Fn() {} - -type t2 struct{} - -func (*t2) Fn() {} - -func init() { - (*t2).Fn(nil) -} - -type t3 struct{} // MATCH /t3 is unused/ - -func (t3) fn() diff --git a/unused/unused.go b/unused/unused.go deleted file mode 100644 index e962c16bb..000000000 --- a/unused/unused.go +++ /dev/null @@ -1,1101 +0,0 @@ -package unused // import "honnef.co/go/tools/unused" - -import ( - "fmt" - "go/ast" - "go/token" - "go/types" - "io" - "path/filepath" - "strings" - - "honnef.co/go/tools/lint" - . 
"honnef.co/go/tools/lint/lintdsl" - - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/typeutil" -) - -func NewLintChecker(c *Checker) *LintChecker { - l := &LintChecker{ - c: c, - } - return l -} - -type LintChecker struct { - c *Checker -} - -func (*LintChecker) Name() string { return "unused" } -func (*LintChecker) Prefix() string { return "U" } - -func (l *LintChecker) Init(*lint.Program) {} -func (l *LintChecker) Checks() []lint.Check { - return []lint.Check{ - {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, - } -} - -func typString(obj types.Object) string { - switch obj := obj.(type) { - case *types.Func: - return "func" - case *types.Var: - if obj.IsField() { - return "field" - } - return "var" - case *types.Const: - return "const" - case *types.TypeName: - return "type" - default: - // log.Printf("%T", obj) - return "identifier" - } -} - -func (l *LintChecker) Lint(j *lint.Job) { - unused := l.c.Check(j.Program) - for _, u := range unused { - name := u.Obj.Name() - if sig, ok := u.Obj.Type().(*types.Signature); ok && sig.Recv() != nil { - switch sig.Recv().Type().(type) { - case *types.Named, *types.Pointer: - typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) - if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, u.Obj.Name()) - } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, u.Obj.Name()) - } - } - } - j.Errorf(u.Obj, "%s %s is unused", typString(u.Obj), name) - } -} - -type graph struct { - roots []*graphNode - nodes map[interface{}]*graphNode -} - -func (g *graph) markUsedBy(obj, usedBy interface{}) { - objNode := g.getNode(obj) - usedByNode := g.getNode(usedBy) - if objNode.obj == usedByNode.obj { - return - } - usedByNode.uses[objNode] = struct{}{} -} - -var labelCounter = 1 - -func (g *graph) getNode(obj interface{}) *graphNode { - for { - if pt, ok := obj.(*types.Pointer); ok { - obj = pt.Elem() - } else { - break - } - } - _, ok := g.nodes[obj] - if !ok { - g.addObj(obj) - } - - return g.nodes[obj] -} - -func (g *graph) addObj(obj interface{}) { - if pt, ok := obj.(*types.Pointer); ok { - obj = pt.Elem() - } - node := &graphNode{obj: obj, uses: make(map[*graphNode]struct{}), n: labelCounter} - g.nodes[obj] = node - labelCounter++ - - if obj, ok := obj.(*types.Struct); ok { - n := obj.NumFields() - for i := 0; i < n; i++ { - field := obj.Field(i) - g.markUsedBy(obj, field) - } - } -} - -type graphNode struct { - obj interface{} - uses map[*graphNode]struct{} - used bool - quiet bool - n int -} - -type CheckMode int - -const ( - CheckConstants CheckMode = 1 << iota - CheckFields - CheckFunctions - CheckTypes - CheckVariables - - CheckAll = CheckConstants | CheckFields | CheckFunctions | CheckTypes | CheckVariables -) - -type Unused struct { - Obj types.Object - Position token.Position -} - -type Checker struct { - Mode CheckMode - WholeProgram bool - ConsiderReflection bool - Debug io.Writer - - graph *graph - - msCache typeutil.MethodSetCache - prog *lint.Program - topmostCache map[*types.Scope]*types.Scope - interfaces []*types.Interface -} - -func NewChecker(mode CheckMode) *Checker { - return &Checker{ - Mode: mode, - graph: &graph{ - nodes: make(map[interface{}]*graphNode), - }, - topmostCache: make(map[*types.Scope]*types.Scope), - } -} - -func (c *Checker) checkConstants() bool { return (c.Mode & CheckConstants) > 0 } -func (c *Checker) checkFields() bool { return (c.Mode & CheckFields) > 0 } -func (c *Checker) checkFunctions() bool { return (c.Mode & CheckFunctions) > 0 } -func (c 
*Checker) checkTypes() bool { return (c.Mode & CheckTypes) > 0 } -func (c *Checker) checkVariables() bool { return (c.Mode & CheckVariables) > 0 } - -func (c *Checker) markFields(typ types.Type) { - structType, ok := typ.Underlying().(*types.Struct) - if !ok { - return - } - n := structType.NumFields() - for i := 0; i < n; i++ { - field := structType.Field(i) - c.graph.markUsedBy(field, typ) - } -} - -type Error struct { - Errors map[string][]error -} - -func (e Error) Error() string { - return fmt.Sprintf("errors in %d packages", len(e.Errors)) -} - -func (c *Checker) Check(prog *lint.Program) []Unused { - var unused []Unused - c.prog = prog - if c.WholeProgram { - c.findExportedInterfaces() - } - for _, pkg := range prog.InitialPackages { - c.processDefs(pkg) - c.processUses(pkg) - c.processTypes(pkg) - c.processSelections(pkg) - c.processAST(pkg) - } - - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - typNode, ok := c.graph.nodes[obj.Type()] - if !ok { - continue - } - node.uses[typNode] = struct{}{} - } - - roots := map[*graphNode]struct{}{} - for _, root := range c.graph.roots { - roots[root] = struct{}{} - } - markNodesUsed(roots) - c.markNodesQuiet() - c.deduplicate() - - if c.Debug != nil { - c.printDebugGraph(c.Debug) - } - - for _, node := range c.graph.nodes { - if node.used || node.quiet { - continue - } - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - found := false - if !false { - for _, pkg := range prog.InitialPackages { - if pkg.Types == obj.Pkg() { - found = true - break - } - } - } - if !found { - continue - } - - pos := c.prog.Fset().Position(obj.Pos()) - if pos.Filename == "" || filepath.Base(pos.Filename) == "C" { - continue - } - - unused = append(unused, Unused{Obj: obj, Position: pos}) - } - - return unused -} - -// isNoCopyType reports whether a type represents the NoCopy sentinel -// type. The NoCopy type is a named struct with no fields and exactly -// one method `func Lock()` that is empty. -// -// FIXME(dh): currently we're not checking that the function body is -// empty. 
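As an illustrative aside (not part of this patch, names invented), the sentinel shape that isNoCopyType describes is a named, fieldless struct whose only method is an empty Lock, and useNoCopyFields keeps such fields alive. A minimal sketch:

package pkg

type noCopy struct{}

// Lock is a no-op; it exists only so that vet's copylocks check flags
// accidental copies of values embedding noCopy.
func (*noCopy) Lock() {}

type Conn struct {
	noCopy noCopy // kept used by useNoCopyFields, along with its Lock method
	fd     int
}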
-func isNoCopyType(typ types.Type) bool { - st, ok := typ.Underlying().(*types.Struct) - if !ok { - return false - } - if st.NumFields() != 0 { - return false - } - - named, ok := typ.(*types.Named) - if !ok { - return false - } - if named.NumMethods() != 1 { - return false - } - meth := named.Method(0) - if meth.Name() != "Lock" { - return false - } - sig := meth.Type().(*types.Signature) - if sig.Params().Len() != 0 || sig.Results().Len() != 0 { - return false - } - return true -} - -func (c *Checker) useNoCopyFields(typ types.Type) { - if st, ok := typ.Underlying().(*types.Struct); ok { - n := st.NumFields() - for i := 0; i < n; i++ { - field := st.Field(i) - if isNoCopyType(field.Type()) { - c.graph.markUsedBy(field, typ) - c.graph.markUsedBy(field.Type().(*types.Named).Method(0), field.Type()) - } - } - } -} - -func (c *Checker) useExportedFields(typ types.Type, by types.Type) bool { - any := false - if st, ok := typ.Underlying().(*types.Struct); ok { - n := st.NumFields() - for i := 0; i < n; i++ { - field := st.Field(i) - if field.Anonymous() { - if c.useExportedFields(field.Type(), typ) { - c.graph.markUsedBy(field, typ) - } - } - if field.Exported() { - c.graph.markUsedBy(field, by) - any = true - } - } - } - return any -} - -func (c *Checker) useExportedMethods(typ types.Type) { - named, ok := typ.(*types.Named) - if !ok { - return - } - ms := typeutil.IntuitiveMethodSet(named, &c.msCache) - for i := 0; i < len(ms); i++ { - meth := ms[i].Obj() - if meth.Exported() { - c.graph.markUsedBy(meth, typ) - } - } - - st, ok := named.Underlying().(*types.Struct) - if !ok { - return - } - n := st.NumFields() - for i := 0; i < n; i++ { - field := st.Field(i) - if !field.Anonymous() { - continue - } - ms := typeutil.IntuitiveMethodSet(field.Type(), &c.msCache) - for j := 0; j < len(ms); j++ { - if ms[j].Obj().Exported() { - c.graph.markUsedBy(field, typ) - break - } - } - } -} - -func (c *Checker) processDefs(pkg *lint.Pkg) { - for _, obj := range pkg.TypesInfo.Defs { - if obj == nil { - continue - } - c.graph.getNode(obj) - - if obj, ok := obj.(*types.TypeName); ok { - c.graph.markUsedBy(obj.Type().Underlying(), obj.Type()) - c.graph.markUsedBy(obj.Type(), obj) // TODO is this needed? - c.graph.markUsedBy(obj, obj.Type()) - - // We mark all exported fields as used. For normal - // operation, we have to. The user may use these fields - // without us knowing. - // - // TODO(dh): In whole-program mode, however, we mark them - // as used because of reflection (such as JSON - // marshaling). Strictly speaking, we would only need to - // mark them used if an instance of the type was - // accessible via an interface value. - if !c.WholeProgram || c.ConsiderReflection { - c.useExportedFields(obj.Type(), obj.Type()) - } - - // TODO(dh): Traditionally we have not marked all exported - // methods as exported, even though they're strictly - // speaking accessible through reflection. We've done that - // because using methods just via reflection is rare, and - // not worth the false negatives. With the new -reflect - // flag, however, we should reconsider that choice. - if !c.WholeProgram { - c.useExportedMethods(obj.Type()) - } - } - - switch obj := obj.(type) { - case *types.Var, *types.Const, *types.Func, *types.TypeName: - if obj.Exported() { - // Exported variables and constants use their types, - // even if there's no expression using them in the - // checked program. - // - // Also operates on funcs and type names, but that's - // irrelevant/redundant. 
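A hedged example (names invented, not taken from the patch) of the rule stated above: an exported constant or variable keeps its otherwise-unreferenced type alive, because code outside the checked program may rely on it.

package pkg

type level int // referenced only through the exported names below

const DefaultLevel level = 0

var Verbosity level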
- c.graph.markUsedBy(obj.Type(), obj) - } - if obj.Name() == "_" { - node := c.graph.getNode(obj) - node.quiet = true - scope := c.topmostScope(pkg.Types.Scope().Innermost(obj.Pos()), pkg.Types) - if scope == pkg.Types.Scope() { - c.graph.roots = append(c.graph.roots, node) - } else { - c.graph.markUsedBy(obj, scope) - } - } else { - // Variables declared in functions are used. This is - // done so that arguments and return parameters are - // always marked as used. - if _, ok := obj.(*types.Var); ok { - if obj.Parent() != obj.Pkg().Scope() && obj.Parent() != nil { - c.graph.markUsedBy(obj, c.topmostScope(obj.Parent(), obj.Pkg())) - c.graph.markUsedBy(obj.Type(), obj) - } - } - } - } - - if fn, ok := obj.(*types.Func); ok { - // A function uses its signature - c.graph.markUsedBy(fn, fn.Type()) - - // A function uses its return types - sig := fn.Type().(*types.Signature) - res := sig.Results() - n := res.Len() - for i := 0; i < n; i++ { - c.graph.markUsedBy(res.At(i).Type(), fn) - } - } - - if obj, ok := obj.(interface { - Scope() *types.Scope - Pkg() *types.Package - }); ok { - scope := obj.Scope() - c.graph.markUsedBy(c.topmostScope(scope, obj.Pkg()), obj) - } - - if c.isRoot(obj) { - node := c.graph.getNode(obj) - c.graph.roots = append(c.graph.roots, node) - if obj, ok := obj.(*types.PkgName); ok { - scope := obj.Pkg().Scope() - c.graph.markUsedBy(scope, obj) - } - } - } -} - -func (c *Checker) processUses(pkg *lint.Pkg) { - for ident, usedObj := range pkg.TypesInfo.Uses { - if _, ok := usedObj.(*types.PkgName); ok { - continue - } - pos := ident.Pos() - scope := pkg.Types.Scope().Innermost(pos) - scope = c.topmostScope(scope, pkg.Types) - if scope != pkg.Types.Scope() { - c.graph.markUsedBy(usedObj, scope) - } - - switch usedObj.(type) { - case *types.Var, *types.Const: - c.graph.markUsedBy(usedObj.Type(), usedObj) - } - } -} - -func (c *Checker) findExportedInterfaces() { - c.interfaces = []*types.Interface{types.Universe.Lookup("error").Type().(*types.Named).Underlying().(*types.Interface)} - var pkgs []*packages.Package - if c.WholeProgram { - pkgs = append(pkgs, c.prog.AllPackages...) - } else { - for _, pkg := range c.prog.InitialPackages { - pkgs = append(pkgs, pkg.Package) - } - } - - for _, pkg := range pkgs { - for _, tv := range pkg.TypesInfo.Types { - iface, ok := tv.Type.(*types.Interface) - if !ok { - continue - } - if iface.NumMethods() == 0 { - continue - } - c.interfaces = append(c.interfaces, iface) - } - } -} - -func (c *Checker) processTypes(pkg *lint.Pkg) { - implementers := map[types.Type]*types.Pointer{} - var interfaces []*types.Interface - for _, tv := range pkg.TypesInfo.Types { - if typ, ok := tv.Type.(interface { - Elem() types.Type - }); ok { - c.graph.markUsedBy(typ.Elem(), typ) - } - - switch obj := tv.Type.(type) { - case *types.Named: - implementers[obj] = types.NewPointer(obj) - c.graph.markUsedBy(obj, obj.Underlying()) - c.graph.markUsedBy(obj.Underlying(), obj) - case *types.Interface: - if obj.NumMethods() > 0 { - interfaces = append(interfaces, obj) - } - case *types.Struct: - implementers[obj] = types.NewPointer(obj) - c.useNoCopyFields(obj) - if pkg.Types.Name() != "main" && !c.WholeProgram { - c.useExportedFields(obj, obj) - } - } - } - - // Pretend that all types are meant to implement as many - // interfaces as possible. - // - // TODO(dh): For normal operations, that's the best we can do, as - // we have no idea what external users will do with our types. 
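To make the interface rule above concrete (an invented sketch, not from the patch): a method can be live solely because its receiver type satisfies an exported interface, even if nothing in the package calls it by name.

package pkg

type discard struct{}

// Never called directly here, but kept because discard satisfies
// io.Writer and may be used through that interface by importers.
func (discard) Write(p []byte) (int, error) { return len(p), nil }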
In - // whole-program mode, we could be more precise, in two ways: - // 1) Only consider interfaces if a type has been assigned to one - // 2) Use SSA and flow analysis and determine the exact set of - // interfaces that is relevant. - fn := func(iface *types.Interface) { - for i := 0; i < iface.NumEmbeddeds(); i++ { - c.graph.markUsedBy(iface.Embedded(i), iface) - } - namedLoop: - for obj, objPtr := range implementers { - switch obj.Underlying().(type) { - case *types.Interface: - // pointers to interfaces have no methods, only checking non-pointer - if !c.implements(obj, iface) { - continue namedLoop - } - default: - // pointer receivers include the method set of non-pointer receivers, - // only checking pointer - if !c.implements(objPtr, iface) { - continue namedLoop - } - } - - ifaceMethods := make(map[string]struct{}, iface.NumMethods()) - n := iface.NumMethods() - for i := 0; i < n; i++ { - meth := iface.Method(i) - ifaceMethods[meth.Name()] = struct{}{} - } - for _, obj := range []types.Type{obj, objPtr} { - ms := c.msCache.MethodSet(obj) - n := ms.Len() - for i := 0; i < n; i++ { - sel := ms.At(i) - meth := sel.Obj().(*types.Func) - _, found := ifaceMethods[meth.Name()] - if !found { - continue - } - c.graph.markUsedBy(meth.Type().(*types.Signature).Recv().Type(), obj) // embedded receiver - if len(sel.Index()) > 1 { - f := getField(obj, sel.Index()[0]) - c.graph.markUsedBy(f, obj) // embedded receiver - } - c.graph.markUsedBy(meth, obj) - } - } - } - } - - for _, iface := range interfaces { - fn(iface) - } - for _, iface := range c.interfaces { - fn(iface) - } -} - -func (c *Checker) processSelections(pkg *lint.Pkg) { - fn := func(expr *ast.SelectorExpr, sel *types.Selection, offset int) { - scope := pkg.Types.Scope().Innermost(expr.Pos()) - c.graph.markUsedBy(sel, c.topmostScope(scope, pkg.Types)) - c.graph.markUsedBy(sel.Obj(), sel) - if len(sel.Index()) > 1 { - typ := sel.Recv() - indices := sel.Index() - for _, idx := range indices[:len(indices)-offset] { - obj := getField(typ, idx) - typ = obj.Type() - c.graph.markUsedBy(obj, sel) - } - } - } - - for expr, sel := range pkg.TypesInfo.Selections { - switch sel.Kind() { - case types.FieldVal: - fn(expr, sel, 0) - case types.MethodVal: - fn(expr, sel, 1) - } - } -} - -func dereferenceType(typ types.Type) types.Type { - if typ, ok := typ.(*types.Pointer); ok { - return typ.Elem() - } - return typ -} - -// processConversion marks fields as used if they're part of a type conversion. -func (c *Checker) processConversion(pkg *lint.Pkg, node ast.Node) { - if node, ok := node.(*ast.CallExpr); ok { - callTyp := pkg.TypesInfo.TypeOf(node.Fun) - var typDst *types.Struct - var ok bool - switch typ := callTyp.(type) { - case *types.Named: - typDst, ok = typ.Underlying().(*types.Struct) - case *types.Pointer: - typDst, ok = typ.Elem().Underlying().(*types.Struct) - default: - return - } - if !ok { - return - } - - if typ, ok := pkg.TypesInfo.TypeOf(node.Args[0]).(*types.Basic); ok && typ.Kind() == types.UnsafePointer { - // This is an unsafe conversion. Assume that all the - // fields are relevant (they are, because of memory - // layout) - n := typDst.NumFields() - for i := 0; i < n; i++ { - c.graph.markUsedBy(typDst.Field(i), typDst) - } - return - } - - typSrc, ok := dereferenceType(pkg.TypesInfo.TypeOf(node.Args[0])).Underlying().(*types.Struct) - if !ok { - return - } - - // When we convert from type t1 to t2, were t1 and t2 are - // structs, all fields are relevant, as otherwise the - // conversion would fail. 
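A minimal sketch of the conversion case being described (types invented for illustration, not part of the patch):

package pkg

type wireUser struct {
	Name string
	age  int
}

type dbUser struct {
	Name string
	age  int
}

func toDB(u wireUser) dbUser {
	// The conversion marks wireUser.age and dbUser.age as mutually used,
	// even though neither field is ever read by name.
	return dbUser(u)
}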
- // - // We mark t2's fields as used by t1's fields, and vice - // versa. That way, if no code actually refers to a field - // in either type, it's still correctly marked as unused. - // If a field is used in either struct, it's implicitly - // relevant in the other one, too. - // - // It works in a similar way for conversions between types - // of two packages, only that the extra information in the - // graph is redundant unless we're in whole program mode. - n := typDst.NumFields() - for i := 0; i < n; i++ { - fDst := typDst.Field(i) - fSrc := typSrc.Field(i) - c.graph.markUsedBy(fDst, fSrc) - c.graph.markUsedBy(fSrc, fDst) - } - } -} - -// processCompositeLiteral marks fields as used if the struct is used -// in a composite literal. -func (c *Checker) processCompositeLiteral(pkg *lint.Pkg, node ast.Node) { - // XXX how does this actually work? wouldn't it match t{}? - if node, ok := node.(*ast.CompositeLit); ok { - typ := pkg.TypesInfo.TypeOf(node) - if _, ok := typ.(*types.Named); ok { - typ = typ.Underlying() - } - if _, ok := typ.(*types.Struct); !ok { - return - } - - if isBasicStruct(node.Elts) { - c.markFields(typ) - } - } -} - -// processCgoExported marks functions as used if they're being -// exported to cgo. -func (c *Checker) processCgoExported(pkg *lint.Pkg, node ast.Node) { - if node, ok := node.(*ast.FuncDecl); ok { - if node.Doc == nil { - return - } - for _, cmt := range node.Doc.List { - if !strings.HasPrefix(cmt.Text, "//go:cgo_export_") { - return - } - obj := pkg.TypesInfo.ObjectOf(node.Name) - c.graph.roots = append(c.graph.roots, c.graph.getNode(obj)) - } - } -} - -func (c *Checker) processVariableDeclaration(pkg *lint.Pkg, node ast.Node) { - if decl, ok := node.(*ast.GenDecl); ok { - for _, spec := range decl.Specs { - spec, ok := spec.(*ast.ValueSpec) - if !ok { - continue - } - for i, name := range spec.Names { - if i >= len(spec.Values) { - break - } - value := spec.Values[i] - fn := func(node ast.Node) bool { - if node3, ok := node.(*ast.Ident); ok { - obj := pkg.TypesInfo.ObjectOf(node3) - if _, ok := obj.(*types.PkgName); ok { - return true - } - c.graph.markUsedBy(obj, pkg.TypesInfo.ObjectOf(name)) - } - return true - } - ast.Inspect(value, fn) - } - } - } -} - -func (c *Checker) processArrayConstants(pkg *lint.Pkg, node ast.Node) { - if decl, ok := node.(*ast.ArrayType); ok { - ident, ok := decl.Len.(*ast.Ident) - if !ok { - return - } - c.graph.markUsedBy(pkg.TypesInfo.ObjectOf(ident), pkg.TypesInfo.TypeOf(decl)) - } -} - -func (c *Checker) processKnownReflectMethodCallers(pkg *lint.Pkg, node ast.Node) { - call, ok := node.(*ast.CallExpr) - if !ok { - return - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return - } - if !IsType(pkg.TypesInfo.TypeOf(sel.X), "*net/rpc.Server") { - x, ok := sel.X.(*ast.Ident) - if !ok { - return - } - pkgname, ok := pkg.TypesInfo.ObjectOf(x).(*types.PkgName) - if !ok { - return - } - if pkgname.Imported().Path() != "net/rpc" { - return - } - } - - var arg ast.Expr - switch sel.Sel.Name { - case "Register": - if len(call.Args) != 1 { - return - } - arg = call.Args[0] - case "RegisterName": - if len(call.Args) != 2 { - return - } - arg = call.Args[1] - } - typ := pkg.TypesInfo.TypeOf(arg) - ms := types.NewMethodSet(typ) - for i := 0; i < ms.Len(); i++ { - c.graph.markUsedBy(ms.At(i).Obj(), typ) - } -} - -func (c *Checker) processAST(pkg *lint.Pkg) { - fn := func(node ast.Node) bool { - c.processConversion(pkg, node) - c.processKnownReflectMethodCallers(pkg, node) - 
c.processCompositeLiteral(pkg, node) - c.processCgoExported(pkg, node) - c.processVariableDeclaration(pkg, node) - c.processArrayConstants(pkg, node) - return true - } - for _, file := range pkg.Syntax { - ast.Inspect(file, fn) - } -} - -func isBasicStruct(elts []ast.Expr) bool { - for _, elt := range elts { - if _, ok := elt.(*ast.KeyValueExpr); !ok { - return true - } - } - return false -} - -func isPkgScope(obj types.Object) bool { - return obj.Parent() == obj.Pkg().Scope() -} - -func isMain(obj types.Object) bool { - if obj.Pkg().Name() != "main" { - return false - } - if obj.Name() != "main" { - return false - } - if !isPkgScope(obj) { - return false - } - if !isFunction(obj) { - return false - } - if isMethod(obj) { - return false - } - return true -} - -func isFunction(obj types.Object) bool { - _, ok := obj.(*types.Func) - return ok -} - -func isMethod(obj types.Object) bool { - if !isFunction(obj) { - return false - } - return obj.(*types.Func).Type().(*types.Signature).Recv() != nil -} - -func isVariable(obj types.Object) bool { - _, ok := obj.(*types.Var) - return ok -} - -func isConstant(obj types.Object) bool { - _, ok := obj.(*types.Const) - return ok -} - -func isType(obj types.Object) bool { - _, ok := obj.(*types.TypeName) - return ok -} - -func isField(obj types.Object) bool { - if obj, ok := obj.(*types.Var); ok && obj.IsField() { - return true - } - return false -} - -func (c *Checker) checkFlags(v interface{}) bool { - obj, ok := v.(types.Object) - if !ok { - return false - } - if isFunction(obj) && !c.checkFunctions() { - return false - } - if isVariable(obj) && !c.checkVariables() { - return false - } - if isConstant(obj) && !c.checkConstants() { - return false - } - if isType(obj) && !c.checkTypes() { - return false - } - if isField(obj) && !c.checkFields() { - return false - } - return true -} - -func (c *Checker) isRoot(obj types.Object) bool { - // - in local mode, main, init, tests, and non-test, non-main exported are roots - // - in global mode (not yet implemented), main, init and tests are roots - - if _, ok := obj.(*types.PkgName); ok { - return true - } - - if isMain(obj) || (isFunction(obj) && !isMethod(obj) && obj.Name() == "init") { - return true - } - if obj.Exported() { - f := c.prog.Fset().Position(obj.Pos()).Filename - if strings.HasSuffix(f, "_test.go") { - return strings.HasPrefix(obj.Name(), "Test") || - strings.HasPrefix(obj.Name(), "Benchmark") || - strings.HasPrefix(obj.Name(), "Example") - } - - // Package-level are used, except in package main - if isPkgScope(obj) && obj.Pkg().Name() != "main" && !c.WholeProgram { - return true - } - } - return false -} - -func markNodesUsed(nodes map[*graphNode]struct{}) { - for node := range nodes { - wasUsed := node.used - node.used = true - if !wasUsed { - markNodesUsed(node.uses) - } - } -} - -// deduplicate merges objects based on their positions. This is done -// to work around packages existing multiple times in go/packages. 
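An invented example of the composite-literal handling above (not from the patch): isBasicStruct reports literals with positional elements, and only those force every field of the struct type to be marked used.

package pkg

type point struct {
	x, y int
	tag  string
}

var a = point{1, 2, "origin"} // positional literal: x, y and tag all count as used
var b = point{x: 1}           // keyed literal: only x is referenced here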
-func (c *Checker) deduplicate() { - m := map[token.Position]struct{ used, quiet bool }{} - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - pos := c.prog.Fset().Position(obj.Pos()) - m[pos] = struct{ used, quiet bool }{ - m[pos].used || node.used, - m[pos].quiet || node.quiet, - } - } - - for _, node := range c.graph.nodes { - obj, ok := node.obj.(types.Object) - if !ok { - continue - } - pos := c.prog.Fset().Position(obj.Pos()) - node.used = m[pos].used - node.quiet = m[pos].quiet - } -} - -func (c *Checker) markNodesQuiet() { - for _, node := range c.graph.nodes { - if node.used { - continue - } - if obj, ok := node.obj.(types.Object); ok && !c.checkFlags(obj) { - node.quiet = true - continue - } - c.markObjQuiet(node.obj) - } -} - -func (c *Checker) markObjQuiet(obj interface{}) { - switch obj := obj.(type) { - case *types.Named: - n := obj.NumMethods() - for i := 0; i < n; i++ { - meth := obj.Method(i) - node := c.graph.getNode(meth) - node.quiet = true - c.markObjQuiet(meth.Scope()) - } - case *types.Struct: - n := obj.NumFields() - for i := 0; i < n; i++ { - field := obj.Field(i) - c.graph.nodes[field].quiet = true - } - case *types.Func: - c.markObjQuiet(obj.Scope()) - case *types.Scope: - if obj == nil { - return - } - if obj.Parent() == types.Universe { - return - } - for _, name := range obj.Names() { - v := obj.Lookup(name) - if n, ok := c.graph.nodes[v]; ok { - n.quiet = true - } - } - n := obj.NumChildren() - for i := 0; i < n; i++ { - c.markObjQuiet(obj.Child(i)) - } - } -} - -func getField(typ types.Type, idx int) *types.Var { - switch obj := typ.(type) { - case *types.Pointer: - return getField(obj.Elem(), idx) - case *types.Named: - switch v := obj.Underlying().(type) { - case *types.Struct: - return v.Field(idx) - case *types.Pointer: - return getField(v.Elem(), idx) - default: - panic(fmt.Sprintf("unexpected type %s", typ)) - } - case *types.Struct: - return obj.Field(idx) - } - return nil -} - -func (c *Checker) topmostScope(scope *types.Scope, pkg *types.Package) (ret *types.Scope) { - if top, ok := c.topmostCache[scope]; ok { - return top - } - defer func() { - c.topmostCache[scope] = ret - }() - if scope == pkg.Scope() { - return scope - } - if scope.Parent().Parent() == pkg.Scope() { - return scope - } - return c.topmostScope(scope.Parent(), pkg) -} - -func (c *Checker) printDebugGraph(w io.Writer) { - fmt.Fprintln(w, "digraph {") - fmt.Fprintln(w, "n0 [label = roots]") - for _, node := range c.graph.nodes { - s := fmt.Sprintf("%s (%T)", node.obj, node.obj) - s = strings.Replace(s, "\n", "", -1) - s = strings.Replace(s, `"`, "", -1) - fmt.Fprintf(w, `n%d [label = %q]`, node.n, s) - color := "black" - switch { - case node.used: - color = "green" - case node.quiet: - color = "orange" - case !c.checkFlags(node.obj): - color = "purple" - default: - color = "red" - } - fmt.Fprintf(w, "[color = %s]", color) - fmt.Fprintln(w) - } - - for _, node1 := range c.graph.nodes { - for node2 := range node1.uses { - fmt.Fprintf(w, "n%d -> n%d\n", node1.n, node2.n) - } - } - for _, root := range c.graph.roots { - fmt.Fprintf(w, "n0 -> n%d\n", root.n) - } - fmt.Fprintln(w, "}") -} diff --git a/unused/unused_test.go b/unused/unused_test.go deleted file mode 100644 index 7cc2c4ead..000000000 --- a/unused/unused_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package unused - -// Copyright (c) 2013 The Go Authors. All rights reserved. 
-// -// Use of this source code is governed by a BSD-style -// license that can be found at -// https://developers.google.com/open-source/licenses/bsd. - -import ( - "testing" - - "honnef.co/go/tools/lint/testutil" -) - -func TestAll(t *testing.T) { - checker := NewChecker(CheckAll) - l := NewLintChecker(checker) - testutil.TestAll(t, l, "") -} From 04f07fbb0053c8daec607468fd2d2541d4bcdf51 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 14:41:22 +0200 Subject: [PATCH 095/254] Rename unused2 to unused --- cmd/megacheck/megacheck.go | 2 +- cmd/staticcheck/staticcheck.go | 2 +- cmd/unused/main.go | 2 +- {unused2 => unused}/implements.go | 0 {unused2 => unused}/testdata/src/anonymous/anonymous.go | 0 {unused2 => unused}/testdata/src/blank/blank.go | 0 {unused2 => unused}/testdata/src/cgo/cgo.go | 0 {unused2 => unused}/testdata/src/consts/consts.go | 0 {unused2 => unused}/testdata/src/conversion/conversion.go | 0 {unused2 => unused}/testdata/src/cyclic/cyclic.go | 0 {unused2 => unused}/testdata/src/defer/defer.go | 0 {unused2 => unused}/testdata/src/elem/elem.go | 0 {unused2 => unused}/testdata/src/embedded_call/embedded_call.go | 0 {unused2 => unused}/testdata/src/embedding/embedding.go | 0 .../testdata/src/exported_fields/exported_fields.go | 0 .../testdata/src/exported_fields_main/exported_fields_main.go | 0 .../testdata/src/exported_method_test/exported_method.go | 0 .../testdata/src/exported_method_test/exported_method_test.go | 0 {unused2 => unused}/testdata/src/fields/fields.go | 0 {unused2 => unused}/testdata/src/functions/functions.go | 0 {unused2 => unused}/testdata/src/generated1/generated1.go | 0 {unused2 => unused}/testdata/src/generated2/generated2.go | 0 {unused2 => unused}/testdata/src/interfaces/interfaces.go | 0 {unused2 => unused}/testdata/src/linkname/linkname.go | 0 {unused2 => unused}/testdata/src/main/main.go | 0 {unused2 => unused}/testdata/src/mapslice/mapslice.go | 0 {unused2 => unused}/testdata/src/methods/methods.go | 0 {unused2 => unused}/testdata/src/named/named.go | 0 {unused2 => unused}/testdata/src/nested/nested.go | 0 {unused2 => unused}/testdata/src/nocopy-main/nocopy-main.go | 0 {unused2 => unused}/testdata/src/nocopy/nocopy.go | 0 .../src/pointer-type-embedding/pointer-type-embedding.go | 0 {unused2 => unused}/testdata/src/quiet/quiet.go | 0 {unused2 => unused}/testdata/src/selectors/selectors.go | 0 .../testdata/src/switch_interface/switch_interface.go | 0 {unused2 => unused}/testdata/src/tests/tests.go | 0 {unused2 => unused}/testdata/src/tests/tests_test.go | 0 .../testdata/src/unused-argument/unused-argument.go | 0 {unused2 => unused}/testdata/src/unused_type/unused_type.go | 0 {unused2 => unused}/testdata/src/variables/variables.go | 0 {unused2 => unused}/unused.go | 0 {unused2 => unused}/unused_test.go | 0 42 files changed, 3 insertions(+), 3 deletions(-) rename {unused2 => unused}/implements.go (100%) rename {unused2 => unused}/testdata/src/anonymous/anonymous.go (100%) rename {unused2 => unused}/testdata/src/blank/blank.go (100%) rename {unused2 => unused}/testdata/src/cgo/cgo.go (100%) rename {unused2 => unused}/testdata/src/consts/consts.go (100%) rename {unused2 => unused}/testdata/src/conversion/conversion.go (100%) rename {unused2 => unused}/testdata/src/cyclic/cyclic.go (100%) rename {unused2 => unused}/testdata/src/defer/defer.go (100%) rename {unused2 => unused}/testdata/src/elem/elem.go (100%) rename {unused2 => unused}/testdata/src/embedded_call/embedded_call.go (100%) rename {unused2 => 
unused}/testdata/src/embedding/embedding.go (100%) rename {unused2 => unused}/testdata/src/exported_fields/exported_fields.go (100%) rename {unused2 => unused}/testdata/src/exported_fields_main/exported_fields_main.go (100%) rename {unused2 => unused}/testdata/src/exported_method_test/exported_method.go (100%) rename {unused2 => unused}/testdata/src/exported_method_test/exported_method_test.go (100%) rename {unused2 => unused}/testdata/src/fields/fields.go (100%) rename {unused2 => unused}/testdata/src/functions/functions.go (100%) rename {unused2 => unused}/testdata/src/generated1/generated1.go (100%) rename {unused2 => unused}/testdata/src/generated2/generated2.go (100%) rename {unused2 => unused}/testdata/src/interfaces/interfaces.go (100%) rename {unused2 => unused}/testdata/src/linkname/linkname.go (100%) rename {unused2 => unused}/testdata/src/main/main.go (100%) rename {unused2 => unused}/testdata/src/mapslice/mapslice.go (100%) rename {unused2 => unused}/testdata/src/methods/methods.go (100%) rename {unused2 => unused}/testdata/src/named/named.go (100%) rename {unused2 => unused}/testdata/src/nested/nested.go (100%) rename {unused2 => unused}/testdata/src/nocopy-main/nocopy-main.go (100%) rename {unused2 => unused}/testdata/src/nocopy/nocopy.go (100%) rename {unused2 => unused}/testdata/src/pointer-type-embedding/pointer-type-embedding.go (100%) rename {unused2 => unused}/testdata/src/quiet/quiet.go (100%) rename {unused2 => unused}/testdata/src/selectors/selectors.go (100%) rename {unused2 => unused}/testdata/src/switch_interface/switch_interface.go (100%) rename {unused2 => unused}/testdata/src/tests/tests.go (100%) rename {unused2 => unused}/testdata/src/tests/tests_test.go (100%) rename {unused2 => unused}/testdata/src/unused-argument/unused-argument.go (100%) rename {unused2 => unused}/testdata/src/unused_type/unused_type.go (100%) rename {unused2 => unused}/testdata/src/variables/variables.go (100%) rename {unused2 => unused}/unused.go (100%) rename {unused2 => unused}/unused_test.go (100%) diff --git a/cmd/megacheck/megacheck.go b/cmd/megacheck/megacheck.go index 8bd00b978..309e73c4d 100644 --- a/cmd/megacheck/megacheck.go +++ b/cmd/megacheck/megacheck.go @@ -9,7 +9,7 @@ import ( "honnef.co/go/tools/lint/lintutil" "honnef.co/go/tools/simple" "honnef.co/go/tools/staticcheck" - "honnef.co/go/tools/unused2" + "honnef.co/go/tools/unused" ) func main() { diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 91426819d..3bec82339 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -9,7 +9,7 @@ import ( "honnef.co/go/tools/simple" "honnef.co/go/tools/staticcheck" "honnef.co/go/tools/stylecheck" - "honnef.co/go/tools/unused2" + "honnef.co/go/tools/unused" ) func main() { diff --git a/cmd/unused/main.go b/cmd/unused/main.go index 836f33981..77b953bbb 100644 --- a/cmd/unused/main.go +++ b/cmd/unused/main.go @@ -9,7 +9,7 @@ import ( "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/unused2" + "honnef.co/go/tools/unused" ) var ( diff --git a/unused2/implements.go b/unused/implements.go similarity index 100% rename from unused2/implements.go rename to unused/implements.go diff --git a/unused2/testdata/src/anonymous/anonymous.go b/unused/testdata/src/anonymous/anonymous.go similarity index 100% rename from unused2/testdata/src/anonymous/anonymous.go rename to unused/testdata/src/anonymous/anonymous.go diff --git a/unused2/testdata/src/blank/blank.go b/unused/testdata/src/blank/blank.go similarity 
index 100% rename from unused2/testdata/src/blank/blank.go rename to unused/testdata/src/blank/blank.go diff --git a/unused2/testdata/src/cgo/cgo.go b/unused/testdata/src/cgo/cgo.go similarity index 100% rename from unused2/testdata/src/cgo/cgo.go rename to unused/testdata/src/cgo/cgo.go diff --git a/unused2/testdata/src/consts/consts.go b/unused/testdata/src/consts/consts.go similarity index 100% rename from unused2/testdata/src/consts/consts.go rename to unused/testdata/src/consts/consts.go diff --git a/unused2/testdata/src/conversion/conversion.go b/unused/testdata/src/conversion/conversion.go similarity index 100% rename from unused2/testdata/src/conversion/conversion.go rename to unused/testdata/src/conversion/conversion.go diff --git a/unused2/testdata/src/cyclic/cyclic.go b/unused/testdata/src/cyclic/cyclic.go similarity index 100% rename from unused2/testdata/src/cyclic/cyclic.go rename to unused/testdata/src/cyclic/cyclic.go diff --git a/unused2/testdata/src/defer/defer.go b/unused/testdata/src/defer/defer.go similarity index 100% rename from unused2/testdata/src/defer/defer.go rename to unused/testdata/src/defer/defer.go diff --git a/unused2/testdata/src/elem/elem.go b/unused/testdata/src/elem/elem.go similarity index 100% rename from unused2/testdata/src/elem/elem.go rename to unused/testdata/src/elem/elem.go diff --git a/unused2/testdata/src/embedded_call/embedded_call.go b/unused/testdata/src/embedded_call/embedded_call.go similarity index 100% rename from unused2/testdata/src/embedded_call/embedded_call.go rename to unused/testdata/src/embedded_call/embedded_call.go diff --git a/unused2/testdata/src/embedding/embedding.go b/unused/testdata/src/embedding/embedding.go similarity index 100% rename from unused2/testdata/src/embedding/embedding.go rename to unused/testdata/src/embedding/embedding.go diff --git a/unused2/testdata/src/exported_fields/exported_fields.go b/unused/testdata/src/exported_fields/exported_fields.go similarity index 100% rename from unused2/testdata/src/exported_fields/exported_fields.go rename to unused/testdata/src/exported_fields/exported_fields.go diff --git a/unused2/testdata/src/exported_fields_main/exported_fields_main.go b/unused/testdata/src/exported_fields_main/exported_fields_main.go similarity index 100% rename from unused2/testdata/src/exported_fields_main/exported_fields_main.go rename to unused/testdata/src/exported_fields_main/exported_fields_main.go diff --git a/unused2/testdata/src/exported_method_test/exported_method.go b/unused/testdata/src/exported_method_test/exported_method.go similarity index 100% rename from unused2/testdata/src/exported_method_test/exported_method.go rename to unused/testdata/src/exported_method_test/exported_method.go diff --git a/unused2/testdata/src/exported_method_test/exported_method_test.go b/unused/testdata/src/exported_method_test/exported_method_test.go similarity index 100% rename from unused2/testdata/src/exported_method_test/exported_method_test.go rename to unused/testdata/src/exported_method_test/exported_method_test.go diff --git a/unused2/testdata/src/fields/fields.go b/unused/testdata/src/fields/fields.go similarity index 100% rename from unused2/testdata/src/fields/fields.go rename to unused/testdata/src/fields/fields.go diff --git a/unused2/testdata/src/functions/functions.go b/unused/testdata/src/functions/functions.go similarity index 100% rename from unused2/testdata/src/functions/functions.go rename to unused/testdata/src/functions/functions.go diff --git 
a/unused2/testdata/src/generated1/generated1.go b/unused/testdata/src/generated1/generated1.go similarity index 100% rename from unused2/testdata/src/generated1/generated1.go rename to unused/testdata/src/generated1/generated1.go diff --git a/unused2/testdata/src/generated2/generated2.go b/unused/testdata/src/generated2/generated2.go similarity index 100% rename from unused2/testdata/src/generated2/generated2.go rename to unused/testdata/src/generated2/generated2.go diff --git a/unused2/testdata/src/interfaces/interfaces.go b/unused/testdata/src/interfaces/interfaces.go similarity index 100% rename from unused2/testdata/src/interfaces/interfaces.go rename to unused/testdata/src/interfaces/interfaces.go diff --git a/unused2/testdata/src/linkname/linkname.go b/unused/testdata/src/linkname/linkname.go similarity index 100% rename from unused2/testdata/src/linkname/linkname.go rename to unused/testdata/src/linkname/linkname.go diff --git a/unused2/testdata/src/main/main.go b/unused/testdata/src/main/main.go similarity index 100% rename from unused2/testdata/src/main/main.go rename to unused/testdata/src/main/main.go diff --git a/unused2/testdata/src/mapslice/mapslice.go b/unused/testdata/src/mapslice/mapslice.go similarity index 100% rename from unused2/testdata/src/mapslice/mapslice.go rename to unused/testdata/src/mapslice/mapslice.go diff --git a/unused2/testdata/src/methods/methods.go b/unused/testdata/src/methods/methods.go similarity index 100% rename from unused2/testdata/src/methods/methods.go rename to unused/testdata/src/methods/methods.go diff --git a/unused2/testdata/src/named/named.go b/unused/testdata/src/named/named.go similarity index 100% rename from unused2/testdata/src/named/named.go rename to unused/testdata/src/named/named.go diff --git a/unused2/testdata/src/nested/nested.go b/unused/testdata/src/nested/nested.go similarity index 100% rename from unused2/testdata/src/nested/nested.go rename to unused/testdata/src/nested/nested.go diff --git a/unused2/testdata/src/nocopy-main/nocopy-main.go b/unused/testdata/src/nocopy-main/nocopy-main.go similarity index 100% rename from unused2/testdata/src/nocopy-main/nocopy-main.go rename to unused/testdata/src/nocopy-main/nocopy-main.go diff --git a/unused2/testdata/src/nocopy/nocopy.go b/unused/testdata/src/nocopy/nocopy.go similarity index 100% rename from unused2/testdata/src/nocopy/nocopy.go rename to unused/testdata/src/nocopy/nocopy.go diff --git a/unused2/testdata/src/pointer-type-embedding/pointer-type-embedding.go b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go similarity index 100% rename from unused2/testdata/src/pointer-type-embedding/pointer-type-embedding.go rename to unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go diff --git a/unused2/testdata/src/quiet/quiet.go b/unused/testdata/src/quiet/quiet.go similarity index 100% rename from unused2/testdata/src/quiet/quiet.go rename to unused/testdata/src/quiet/quiet.go diff --git a/unused2/testdata/src/selectors/selectors.go b/unused/testdata/src/selectors/selectors.go similarity index 100% rename from unused2/testdata/src/selectors/selectors.go rename to unused/testdata/src/selectors/selectors.go diff --git a/unused2/testdata/src/switch_interface/switch_interface.go b/unused/testdata/src/switch_interface/switch_interface.go similarity index 100% rename from unused2/testdata/src/switch_interface/switch_interface.go rename to unused/testdata/src/switch_interface/switch_interface.go diff --git a/unused2/testdata/src/tests/tests.go 
b/unused/testdata/src/tests/tests.go similarity index 100% rename from unused2/testdata/src/tests/tests.go rename to unused/testdata/src/tests/tests.go diff --git a/unused2/testdata/src/tests/tests_test.go b/unused/testdata/src/tests/tests_test.go similarity index 100% rename from unused2/testdata/src/tests/tests_test.go rename to unused/testdata/src/tests/tests_test.go diff --git a/unused2/testdata/src/unused-argument/unused-argument.go b/unused/testdata/src/unused-argument/unused-argument.go similarity index 100% rename from unused2/testdata/src/unused-argument/unused-argument.go rename to unused/testdata/src/unused-argument/unused-argument.go diff --git a/unused2/testdata/src/unused_type/unused_type.go b/unused/testdata/src/unused_type/unused_type.go similarity index 100% rename from unused2/testdata/src/unused_type/unused_type.go rename to unused/testdata/src/unused_type/unused_type.go diff --git a/unused2/testdata/src/variables/variables.go b/unused/testdata/src/variables/variables.go similarity index 100% rename from unused2/testdata/src/variables/variables.go rename to unused/testdata/src/variables/variables.go diff --git a/unused2/unused.go b/unused/unused.go similarity index 100% rename from unused2/unused.go rename to unused/unused.go diff --git a/unused2/unused_test.go b/unused/unused_test.go similarity index 100% rename from unused2/unused_test.go rename to unused/unused_test.go From 9527de7b4b62ef30b38fc87fecc6f6739bd9ec6d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 15:18:04 +0200 Subject: [PATCH 096/254] unused: handle tests in package main --- unused/testdata/src/tests-main/main.go | 1 + unused/testdata/src/tests-main/main_test.go | 11 +++++++++++ unused/unused.go | 8 ++++++-- 3 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 unused/testdata/src/tests-main/main.go create mode 100644 unused/testdata/src/tests-main/main_test.go diff --git a/unused/testdata/src/tests-main/main.go b/unused/testdata/src/tests-main/main.go new file mode 100644 index 000000000..06ab7d0f9 --- /dev/null +++ b/unused/testdata/src/tests-main/main.go @@ -0,0 +1 @@ +package main diff --git a/unused/testdata/src/tests-main/main_test.go b/unused/testdata/src/tests-main/main_test.go new file mode 100644 index 000000000..fffcc5f62 --- /dev/null +++ b/unused/testdata/src/tests-main/main_test.go @@ -0,0 +1,11 @@ +package main + +import ( + "testing" +) + +type t1 struct{} + +func TestFoo(t *testing.T) { + _ = t1{} +} diff --git a/unused/unused.go b/unused/unused.go index bf417356c..6774b00a3 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -888,8 +888,12 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { // whole program mode tracks exported identifiers accurately return false } - if g.pkg.Pkg.Name() == "main" { - // exported identifiers in package main can't be imported + + path := g.pkg.Prog.Fset.Position(obj.Pos()).Filename + if g.pkg.Pkg.Name() == "main" && !strings.HasSuffix(path, "_test.go") { + // exported identifiers in package main can't be imported. + // However, test functions can be called, and xtest packages + // even have access to exported identifiers. 
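A hedged sketch of the situation this hunk addresses (file layout and import path are invented): an external test package can reach exported identifiers even when the package under test is package main, so identifiers defined in _test.go files must keep being tracked.

// main.go
package main

func Exported() int { return 42 } // looks unreferenced within the package itself

func main() {}

// main_external_test.go
package main_test

import (
	"testing"

	tool "example.com/cmd/tool" // hypothetical import path of the package above
)

func TestExported(t *testing.T) {
	if tool.Exported() != 42 {
		t.Fatal("unexpected value")
	}
}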
return false } From bf9787c127f89e734632306cbd00b834786386ce Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 19:42:58 +0200 Subject: [PATCH 097/254] unused: handle variable declarations with no explicit type --- unused/testdata/src/variables/variables.go | 19 +++++++++++++++++++ unused/unused.go | 17 ++++++++--------- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/unused/testdata/src/variables/variables.go b/unused/testdata/src/variables/variables.go index 65c44b2e1..d5129a833 100644 --- a/unused/testdata/src/variables/variables.go +++ b/unused/testdata/src/variables/variables.go @@ -3,7 +3,26 @@ package pkg var a byte var b [16]byte +type t1 struct{} +type t2 struct{} +type t3 struct{} +type t4 struct{} +type t5 struct{} + +type iface interface{} + +var x t1 +var y = t2{} +var j, k = t3{}, t4{} +var l iface = t5{} + func Fn() { println(a) _ = b[:] + + _ = x + _ = y + _ = j + _ = k + _ = l } diff --git a/unused/unused.go b/unused/unused.go index 6774b00a3..7e5fbb32f 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1032,16 +1032,15 @@ func (g *Graph) entry(pkg *lint.Pkg) { case token.VAR: for _, spec := range n.Specs { v := spec.(*ast.ValueSpec) - if v.Type == nil { - continue - } - T := lintdsl.TypeOf(g.job, v.Type) - if fn != nil { - g.seeAndUse(T, fn, "var decl") - } else { - g.seeAndUse(T, nil, "var decl") + for _, name := range v.Names { + T := lintdsl.TypeOf(g.job, name) + if fn != nil { + g.seeAndUse(T, fn, "var decl") + } else { + g.seeAndUse(T, nil, "var decl") + } + g.typ(T) } - g.typ(T) } case token.TYPE: for _, spec := range n.Specs { From 6da49aace82e94acd4695e7867d968dda0e456a6 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 20:01:41 +0200 Subject: [PATCH 098/254] unused: handle type aliases --- unused/testdata/src/alias/alias.go | 14 ++++++++++++++ unused/unused.go | 9 +++++++++ 2 files changed, 23 insertions(+) create mode 100644 unused/testdata/src/alias/alias.go diff --git a/unused/testdata/src/alias/alias.go b/unused/testdata/src/alias/alias.go new file mode 100644 index 000000000..1f90190d4 --- /dev/null +++ b/unused/testdata/src/alias/alias.go @@ -0,0 +1,14 @@ +package main + +type t1 struct{} +type t2 struct{} // MATCH "t2 is unused" +type t3 struct{} + +type alias1 = t1 +type alias2 = t2 // MATCH "alias2 is unused" +type alias3 = t3 + +func main() { + var _ alias1 + var _ t3 +} diff --git a/unused/unused.go b/unused/unused.go index 7e5fbb32f..437dfb3ac 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -36,6 +36,9 @@ import ( - named types use: - (2.1) exported methods - (2.2) the type they're based on + - (2.3) all their aliases. we can't easily track uses of aliases + because go/types turns them into uses of the aliased types. assume + that if a type is used, so are all of its aliases. - variables and constants use: - their types @@ -1058,6 +1061,12 @@ func (g *Graph) entry(pkg *lint.Pkg) { g.use(T, obj, "type") g.typ(obj.Type()) g.typ(T) + + if v.Assign != 0 { + aliasFor := obj.(*types.TypeName).Type() + // (2.3) named types use all their aliases. 
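As a hedged aside on why alias uses are hard to track (a standalone sketch, not part of the change; it reflects the go/types behavior current when this patch was written, before materialized alias types): the alias object carries no type of its own, so its uses look like uses of the aliased type.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p
type t1 struct{}
type alias1 = t1
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	conf := types.Config{Importer: importer.Default()}
	pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	obj := pkg.Scope().Lookup("alias1").(*types.TypeName)
	fmt.Println(obj.IsAlias()) // true
	fmt.Println(obj.Type())    // p.t1: the alias resolves to the aliased type
}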
we can't easily track uses of aliases + g.seeAndUse(obj, aliasFor, "alias") + } } } default: From 30f18e47f463a8aceb58b1bca5c97d6763d7bed0 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 21:34:57 +0200 Subject: [PATCH 099/254] deprecated: update list Closes gh-437 --- deprecated/stdlib.go | 142 ++++++++++++++++++++++++++++++------------- 1 file changed, 100 insertions(+), 42 deletions(-) diff --git a/deprecated/stdlib.go b/deprecated/stdlib.go index b6b217c3e..5d8ce186b 100644 --- a/deprecated/stdlib.go +++ b/deprecated/stdlib.go @@ -9,46 +9,104 @@ var Stdlib = map[string]Deprecation{ "image/jpeg.Reader": {4, 0}, // FIXME(dh): AllowBinary isn't being detected as deprecated // because the comment has a newline right after "Deprecated:" - "go/build.AllowBinary": {7, 7}, - "(archive/zip.FileHeader).CompressedSize": {1, 1}, - "(archive/zip.FileHeader).UncompressedSize": {1, 1}, - "(go/doc.Package).Bugs": {1, 1}, - "os.SEEK_SET": {7, 7}, - "os.SEEK_CUR": {7, 7}, - "os.SEEK_END": {7, 7}, - "(net.Dialer).Cancel": {7, 7}, - "runtime.CPUProfile": {9, 0}, - "compress/flate.ReadError": {6, 6}, - "compress/flate.WriteError": {6, 6}, - "path/filepath.HasPrefix": {0, 0}, - "(net/http.Transport).Dial": {7, 7}, - "(*net/http.Transport).CancelRequest": {6, 5}, - "net/http.ErrWriteAfterFlush": {7, 0}, - "net/http.ErrHeaderTooLong": {8, 0}, - "net/http.ErrShortBody": {8, 0}, - "net/http.ErrMissingContentLength": {8, 0}, - "net/http/httputil.ErrPersistEOF": {0, 0}, - "net/http/httputil.ErrClosed": {0, 0}, - "net/http/httputil.ErrPipeline": {0, 0}, - "net/http/httputil.ServerConn": {0, 0}, - "net/http/httputil.NewServerConn": {0, 0}, - "net/http/httputil.ClientConn": {0, 0}, - "net/http/httputil.NewClientConn": {0, 0}, - "net/http/httputil.NewProxyClientConn": {0, 0}, - "(net/http.Request).Cancel": {7, 7}, - "(text/template/parse.PipeNode).Line": {1, 1}, - "(text/template/parse.ActionNode).Line": {1, 1}, - "(text/template/parse.BranchNode).Line": {1, 1}, - "(text/template/parse.TemplateNode).Line": {1, 1}, - "database/sql/driver.ColumnConverter": {9, 9}, - "database/sql/driver.Execer": {8, 8}, - "database/sql/driver.Queryer": {8, 8}, - "(database/sql/driver.Conn).Begin": {8, 8}, - "(database/sql/driver.Stmt).Exec": {8, 8}, - "(database/sql/driver.Stmt).Query": {8, 8}, - "syscall.StringByteSlice": {1, 1}, - "syscall.StringBytePtr": {1, 1}, - "syscall.StringSlicePtr": {1, 1}, - "syscall.StringToUTF16": {1, 1}, - "syscall.StringToUTF16Ptr": {1, 1}, + "go/build.AllowBinary": {7, 7}, + "(archive/zip.FileHeader).CompressedSize": {1, 1}, + "(archive/zip.FileHeader).UncompressedSize": {1, 1}, + "(archive/zip.FileHeader).ModifiedTime": {10, 10}, + "(archive/zip.FileHeader).ModifiedDate": {10, 10}, + "(*archive/zip.FileHeader).ModTime": {10, 10}, + "(*archive/zip.FileHeader).SetModTime": {10, 10}, + "(go/doc.Package).Bugs": {1, 1}, + "os.SEEK_SET": {7, 7}, + "os.SEEK_CUR": {7, 7}, + "os.SEEK_END": {7, 7}, + "(net.Dialer).Cancel": {7, 7}, + "runtime.CPUProfile": {9, 0}, + "compress/flate.ReadError": {6, 6}, + "compress/flate.WriteError": {6, 6}, + "path/filepath.HasPrefix": {0, 0}, + "(net/http.Transport).Dial": {7, 7}, + "(*net/http.Transport).CancelRequest": {6, 5}, + "net/http.ErrWriteAfterFlush": {7, 0}, + "net/http.ErrHeaderTooLong": {8, 0}, + "net/http.ErrShortBody": {8, 0}, + "net/http.ErrMissingContentLength": {8, 0}, + "net/http/httputil.ErrPersistEOF": {0, 0}, + "net/http/httputil.ErrClosed": {0, 0}, + "net/http/httputil.ErrPipeline": {0, 0}, + "net/http/httputil.ServerConn": {0, 0}, + 
"net/http/httputil.NewServerConn": {0, 0}, + "net/http/httputil.ClientConn": {0, 0}, + "net/http/httputil.NewClientConn": {0, 0}, + "net/http/httputil.NewProxyClientConn": {0, 0}, + "(net/http.Request).Cancel": {7, 7}, + "(text/template/parse.PipeNode).Line": {1, 1}, + "(text/template/parse.ActionNode).Line": {1, 1}, + "(text/template/parse.BranchNode).Line": {1, 1}, + "(text/template/parse.TemplateNode).Line": {1, 1}, + "database/sql/driver.ColumnConverter": {9, 9}, + "database/sql/driver.Execer": {8, 8}, + "database/sql/driver.Queryer": {8, 8}, + "(database/sql/driver.Conn).Begin": {8, 8}, + "(database/sql/driver.Stmt).Exec": {8, 8}, + "(database/sql/driver.Stmt).Query": {8, 8}, + "syscall.StringByteSlice": {1, 1}, + "syscall.StringBytePtr": {1, 1}, + "syscall.StringSlicePtr": {1, 1}, + "syscall.StringToUTF16": {1, 1}, + "syscall.StringToUTF16Ptr": {1, 1}, + "(*regexp.Regexp).Copy": {12, 12}, + "(archive/tar.Header).Xattrs": {10, 10}, + "archive/tar.TypeRegA": {11, 1}, + "go/types.NewInterface": {11, 11}, + "(*go/types.Interface).Embedded": {11, 11}, + "go/importer.For": {12, 12}, + "encoding/json.InvalidUTF8Error": {2, 2}, + "encoding/json.UnmarshalFieldError": {2, 2}, + "encoding/csv.ErrTrailingComma": {2, 2}, + "(encoding/csv.Reader).TrailingComma": {2, 2}, + "(net.Dialer).DualStack": {12, 12}, + "net/http.ErrUnexpectedTrailer": {12, 12}, + "net/http.CloseNotifier": {11, 7}, + "net/http.ProtocolError": {8, 8}, + "(crypto/x509.CertificateRequest).Attributes": {5, 3}, + // This function has no alternative, but also no purpose. + "(*crypto/rc4.Cipher).Reset": {12, 0}, + "(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7}, + + // All of these have been deprecated in favour of external libraries + "syscall.AttachLsf": {7, 0}, + "syscall.DetachLsf": {7, 0}, + "syscall.LsfSocket": {7, 0}, + "syscall.SetLsfPromisc": {7, 0}, + "syscall.LsfJump": {7, 0}, + "syscall.LsfStmt": {7, 0}, + "syscall.BpfStmt": {7, 0}, + "syscall.BpfJump": {7, 0}, + "syscall.BpfBuflen": {7, 0}, + "syscall.SetBpfBuflen": {7, 0}, + "syscall.BpfDatalink": {7, 0}, + "syscall.SetBpfDatalink": {7, 0}, + "syscall.SetBpfPromisc": {7, 0}, + "syscall.FlushBpf": {7, 0}, + "syscall.BpfInterface": {7, 0}, + "syscall.SetBpfInterface": {7, 0}, + "syscall.BpfTimeout": {7, 0}, + "syscall.SetBpfTimeout": {7, 0}, + "syscall.BpfStats": {7, 0}, + "syscall.SetBpfImmediate": {7, 0}, + "syscall.SetBpf": {7, 0}, + "syscall.CheckBpfVersion": {7, 0}, + "syscall.BpfHeadercmpl": {7, 0}, + "syscall.SetBpfHeadercmpl": {7, 0}, + "syscall.RouteRIB": {8, 0}, + "syscall.RoutingMessage": {8, 0}, + "syscall.RouteMessage": {8, 0}, + "syscall.InterfaceMessage": {8, 0}, + "syscall.InterfaceAddrMessage": {8, 0}, + "syscall.ParseRoutingMessage": {8, 0}, + "syscall.ParseRoutingSockaddr": {8, 0}, + "InterfaceAnnounceMessage": {7, 0}, + "InterfaceMulticastAddrMessage": {7, 0}, + "syscall.FormatMessage": {5, 0}, } From 8c27ffd45e1f398ceac2c21aa907aa22245c1538 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 22:06:40 +0200 Subject: [PATCH 100/254] unused: don't crash on aliases to builtins --- unused/testdata/src/alias/alias.go | 1 + unused/unused.go | 12 +++++++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/unused/testdata/src/alias/alias.go b/unused/testdata/src/alias/alias.go index 1f90190d4..f181c173a 100644 --- a/unused/testdata/src/alias/alias.go +++ b/unused/testdata/src/alias/alias.go @@ -7,6 +7,7 @@ type t3 struct{} type alias1 = t1 type alias2 = t2 // MATCH "alias2 is unused" type alias3 = t3 +type alias4 = 
int func main() { var _ alias1 diff --git a/unused/unused.go b/unused/unused.go index 437dfb3ac..ba81720bd 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1065,7 +1065,17 @@ func (g *Graph) entry(pkg *lint.Pkg) { if v.Assign != 0 { aliasFor := obj.(*types.TypeName).Type() // (2.3) named types use all their aliases. we can't easily track uses of aliases - g.seeAndUse(obj, aliasFor, "alias") + if isIrrelevant(aliasFor) { + // We do not track the type this is an + // alias for (for example builtins), so + // just mark the alias used. + // + // FIXME(dh): what about aliases declared inside functions? + g.use(obj, nil, "alias") + } else { + g.see(aliasFor) + g.seeAndUse(obj, aliasFor, "alias") + } } } } From ab6aeea0b4f2f117e80032f384ab782fb85ad0ce Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 10 Apr 2019 22:07:54 +0200 Subject: [PATCH 101/254] unused: fix handling of go:linkname Our implementation had two flaws, one of them fundamental: 1. we only respected go:linkname for functions, not variables. 2. the directive can appear anywhere in the code, it doesn't have to be attached to the object it affects. --- unused/testdata/src/linkname/linkname.go | 23 +++++++++++++++- unused/unused.go | 34 +++++++++++++++++++++--- 2 files changed, 53 insertions(+), 4 deletions(-) diff --git a/unused/testdata/src/linkname/linkname.go b/unused/testdata/src/linkname/linkname.go index b5268faf5..1c43c35da 100644 --- a/unused/testdata/src/linkname/linkname.go +++ b/unused/testdata/src/linkname/linkname.go @@ -2,5 +2,26 @@ package pkg import _ "unsafe" -//go:linkname foo bar +//other:directive +//go:linkname ol other4 + +//go:linkname foo other1 func foo() {} + +//go:linkname bar other2 +var bar int + +var ( + baz int // MATCH "baz is unused" + //go:linkname qux other3 + qux int +) + +//go:linkname fisk other3 +var ( + fisk int +) + +var ol int + +//go:linkname doesnotexist other5 diff --git a/unused/unused.go b/unused/unused.go index ba81720bd..3dbf29f33 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -913,6 +913,37 @@ func (g *Graph) entry(pkg *lint.Pkg) { // TODO rename Entry g.pkg = pkg.SSA + for _, f := range pkg.Syntax { + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.HasPrefix(c.Text, "//go:linkname ") { + // FIXME(dh): we're looking at all comments. The + // compiler only looks at comments in the + // left-most column. The intention probably is to + // only look at top-level comments. 
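As a hedged aside: the directive parsed by the code that follows has the form //go:linkname localname importpath.name, it takes effect only in files that import unsafe, and the checker below binds a symbol only when it sees exactly three fields, hence the len(fields) == 3 check. A sketch of typical usage (the target symbol is purely an illustration):

package pkg

import _ "unsafe" // go:linkname requires importing unsafe

// runtime.nanotime is illustrative; an empty .s file in the package is
// typically needed to permit the bodyless declaration.
//
//go:linkname nanotime runtime.nanotime
func nanotime() int64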
+ + // (1.8) packages use symbols linked via go:linkname + fields := strings.Fields(c.Text) + if len(fields) == 3 { + if m, ok := pkg.SSA.Members[fields[1]]; ok { + var obj interface{} + switch m := m.(type) { + case *ssa.Global: + obj = m.Object() + case *ssa.Function: + obj = m + default: + panic(fmt.Sprintf("unhandled type: %T", m)) + } + assert(obj != nil) + g.seeAndUse(obj, nil, "go:linkname") + } + } + } + } + } + } + surroundingFunc := func(obj types.Object) *ssa.Function { scope := obj.Parent() for scope != nil { @@ -1122,9 +1153,6 @@ func (g *Graph) entry(pkg *lint.Pkg) { if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { // (1.6) packages use functions exported to cgo g.use(m, nil, "cgo exported") - } else if strings.HasPrefix(cmt.Text, "//go:linkname ") { - // (1.8) packages use symbols linked via go:linkname - g.use(m, nil, "go:linkname") } } } From 0e5f7e4d37794123e77f619a0d455574585124f0 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 11 Apr 2019 00:08:42 +0200 Subject: [PATCH 102/254] Add flag for printing documentation about checks Updates gh-436 --- lint/lint.go | 1 + lint/lintutil/util.go | 27 ++ simple/doc.go | 426 +++++++++++++++++++++++++ simple/lint.go | 58 ++-- staticcheck/doc.go | 720 ++++++++++++++++++++++++++++++++++++++++++ staticcheck/lint.go | 150 ++++----- stylecheck/doc.go | 170 ++++++++++ stylecheck/lint.go | 36 +-- 8 files changed, 1466 insertions(+), 122 deletions(-) create mode 100644 simple/doc.go create mode 100644 staticcheck/doc.go create mode 100644 stylecheck/doc.go diff --git a/lint/lint.go b/lint/lint.go index 038048899..2de8037b2 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -165,6 +165,7 @@ type Check struct { Fn Func ID string FilterGenerated bool + Doc string } // A Linter lints Go source code. 
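Presumably each check registration is wired to one of these strings through the new Doc field; since the simple/lint.go hunk is not shown in this excerpt, the following registration is only a hedged sketch (ID and function name illustrative):

	{ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect, Doc: docS1000},

after which the new flag added below prints that text:

	$ staticcheck -explain S1000
	Use plain channel send or receive
	...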
diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 1142aa04d..26bbe6bf5 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -109,6 +109,7 @@ func FlagSet(name string) *flag.FlagSet { flags.Bool("version", false, "Print version and exit") flags.Bool("show-ignored", false, "Don't filter ignored problems") flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") + flags.String("explain", "", "Print description of `check`") flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently") flags.Bool("debug.print-stats", false, "Print debug statistics") @@ -131,6 +132,17 @@ func FlagSet(name string) *flag.FlagSet { return flags } +func findCheck(cs []lint.Checker, check string) (lint.Check, bool) { + for _, c := range cs { + for _, cc := range c.Checks() { + if cc.ID == check { + return cc, true + } + } + } + return lint.Check{}, false +} + func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) @@ -139,6 +151,7 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string) printVersion := fs.Lookup("version").Value.(flag.Getter).Get().(bool) showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) + explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string) maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int) printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool) @@ -175,6 +188,20 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { exit(0) } + if explain != "" { + check, ok := findCheck(cs, explain) + if !ok { + fmt.Fprintln(os.Stderr, "Couldn't find check", explain) + exit(1) + } + if check.Doc == "" { + fmt.Fprintln(os.Stderr, explain, "has no documentation") + exit(1) + } + fmt.Println(check.Doc) + exit(0) + } + ps, err := Lint(cs, fs.Args(), &Options{ Tags: strings.Fields(tags), LintTests: tests, diff --git a/simple/doc.go b/simple/doc.go new file mode 100644 index 000000000..ea437da56 --- /dev/null +++ b/simple/doc.go @@ -0,0 +1,426 @@ +package simple + +var docS1000 = `Use plain channel send or receive + +Select statements with a single case can be replaced with a simple send or receive. + +Before: + +select { +case x := <-ch: + fmt.Println(x) +} + +After: + +x := <-ch +fmt.Println(x) + +Available since + 2017.1 +` + +var docS1001 = `Replace with copy() + +Use copy() for copying elements from one slice to another. + +Before: + +for i, x := range src { + dst[i] = x +} + +After: + +copy(dst, src) + +Available since + 2017.1 +` + +var docS1002 = `Omit comparison with boolean constant + +Before: + +if x == true {} + +After: + +if x {} + +Available since + 2017.1 +` + +var docS1003 = `Replace with strings.Contains + +Before: + +if strings.Index(x, y) != -1 {} + +After: + +if strings.Contains(x, y) {} + +Available since + 2017.1 +` + +var docS1004 = `Replace with bytes.Equal + +Before: + +if bytes.Compare(x, y) == 0 {} + +After: + +if bytes.Equal(x, y) {} + +Available since + 2017.1 +` + +var docS1005 = `Drop unnecessary use of the blank identifier + +In many cases, assigning to the blank identifier is unnecessary. + +Before: + +for _ = range s {} +x, _ = someMap[key] +_ = <-ch + +After: + +for range s{} +x = someMap[key] +<-ch + +Available since + 2017.1 +` + +var docS1006 = `Replace with for { ... 
}
+
+For infinite loops, using for { ... } is the most idiomatic choice.
+
+Available since
+    2017.1
+`
+
+var docS1007 = `Simplify regular expression by using raw string literal
+
+Raw string literals use ` + "`" + ` instead of " and do not support any escape sequences. This means that the backslash (\) can be used freely, without the need of escaping.
+
+Since regular expressions have their own escape sequences, raw strings can improve their readability.
+
+Before:
+
+regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z")
+
+After:
+
+regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `)
+
+Available since
+    2017.1
+`
+
+var docS1008 = `Simplify returning boolean expression
+
+Before:
+
+if <expr> {
+	return true
+}
+return false
+
+After:
+
+return <expr>
+
+Available since
+    2017.1
+`
+
+var docS1009 = `Omit redundant nil check on slices
+
+The len function is defined for all slices, even nil ones, which have a length of zero. It is not necessary to check if a slice is not nil before checking that its length is not zero.
+
+Before:
+
+if x != nil && len(x) != 0 {}
+
+After:
+
+if len(x) != 0 {}
+
+Available since
+    2017.1
+`
+
+var docS1010 = `Omit default slice index
+
+When slicing, the second index defaults to the length of the value, making s[n:len(s)] and s[n:] equivalent.
+
+Available since
+    2017.1
+`
+
+var docS1011 = `Use a single append to concatenate two slices
+
+Before:
+
+for _, e := range y {
+	x = append(x, e)
+}
+
+After:
+
+x = append(x, y...)
+
+Available since
+    2017.1
+`
+
+var docS1012 = `Replace with time.Since(x)
+
+The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read.
+
+Before:
+
+time.Now().Sub(x)
+
+After:
+
+time.Since(x)
+
+Available since
+    2017.1
+`
+
+var docS1016 = `Use a type conversion
+
+Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field individually.
+
+Before:
+
+var x T1
+y := T2{
+	Field1: x.Field1,
+	Field2: x.Field2,
+}
+
+After:
+
+var x T1
+y := T2(x)
+
+Available since
+    2017.1
+`
+
+var docS1017 = `Replace with strings.TrimPrefix
+
+Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix reduces complexity, and avoids common bugs, such as off-by-one mistakes.
+
+Before:
+
+if strings.HasPrefix(str, prefix) {
+	str = str[len(prefix):]
+}
+
+After:
+
+str = strings.TrimPrefix(str, prefix)
+
+Available since
+    2017.1
+`
+
+var docS1018 = `Replace with copy()
+
+copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice.
+
+Before:
+
+for i := 0; i < n; i++ {
+	bs[i] = bs[offset+i]
+}
+
+After:
+
+copy(bs[:n], bs[offset:])
+
+Available since
+    2017.1
+`
+
+var docS1019 = `Simplify make call
+
+The make function has default values for the length and capacity arguments. For channels and maps, the length defaults to zero. Additionally, for slices the capacity defaults to the length.
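+
+For example, the following pairs of calls are equivalent (an illustrative, not exhaustive, list):
+
+Before:
+
+make([]T, x, x)
+make(chan int, 0)
+
+After:
+
+make([]T, x)
+make(chan int)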
+ +Available since + 2017.1 +` + +var docS1020 = `Omit redundant nil check in type assertion + +Before: + +if _, ok := i.(T); ok && i != nil {} + +After: + +if _, ok := i.(T); ok {} + +Available since + 2017.1 +` + +var docS1021 = `Merge variable declaration and assignment + +Before: + +var x uint +x = 1 + +After: + +var x uint = 1 + +Available since + 2017.1 +` +var docS1023 = `Omit redundant control flow + +Functions that have no return value do not need a return statement as the final statement of the function. + +Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final statement in a case block. + +Available since + 2017.1 +` + +var docS1024 = `Replace with time.Until(x) + +The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read. + +Before: + +x.Sub(time.Now()) + +After: + +time.Until(x) + +Available since + 2017.1 +` + +var docS1025 = `Don't use fmt.Sprintf("%s", x) unnecessarily + +In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used directly. + +Given the following shared definitions + +type T1 string +type T2 int + +func (T2) String() string { return "Hello, world" } + +var x string +var y T1 +var z T2 + +we can simplify the following + +fmt.Sprintf("%s", x) +fmt.Sprintf("%s", y) +fmt.Sprintf("%s", z) + +to + +x +string(y) +z.String() + +Available since + 2017.1 +` + +var docS1028 = `replace with fmt.Errorf + +Before: + +errors.New(fmt.Sprintf(...)) + +After: + +fmt.Errorf(...) + +Available since + 2017.1 +` + +var docS1029 = `Range over the string + +Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new slice, the size of which depends on the length of the string. + +Before: + +for _, r := range []rune(s) {} + +After: + +for _, r := range s {} + +Available since + 2017.1 +` + +var docS1030 = `Use bytes.Buffer.String or bytes.Buffer.Bytes + +bytes.Buffer has both a String and a Bytes method. It is never necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply use the other method. + +Available since + 2017.1 +` + +var docS1031 = `Omit redundant nil check around loop + +You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop unnecessary. + +Before: + +if s != nil { + for _, x := range s { + ... + } +} + +After: + +for _, x := range s { + ... +} + +Available since + 2017.1 +` + +var docS1032 = `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x) + +The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)). 
+ +Before: + +sort.Sort(sort.StringSlice(x)) + +After: + +sort.Strings(x) + +Available since + 2019.1 +` diff --git a/simple/lint.go b/simple/lint.go index 6376a8c12..37103f5fc 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -36,35 +36,35 @@ func (c *Checker) Init(prog *lint.Program) {} func (c *Checker) Checks() []lint.Check { return []lint.Check{ - {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect}, - {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy}, - {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp}, - {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains}, - {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare}, - {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank}, - {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue}, - {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw}, - {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn}, - {ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen}, - {ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing}, - {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend}, - {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince}, - {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion}, - {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim}, - {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide}, - {ID: "S1019", FilterGenerated: true, Fn: c.LintMakeLenCap}, - {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil}, - {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign}, - {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak}, - {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil}, - {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf}, - {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf}, - {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes}, - {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions}, - {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange}, - {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers}, - {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete}, - {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch}, + {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect, Doc: docS1000}, + {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy, Doc: docS1001}, + {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp, Doc: docS1002}, + {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains, Doc: docS1003}, + {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare, Doc: docS1004}, + {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank, Doc: docS1005}, + {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue, Doc: docS1006}, + {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw, Doc: docS1007}, + {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn, Doc: docS1008}, + {ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen, Doc: docS1009}, + {ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing, Doc: docS1010}, + {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend, Doc: docS1011}, + {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince, Doc: docS1012}, + {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion, Doc: docS1016}, + {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim, Doc: docS1017}, + {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide, Doc: docS1018}, + {ID: "S1019", FilterGenerated: true, Fn: 
c.LintMakeLenCap, Doc: docS1019}, + {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil, Doc: docS1020}, + {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign, Doc: docS1021}, + {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak, Doc: docS1023}, + {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil, Doc: docS1024}, + {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf, Doc: docS1025}, + {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf, Doc: docS1028}, + {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes, Doc: docS1029}, + {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions, Doc: docS1030}, + {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange, Doc: docS1031}, + {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers, Doc: docS1032}, + {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete, Doc: ``}, + {ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch, Doc: ``}, } } diff --git a/staticcheck/doc.go b/staticcheck/doc.go new file mode 100644 index 000000000..76f980c69 --- /dev/null +++ b/staticcheck/doc.go @@ -0,0 +1,720 @@ +package staticcheck + +var docSA1000 = `Invalid regular expression + +Available since + 2017.1 +` + +var docSA1001 = `Invalid template + +Available since + 2017.1 +` + +var docSA1002 = `Invalid format in time.Parse + +Available since + 2017.1 +` + +var docSA1003 = `Unsupported argument to functions in encoding/binary + +Available since + 2017.1 +` + +var docSA1004 = `Suspiciously small untyped constant in time.Sleep + +Available since + 2017.1 +` + +var docSA1005 = `Invalid first argument to exec.Command + +os/exec runs programs directly (using variants of the fork and exec +system calls on Unix systems). This shouldn't be confused with running +a command in a shell. The shell will allow for features such as input +redirection, pipes, and general scripting. The shell is also +responsible for splitting the user's input into a program name and its +arguments. For example, the equivalent to + + ls / /tmp + +would be + + exec.Command("ls", "/", "/tmp") + +If you want to run a command in a shell, consider using something like +the following – but be aware that not all systems, particularly +Windows, will have a /bin/sh program: + + exec.Command("/bin/sh", "-c", "ls | grep Awesome") + +Available since + 2017.1 +` + +var docSA1006 = `Printf with dynamic first argument and no further arguments + +Using fmt.Printf with a dynamic first argument can lead to unexpected +output. The first argument is a format string, where certain character +combinations have special meaning. If, for example, a user were to +enter a string such as + + Interest rate: 5% + +and you printed it with + +fmt.Printf(s) + +it would lead to the following output: + + Interest rate: 5%!(NOVERB). + +Similarly, forming the first parameter via string concatenation with +user input should be avoided for the same reason. When printing user +input, either use a variant of fmt.Print, or use the %s Printf verb +and pass the string as an argument. 
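+
+For example, both of the following print a user-provided string s
+without interpreting it as a format string (illustrative):
+
+    fmt.Print(s)
+    fmt.Printf("%s", s)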
+ +Available since + 2017.1 +` + +var docSA1007 = `Invalid URL in net/url.Parse + +Available since + 2017.1 +` + +var docSA1008 = `Non-canonical key in http.Header map + +Available since + 2017.1 +` + +var docSA1010 = `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results + +Available since + 2017.1 +` + +var docSA1011 = `Various methods in the strings package expect valid UTF-8, but invalid input is provided + +Available since + 2017.1 +` + +var docSA1012 = `A nil context.Context is being passed to a function, consider using context.TODO instead + +Available since + 2017.1 +` + +var docSA1013 = `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second + +Available since + 2017.1 +` + +var docSA1014 = `Non-pointer value passed to Unmarshal or Decode + +Available since + 2017.1 +` + +var docSA1015 = `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions + +Available since + 2017.1 +` + +var docSA1016 = `Trapping a signal that cannot be trapped + +Available since + 2017.1 +` + +var docSA1017 = `Channels used with signal.Notify should be buffered + +Available since + 2017.1 +` + +var docSA1018 = `strings.Replace called with n == 0, which does nothing + +Available since + 2017.1 +` + +var docSA1019 = `Using a deprecated function, variable, constant or field + +Available since + 2017.1 +` + +var docSA1020 = `Using an invalid host:port pair with a net.Listen-related function + +Available since + 2017.1 +` + +var docSA1021 = `Using bytes.Equal to compare two net.IP + +A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The +length of the slice for an IPv4 address, however, can be either 4 or +16 bytes long, using different ways of representing IPv4 addresses. In +order to correctly compare two net.IPs, the net.IP.Equal method should +be used, as it takes both representations into account. + +Available since + 2017.1 +` + +var docSA1023 = `Modifying the buffer in an io.Writer implementation + +Available since + 2017.1 +` + +var docSA1024 = `A string cutset contains duplicate characters, suggesting TrimPrefix or TrimSuffix should be used instead of TrimLeft or TrimRight + +Available since + 2017.1 +` + +var docSA1025 = `it is not possible to use Reset's return value correctly + +Available since + 2019.1 +` + +var docSA2000 = `sync.WaitGroup.Add called inside the goroutine, leading to a race condition + +Available since + 2017.1 +` + +var docSA2001 = `Empty critical section, did you mean to defer the unlock? 
+ +Available since + 2017.1 +` + +var docSA2002 = `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed + +Available since + 2017.1 +` + +var docSA2003 = `Deferred Lock right after locking, likely meant to defer Unlock instead + +Available since + 2017.1 +` + +var docSA3000 = `TestMain doesn't call os.Exit, hiding test failures + +Available since + 2017.1 +` + +var docSA3001 = `Assigning to b.N in benchmarks distorts the results + +Available since + 2017.1 +` + +var docSA4000 = `Boolean expression has identical expressions on both sides + +Available since + 2017.1 +` + +var docSA4001 = `&*x gets simplified to x, it does not copy x + +Available since + 2017.1 +` + +var docSA4002 = `Comparing strings with known different sizes has predictable results + +Available since + 2017.1 +` + +var docSA4003 = `Comparing unsigned values against negative values is pointless + +Available since + 2017.1 +` + +var docSA4004 = `The loop exits unconditionally after one iteration + +Available since + 2017.1 +` + +var docSA4005 = `Field assignment that will never be observed. Did you mean to use a pointer receiver? + +Available since + 2017.1 +` + +var docSA4006 = `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code? + +Available since + 2017.1 +` + +var docSA4008 = `The variable in the loop condition never changes, are you incrementing the wrong variable? + +Available since + 2017.1 +` + +var docSA4009 = `A function argument is overwritten before its first use + +Available since + 2017.1 +` + +var docSA4010 = `The result of append will never be observed anywhere + +Available since + 2017.1 +` + +var docSA4011 = `Break statement with no effect. Did you mean to break out of an outer loop? + +Available since + 2017.1 +` + +var docSA4012 = `Comparing a value against NaN even though no value is equal to NaN + +Available since + 2017.1 +` + +var docSA4013 = `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo. + +Available since + 2017.1 +` + +var docSA4014 = `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either + +Available since + 2017.1 +` + +var docSA4015 = `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful + +Available since + 2017.1 +` + +var docSA4016 = `Certain bitwise operations, such as x ^ 0, do not do anything useful + +Available since + 2017.1 +` + +var docSA4017 = `A pure function's return value is discarded, making the call pointless + +Available since + 2017.1 +` + +var docSA4018 = `Self-assignment of variables + +Available since + 2017.1 +` + +var docSA4019 = `Multiple, identical build constraints in the same file + +Available since + 2017.1 +` + +var docSA4020 = `Unreachable case clause in a type switch + +In a type switch like the following + + type T struct{} + func (T) Read(b []byte) (int, error) { return 0, nil } + + var v interface{} = T{} + + switch v.(type) { + case io.Reader: + // ... + case T: + // unreachable + } + +the second case clause can never be reached because T implements +io.Reader and case clauses are evaluated in source order. + +Another example: + + type T struct{} + func (T) Read(b []byte) (int, error) { return 0, nil } + func (T) Close() error { return nil } + + var v interface{} = T{} + + switch v.(type) { + case io.Reader: + // ... 
+    case io.ReadCloser:
+        // unreachable
+    }
+
+Even though T has a Close method and thus implements io.ReadCloser,
+io.Reader will always match first. The method set of io.Reader is a
+subset of io.ReadCloser. Thus it is impossible to match the second
+case without matching the first case.
+
+
+Structurally equivalent interfaces
+
+A special case of the previous example is structurally identical
+interfaces. Given these declarations
+
+    type T error
+    type V error
+
+    func doSomething() error {
+        err, ok := doAnotherThing()
+        if ok {
+            return T(err)
+        }
+
+        return V(err)
+    }
+
+the following type switch will have an unreachable case clause:
+
+    switch doSomething().(type) {
+    case T:
+        // ...
+    case V:
+        // unreachable
+    }
+
+T will always match before V because they are structurally equivalent
+and therefore doSomething()'s return value implements both.
+
+Available since
+    Unreleased
+`
+
+var docSA4021 = `x = append(y) is equivalent to x = y
+
+Available since
+    Unreleased
+`
+
+var docSA5000 = `Assignment to nil map
+
+Available since
+    2017.1
+`
+
+var docSA5001 = `Deferring Close before checking for a possible error
+
+Available since
+    2017.1
+`
+
+var docSA5002 = `The empty for loop (for {}) spins and can block the scheduler
+
+Available since
+    2017.1
+`
+
+var docSA5003 = `Defers in infinite loops will never execute
+
+Available since
+    2017.1
+`
+
+var docSA5004 = `for { select { ... with an empty default branch spins
+
+Available since
+    2017.1
+`
+
+var docSA5005 = `The finalizer references the finalized object, preventing garbage collection
+
+A finalizer is a function associated with an object that runs when the
+garbage collector is ready to collect said object, that is, when the
+object is no longer referenced by anything.
+
+If the finalizer references the object, however, it will always remain
+as the final reference to that object, preventing the garbage
+collector from collecting the object. The finalizer will never run,
+and the object will never be collected, leading to a memory leak. That
+is why the finalizer should instead use its first argument to operate
+on the object. That way, the number of references can temporarily go
+to zero before the object is passed to the finalizer.
+
+Available since
+    2017.1
+`
+
+var docSA5006 = `Slice index out of bounds
+
+Available since
+    2017.1
+`
+
+var docSA5007 = `Infinite recursive call
+
+A function that calls itself recursively needs to have an exit
+condition. Otherwise it will recurse forever, until the system runs
+out of memory.
+
+This issue can be caused by simple bugs such as forgetting to add an
+exit condition. It can also happen "on purpose". Some languages have
+tail call optimization which makes certain infinite recursive calls
+safe to use. Go, however, does not implement TCO, and as such a loop
+should be used instead.
+
+Available since
+    2017.1
+`
+
+var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile
+
+Available since
+    2017.1
+`
+
+var docSA6001 = `Missing an optimization opportunity when indexing maps by byte slices
+
+Map keys must be comparable, which precludes the use of byte slices.
+This usually leads to using string keys and converting byte slices to
+strings.
+
+Normally, a conversion of a byte slice to a string needs to copy the data and
+causes allocations. The compiler, however, recognizes m[string(b)] and
+uses the data of b directly, without copying it, because it knows that
+the data can't change during the map lookup.
This leads to the +counter-intuitive situation that + + k := string(b) + println(m[k]) + println(m[k]) + +will be less efficient than + + println(m[string(b)]) + println(m[string(b)]) + +because the first version needs to copy and allocate, while the second +one does not. + +For some history on this optimization, check out commit +f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository. + +Available since + 2017.1 +` + +var docSA6002 = `Storing non-pointer values in sync.Pool allocates memory + +A sync.Pool is used to avoid unnecessary allocations and reduce the +amount of work the garbage collector has to do. + +When passing a value that is not a pointer to a function that accepts +an interface, the value needs to be placed on the heap, which means an +additional allocation. Slices are a common thing to put in sync.Pools, +and they're structs with 3 fields (length, capacity, and a pointer to +an array). In order to avoid the extra allocation, one should store a +pointer to the slice instead. + +See the comments on https://go-review.googlesource.com/c/go/+/24371 +that discuss this problem. + +Available since + 2017.1 +` + +var docSA6003 = `Converting a string to a slice of runes before ranging over it + +You may want to loop over the runes in a string. Instead of converting +the string to a slice of runes and looping over that, you can loop +over the string itself. That is, + + for _, r := range s {} + +and + + for _, r := range []rune(s) {} + +will yield the same values. The first version, however, will be faster +and avoid unnecessary memory allocations. + +Do note that if you are interested in the indices, ranging over a +string and over a slice of runes will yield different indices. The +first one yields byte offsets, while the second one yields indices in +the slice of runes. + +Available since + 2017.1 +` + +var docSA6005 = `Inefficient string comparison with strings.ToLower or strings.ToUpper + +Converting two strings to the same case and comparing them like so + + if strings.ToLower(s1) == strings.ToLower(s2) { + ... + } + +is significantly more expensive than comparing them with +strings.EqualFold(s1, s2). This is due to memory usage as well as +computational complexity. + +strings.ToLower will have to allocate memory for the new strings, as +well as convert both strings fully, even if they differ on the very +first byte. strings.EqualFold, on the other hand, compares the strings +one character at a time. It doesn't need to create two intermediate +strings and can return as soon as the first non-matching character has +been found. + +For a more in-depth explanation of this issue, see +https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/ + +Available since + Unreleased +` + +var docSA9001 = `defers in for range loops may not run when you expect them to + +Available since + 2017.1 +` + +var docSA9002 = `Using a non-octal os.FileMode that looks like it was meant to be in octal. + +Available since + 2017.1 +` + +var docSA9003 = `Empty body in an if or else branch + +Available since + 2017.1 +` + +var docSA9004 = `Only the first constant has an explicit type + +In a constant declaration such as the following: + + const ( + First byte = 1 + Second = 2 + ) + +the constant Second does not have the same type as the constant First. +This construct shouldn't be confused with + + const ( + First byte = iota + Second + ) + +where First and Second do indeed have the same type. 
The type is only +passed on when no explicit value is assigned to the constant. + +When declaring enumerations with explicit values it is therefore +important not to write + + const ( + EnumFirst EnumType = 1 + EnumSecond = 2 + EnumThird = 3 + ) + +This discrepancy in types can cause various confusing behaviors and +bugs. + + +Wrong type in variable declarations + +The most obvious issue with such incorrect enumerations expresses +itself as a compile error: + +package pkg + + const ( + EnumFirst uint8 = 1 + EnumSecond = 2 + ) + + func fn(useFirst bool) { + x := EnumSecond + if useFirst { + x = EnumFirst + } + } + +fails to compile with + + ./const.go:11:5: cannot use EnumFirst (type uint8) as type int in assignment + + +Losing method sets + +A more subtle issue occurs with types that have methods and optional +interfaces. Consider the following: + + package main + + import "fmt" + + type Enum int + + func (e Enum) String() string { + return "an enum" + } + + const ( + EnumFirst Enum = 1 + EnumSecond = 2 + ) + + func main() { + fmt.Println(EnumFirst) + fmt.Println(EnumSecond) + } + +This code will output + + an enum + 2 + +as EnumSecond has no explicit type, and thus defaults to int. + +Available since + 2019.1 +` + +var docSA9005 = `Trying to marshal a struct with no public fields nor custom marshaling + +The encoding/json and encoding/xml packages only operate on exported +fields in structs, not unexported ones. It is usually an error to try +to (un)marshal structs that only consist of unexported fields. + +This check will not flag calls involving types that define custom +marshaling behavior, e.g. via MarshalJSON methods. It will also not +flag empty structs. + +Available since + Unreleased +` diff --git a/staticcheck/lint.go b/staticcheck/lint.go index c1dfed2b6..aa5c662df 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -330,82 +330,82 @@ func (*Checker) Prefix() string { return "SA" } func (c *Checker) Checks() []lint.Check { return []lint.Check{ - {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules)}, - {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate}, - {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules)}, - {ID: "SA1003", FilterGenerated: false, Fn: c.callChecker(checkEncodingBinaryRules)}, - {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant}, - {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec}, - {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf}, - {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules)}, - {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey}, - {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules)}, - {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules)}, - {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext}, - {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker}, - {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules)}, - {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick}, - {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal}, - {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules)}, - {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules)}, - {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated}, - {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules)}, - {ID: "SA1021", FilterGenerated: 
false, Fn: c.callChecker(checkBytesEqualIPRules)}, - {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified}, - {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules)}, - {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue}, - {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal)}, - - {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd}, - {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection}, - {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting}, - {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock}, - - {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit}, - {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN}, - - {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical}, - {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy}, - {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison}, - {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison}, - {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop}, - {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues}, - {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition}, - {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten}, - {ID: "SA4010", FilterGenerated: false, Fn: c.CheckIneffectiveAppend}, - {ID: "SA4011", FilterGenerated: false, Fn: c.CheckScopedBreak}, - {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison}, - {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation}, - {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse}, - {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules)}, - {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps}, - {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions}, - {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment}, - {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints}, - {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases}, - {ID: "SA4021", FilterGenerated: true, Fn: c.CheckSingleArgAppend}, - - {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps}, - {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer}, - {ID: "SA5002", FilterGenerated: false, Fn: c.CheckInfiniteEmptyLoop}, - {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop}, - {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault}, - {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer}, - {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion}, - - {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules)}, - {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey}, - {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules)}, - {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes}, - // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp}, - {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison}, - - {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop}, - {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode}, - {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch}, - {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration}, + {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules), Doc: 
docSA1000}, + {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate, Doc: docSA1001}, + {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules), Doc: docSA1002}, + {ID: "SA1003", FilterGenerated: false, Fn: c.callChecker(checkEncodingBinaryRules), Doc: docSA1003}, + {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant, Doc: docSA1004}, + {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec, Doc: docSA1005}, + {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf, Doc: docSA1006}, + {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules), Doc: docSA1007}, + {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey, Doc: docSA1008}, + {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules), Doc: docSA1010}, + {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules), Doc: docSA1011}, + {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext, Doc: docSA1012}, + {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker, Doc: docSA1013}, + {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules), Doc: docSA1014}, + {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick, Doc: docSA1015}, + {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal, Doc: docSA1016}, + {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules), Doc: docSA1017}, + {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules), Doc: docSA1018}, + {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated, Doc: docSA1019}, + {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules), Doc: docSA1020}, + {ID: "SA1021", FilterGenerated: false, Fn: c.callChecker(checkBytesEqualIPRules), Doc: docSA1021}, + {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified, Doc: docSA1023}, + {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules), Doc: docSA1024}, + {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue, Doc: docSA1025}, + {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: ``}, + + {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd, Doc: docSA2000}, + {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection, Doc: docSA2001}, + {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting, Doc: docSA2002}, + {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock, Doc: docSA2003}, + + {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit, Doc: docSA3000}, + {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN, Doc: docSA3001}, + + {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical, Doc: docSA4000}, + {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy, Doc: docSA4001}, + {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison, Doc: docSA4002}, + {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison, Doc: docSA4003}, + {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop, Doc: docSA4004}, + {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues, Doc: docSA4006}, + {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition, Doc: docSA4008}, + {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten, Doc: docSA4009}, + {ID: "SA4010", FilterGenerated: false, Fn: c.CheckIneffectiveAppend, Doc: docSA4010}, + {ID: "SA4011", 
FilterGenerated: false, Fn: c.CheckScopedBreak, Doc: docSA4011}, + {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison, Doc: docSA4012}, + {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation, Doc: docSA4013}, + {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse, Doc: docSA4014}, + {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules), Doc: docSA4015}, + {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps, Doc: docSA4016}, + {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions, Doc: docSA4017}, + {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment, Doc: docSA4018}, + {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints, Doc: docSA4019}, + {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases, Doc: docSA4020}, + {ID: "SA4021", FilterGenerated: true, Fn: c.CheckSingleArgAppend, Doc: docSA4021}, + + {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps, Doc: docSA5000}, + {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer, Doc: docSA5001}, + {ID: "SA5002", FilterGenerated: false, Fn: c.CheckInfiniteEmptyLoop, Doc: docSA5002}, + {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop, Doc: docSA5003}, + {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault, Doc: docSA5004}, + {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer, Doc: docSA5005}, + {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion, Doc: docSA5007}, + + {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules), Doc: docSA6000}, + {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey, Doc: docSA6001}, + {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules), Doc: docSA6002}, + {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes, Doc: docSA6003}, + // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp, Doc: docSA6004}, + {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison, Doc: docSA6005}, + + {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop, Doc: docSA9001}, + {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode, Doc: docSA9002}, + {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch, Doc: docSA9003}, + {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration, Doc: docSA9004}, // Filtering generated code because it may include empty structs generated from data models. - {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal)}, + {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal), Doc: docSA9005}, } // "SA5006": c.CheckSliceOutOfBounds, diff --git a/stylecheck/doc.go b/stylecheck/doc.go new file mode 100644 index 000000000..efc65092e --- /dev/null +++ b/stylecheck/doc.go @@ -0,0 +1,170 @@ +package stylecheck + +var docST1000 = `Incorrect or missing package comment + +Packages must have a package comment that is formatted according to +the guidelines laid out in +https://github.com/golang/go/wiki/CodeReviewComments#package-comments. + +Available since + 2019.1, non-default +` + +var docST1001 = `Dot imports are discouraged + +Dot imports that aren't in external test packages are discouraged. + +The dot_import_whitelist option can be used to whitelist certain +imports. + +Quoting Go Code Review Comments: + + The import . 
form can be useful in tests that, due to circular + dependencies, cannot be made part of the package being tested: + + package foo_test + + import ( + "bar/testutil" // also imports "foo" + . "foo" + ) + + In this case, the test file cannot be in package foo because it + uses bar/testutil, which imports foo. So we use the 'import .' + form to let the file pretend to be part of package foo even though + it is not. Except for this one case, do not use import . in your + programs. It makes the programs much harder to read because it is + unclear whether a name like Quux is a top-level identifier in the + current package or in an imported package. + +Available since + 2019.1 + +Options + dot_import_whitelist +` + +var docST1003 = `Poorly chosen identifier + +Identifiers, such as variable and package names, follow certain rules. + +See the following links for details: + + http://golang.org/doc/effective_go.html#package-names + http://golang.org/doc/effective_go.html#mixed-caps + https://github.com/golang/go/wiki/CodeReviewComments#initialisms + https://github.com/golang/go/wiki/CodeReviewComments#variable-names + +Available since + 2019.1, non-default + +Options + initialisms +` + +var docST1005 = `Incorrectly formatted error string + +Error strings follow a set of guidelines to ensure uniformity and good +composability. + +Quoting Go Code Review Comments: + + Error strings should not be capitalized (unless beginning with + proper nouns or acronyms) or end with punctuation, since they are + usually printed following other context. That is, use + fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so + that log.Printf("Reading %s: %v", filename, err) formats without a + spurious capital letter mid-message. + +Available since + 2019.1 +` + +var docST1006 = `Poorly chosen receiver name + +Quoting Go Code Review Comments: + + The name of a method's receiver should be a reflection of its + identity; often a one or two letter abbreviation of its type + suffices (such as "c" or "cl" for "Client"). Don't use generic + names such as "me", "this" or "self", identifiers typical of + object-oriented languages that place more emphasis on methods as + opposed to functions. The name need not be as descriptive as that + of a method argument, as its role is obvious and serves no + documentary purpose. It can be very short as it will appear on + almost every line of every method of the type; familiarity admits + brevity. Be consistent, too: if you call the receiver "c" in one + method, don't call it "cl" in another. + +Available since + 2019.1 +` + +var docST1008 = `A function's error value should be its last return value + +A function's error value should be its last return value. + +Available since + 2019.1 +` + +var docST1011 = `Poorly chosen name for variable of type time.Duration + +time.Duration values represent an amount of time, which is represented +as a count of nanoseconds. An expression like 5 * time.Microsecond +yields the value 5000. It is therefore not appropriate to suffix a +variable of type time.Duration with any time unit, such as Msec or +Milli. + +Available since + 2019.1 +` + +var docST1012 = `Poorly chosen name for error variable + +Error variables that are part of an API should be called errFoo or +ErrFoo. 
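+
+For example (illustrative):
+
+    var ErrNotFound = errors.New("not found")   // exported
+    var errNotFound = errors.New("not found")   // unexported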
+ +Available since + 2019.1 +` + +var docST1013 = `Should use constants for HTTP error codes, not magic numbers + +HTTP has a tremendous number of status codes. While some of those are +well known (200, 400, 404, 500), most of them are not. The net/http +package provides constants for all status codes that are part of the +various specifications. It is recommended to use these constants +instead of hard-coding magic numbers, to vastly improve the +readability of your code. + +Available since + 2019.1 + +Options + http_status_code_whitelist +` + +var docST1015 = `A switch's default case should be the first or last case + +Available since + 2019.1 +` + +var docST1016 = `Use consistent method receiver names + +Available since + 2019.1, non-default +` + +var docST1017 = `Don't use Yoda conditions + +Available since + Unreleased +` + +var docST1018 = `Avoid zero-width and control characters in string literals + +Available since + Unreleased +` diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 107a276d2..69bebaf7e 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -32,24 +32,24 @@ func (c *Checker) Init(prog *lint.Program) {} func (c *Checker) Checks() []lint.Check { return []lint.Check{ - {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment}, - {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports}, - // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports}, - {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames}, - // {ID: "ST1004", FilterGenerated: false, Fn: nil, }, - {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings}, - {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames}, - // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec}, - {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn}, - // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn}, - // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg}, - {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames}, - {ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames}, - {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes}, - {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder}, - {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical}, - {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions}, - {ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters}, + {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment, Doc: docST1000}, + {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports, Doc: docST1001}, + // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports, Doc: docST1002}, + {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames, Doc: docST1003}, + // {ID: "ST1004", FilterGenerated: false, Fn: nil, , Doc: docST1004}, + {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings, Doc: docST1005}, + {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames, Doc: docST1006}, + // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec, Doc: docST1007}, + {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn, Doc: docST1008}, + // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn, Doc: docST1009}, + // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg, Doc: docST1010}, + {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames, Doc: docST1011}, + {ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames, Doc: docST1012}, + {ID: "ST1013", FilterGenerated: true, Fn: 
c.CheckHTTPStatusCodes, Doc: docST1013}, + {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder, Doc: docST1015}, + {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical, Doc: docST1016}, + {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions, Doc: docST1017}, + {ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters, Doc: docST1018}, } } From 747de16c19d0445d584b301957201f6060367166 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 11 Apr 2019 11:46:29 +0200 Subject: [PATCH 103/254] unused: track embedded interfaces that don't contribute any methods of their own --- unused/testdata/src/embedding2/embedding2.go | 28 ++++++++++++++++++++ unused/unused.go | 10 +++++++ 2 files changed, 38 insertions(+) create mode 100644 unused/testdata/src/embedding2/embedding2.go diff --git a/unused/testdata/src/embedding2/embedding2.go b/unused/testdata/src/embedding2/embedding2.go new file mode 100644 index 000000000..7efb67429 --- /dev/null +++ b/unused/testdata/src/embedding2/embedding2.go @@ -0,0 +1,28 @@ +package main + +type AA interface { + A() +} + +type BB interface { + AA +} + +type CC interface { + BB + C() +} + +func c(cc CC) { + cc.A() +} + +type z struct{} + +func (z) A() {} +func (z) B() {} +func (z) C() {} + +func main() { + c(z{}) +} diff --git a/unused/unused.go b/unused/unused.go index 3dbf29f33..b0a690941 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -89,6 +89,11 @@ import ( called. This is to accomodate sum types (unexported interface method that must exist but never gets called.) + - (8.4) All embedded interfaces are marked as used. This is an + extension of 8.3, but we have to explicitly track embedded + interfaces because in a chain C->B->A, B wouldn't be marked as + used by 8.3 just because it contributes A's methods to C. + - Inherent uses: - thunks and other generated wrappers call the real function - (9.2) variables use their types @@ -1373,6 +1378,11 @@ func (g *Graph) typ(t types.Type) { g.seeAndUse(m.Type().(*types.Signature), m, "signature") g.signature(m.Type().(*types.Signature)) } + for i := 0; i < t.NumEmbeddeds(); i++ { + tt := t.EmbeddedType(i) + // (8.4) All embedded interfaces are marked as used + g.seeAndUse(tt, t, "embedded interface") + } case *types.Array: // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, "element type") From 526f6afaca573766f6bb20bbb5e2f86b2ebdc271 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 11 Apr 2019 15:09:22 +0200 Subject: [PATCH 104/254] staticcheck: validate struct tags Closes gh-434 Closes gh-427 --- staticcheck/lint.go | 99 +++++++++++++++++++ staticcheck/structtag.go | 58 +++++++++++ .../src/CheckStructTags/CheckStructTags.go | 36 +++++++ 3 files changed, 193 insertions(+) create mode 100644 staticcheck/structtag.go create mode 100644 staticcheck/testdata/src/CheckStructTags/CheckStructTags.go diff --git a/staticcheck/lint.go b/staticcheck/lint.go index aa5c662df..2dd0c59c4 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -17,6 +17,7 @@ import ( "strings" "sync" texttemplate "text/template" + "unicode" . 
"honnef.co/go/tools/arg" "honnef.co/go/tools/deprecated" @@ -392,6 +393,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault, Doc: docSA5004}, {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer, Doc: docSA5005}, {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion, Doc: docSA5007}, + {ID: "SA5008", FilterGenerated: false, Fn: c.CheckStructTags, Doc: ``}, {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules), Doc: docSA6000}, {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey, Doc: docSA6001}, @@ -3028,3 +3030,100 @@ func (c *Checker) CheckSingleArgAppend(j *lint.Job) { } InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) } + +func (c *Checker) CheckStructTags(j *lint.Job) { + fn := func(node ast.Node) { + for _, field := range node.(*ast.StructType).Fields.List { + if field.Tag == nil { + continue + } + tags, err := parseStructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) + if err != nil { + j.Errorf(field.Tag, "unparseable struct tag: %s", err) + continue + } + for k, v := range tags { + if len(v) > 1 { + j.Errorf(field.Tag, "duplicate struct tag %q", k) + continue + } + + switch k { + case "json": + checkJSONTag(j, field, v[0]) + case "xml": + checkXMLTag(j, field, v[0]) + } + } + } + } + InspectPreorder(j, []ast.Node{(*ast.StructType)(nil)}, fn) +} + +func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { + if len(tag) == 0 { + // TODO(dh): should we flag empty tags? + } + fields := strings.Split(tag, ",") + for _, r := range fields[0] { + if !unicode.IsLetter(r) && !unicode.IsDigit(r) && !strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", r) { + j.Errorf(field.Tag, "invalid JSON field name %q", fields[0]) + } + } + var co, cs int + for _, s := range fields[1:] { + switch s { + case "omitempty": + co++ + case "": + // allow stuff like "-," + case "string": + cs++ + // only for string, floating point, integer and bool + T := Dereference(TypeOf(j, field.Type).Underlying()).Underlying() + basic, ok := T.(*types.Basic) + if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { + j.Errorf(field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + } + default: + j.Errorf(field.Tag, "unknown JSON option %q", s) + } + } + if co > 1 { + j.Errorf(field.Tag, `duplicate JSON option "omitempty"`) + } + if cs > 1 { + j.Errorf(field.Tag, `duplicate JSON option "string"`) + } +} + +func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { + if len(tag) == 0 { + // TODO(dh): should we flag empty tags? + } + fields := strings.Split(tag, ",") + counts := map[string]int{} + var exclusives []string + for _, s := range fields[1:] { + switch s { + case "attr", "chardata", "cdata", "innerxml", "comment": + counts[s]++ + if counts[s] == 1 { + exclusives = append(exclusives, s) + } + case "omitempty", "any": + counts[s]++ + case "": + default: + j.Errorf(field.Tag, "unknown XML option %q", s) + } + } + for k, v := range counts { + if v > 1 { + j.Errorf(field.Tag, "duplicate XML option %q", k) + } + } + if len(exclusives) > 1 { + j.Errorf(field.Tag, "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) + } +} diff --git a/staticcheck/structtag.go b/staticcheck/structtag.go new file mode 100644 index 000000000..38830a22c --- /dev/null +++ b/staticcheck/structtag.go @@ -0,0 +1,58 @@ +// Copyright 2009 The Go Authors. 
All rights reserved. +// Copyright 2019 Dominik Honnef. All rights reserved. + +package staticcheck + +import "strconv" + +func parseStructTag(tag string) (map[string][]string, error) { + // FIXME(dh): detect missing closing quote + out := map[string][]string{} + + for tag != "" { + // Skip leading space. + i := 0 + for i < len(tag) && tag[i] == ' ' { + i++ + } + tag = tag[i:] + if tag == "" { + break + } + + // Scan to colon. A space, a quote or a control character is a syntax error. + // Strictly speaking, control chars include the range [0x7f, 0x9f], not just + // [0x00, 0x1f], but in practice, we ignore the multi-byte control characters + // as it is simpler to inspect the tag's bytes than the tag's runes. + i = 0 + for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { + i++ + } + if i == 0 || i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' { + break + } + name := string(tag[:i]) + tag = tag[i+1:] + + // Scan quoted string to find value. + i = 1 + for i < len(tag) && tag[i] != '"' { + if tag[i] == '\\' { + i++ + } + i++ + } + if i >= len(tag) { + break + } + qvalue := string(tag[:i+1]) + tag = tag[i+1:] + + value, err := strconv.Unquote(qvalue) + if err != nil { + return nil, err + } + out[name] = append(out[name], value) + } + return out, nil +} diff --git a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go new file mode 100644 index 000000000..4d75f9bc7 --- /dev/null +++ b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go @@ -0,0 +1,36 @@ +package pkg + +type T1 struct { + B int `foo:"" foo:""` // MATCH "duplicate struct tag" + C int `foo:"" bar:""` + D int `json:"-"` + E int `json:"\\"` // MATCH "invalid JSON field name" + F int `json:",omitempty,omitempty"` // MATCH "duplicate JSON option "omitempty"" + G int `json:",omitempty,string"` + H int `json:",string,omitempty,string"` // MATCH "duplicate JSON option "string"" + I int `json:",unknown"` // MATCH "unknown JSON option "unknown"" + J int `json:",string"` + K *int `json:",string"` + L **int `json:",string"` // MATCH "the JSON string option" + M complex128 `json:",string"` // MATCH "the JSON string option" + N int `json:"some-name"` +} + +type T2 struct { + A int `xml:",attr"` + B int `xml:",chardata"` + C int `xml:",cdata"` + D int `xml:",innerxml"` + E int `xml:",comment"` + F int `xml:",omitempty"` + G int `xml:",any"` + H int `xml:",unknown"` // MATCH "unknown XML option" + I int `xml:",any,any"` // MATCH "duplicate XML option" + J int `xml:"a>b>c,"` + K int `xml:",attr,cdata"` // MATCH "mutually exclusive" +} + +type T3 struct { + A int `json:",omitempty" xml:",attr"` + B int `json:",unknown" xml:",attr"` // MATCH "unknown JSON option" +} From 10572299276af2ea5e9f80acabf6ec99ca6d3b98 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 11 Apr 2019 17:18:27 +0200 Subject: [PATCH 105/254] unused: process types of ssa.Const --- unused/testdata/src/types/types.go | 13 +++++++++++++ unused/unused.go | 1 + 2 files changed, 14 insertions(+) create mode 100644 unused/testdata/src/types/types.go diff --git a/unused/testdata/src/types/types.go b/unused/testdata/src/types/types.go new file mode 100644 index 000000000..0c6678e4b --- /dev/null +++ b/unused/testdata/src/types/types.go @@ -0,0 +1,13 @@ +package pkg + +import "reflect" + +type wkt interface { + XXX_WellKnownType() string +} + +var typeOfWkt = reflect.TypeOf((*wkt)(nil)).Elem() + +func Fn() { + _ = typeOfWkt +} diff --git a/unused/unused.go 
b/unused/unused.go index b0a690941..1d227eecc 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1453,6 +1453,7 @@ func (g *Graph) instructions(fn *ssa.Function) { case *ssa.Const: // (9.6) instructions use their operands' types g.seeAndUse(v.Type(), fn, "constant's type") + g.typ(v.Type()) case *ssa.Global: if v.Object() != nil { // (9.5) instructions use their operands From 9cba8c652c11ec88e3462ae55e33a201a56b4679 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 11 Apr 2019 17:49:00 +0200 Subject: [PATCH 106/254] unused: don't be over-eager in culling irrelevant objects Dereferencing a types.Named representing `type t *int` results in *int. And while *int is an irrelevant type, t very much isn't. --- unused/testdata/src/types/types.go | 4 ++++ unused/unused.go | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/unused/testdata/src/types/types.go b/unused/testdata/src/types/types.go index 0c6678e4b..393df3fa5 100644 --- a/unused/testdata/src/types/types.go +++ b/unused/testdata/src/types/types.go @@ -11,3 +11,7 @@ var typeOfWkt = reflect.TypeOf((*wkt)(nil)).Elem() func Fn() { _ = typeOfWkt } + +type t *int + +var _ t diff --git a/unused/unused.go b/unused/unused.go index 1d227eecc..63da965d0 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -70,7 +70,8 @@ import ( - (6.4) embedded fields that have exported methods (recursively) - (6.5) embedded structs that have exported fields (recursively) -- field accesses use fields +- (7.1) field accesses use fields +- (7.2) fields use their types - (8.0) How we handle interfaces: - (8.1) We do not technically care about interfaces that only consist of @@ -794,7 +795,6 @@ func isIrrelevant(obj interface{}) bool { } } if T, ok := obj.(types.Type); ok { - T = lintdsl.Dereference(T) switch T := T.(type) { case *types.Array: return isIrrelevant(T.Elem()) From 1a905b6394b479c835378540d1423cda86dfad4f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 11 Apr 2019 21:07:27 +0200 Subject: [PATCH 107/254] staticcheck: check alignment of atomics Closes gh-171 --- staticcheck/lint.go | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 2dd0c59c4..918490889 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -230,8 +230,47 @@ var ( "(*encoding/json.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/json.Encoder).Encode.v"), "json", "MarshalJSON", "MarshalText"), "(*encoding/xml.Encoder).Encode": checkUnsupportedMarshalImpl(Arg("(*encoding/xml.Encoder).Encode.v"), "xml", "MarshalXML", "MarshalText"), } + + checkAtomicAlignment = map[string]CallCheck{ + "sync/atomic.AddInt64": checkAtomicAlignmentImpl, + "sync/atomic.AddUint64": checkAtomicAlignmentImpl, + "sync/atomic.CompareAndSwapInt64": checkAtomicAlignmentImpl, + "sync/atomic.CompareAndSwapUint64": checkAtomicAlignmentImpl, + "sync/atomic.LoadInt64": checkAtomicAlignmentImpl, + "sync/atomic.LoadUint64": checkAtomicAlignmentImpl, + "sync/atomic.StoreInt64": checkAtomicAlignmentImpl, + "sync/atomic.StoreUint64": checkAtomicAlignmentImpl, + "sync/atomic.SwapInt64": checkAtomicAlignmentImpl, + "sync/atomic.SwapUint64": checkAtomicAlignmentImpl, + } ) +func checkAtomicAlignmentImpl(call *Call) { + sizes := call.Job.Program.InitialPackages[0].TypesSizes + if sizes.Sizeof(types.Typ[types.Uintptr]) != 4 { + // Not running on a 32-bit platform + return + } + v, ok := call.Args[0].Value.Value.(*ssa.FieldAddr) + if !ok { + // TODO(dh): also check indexing into arrays 
and slices + return + } + T := v.X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct) + fields := make([]*types.Var, 0, T.NumFields()) + for i := 0; i < T.NumFields() && i <= v.Field; i++ { + fields = append(fields, T.Field(i)) + } + + off := sizes.Offsetsof(fields)[v.Field] + if off%8 != 0 { + msg := fmt.Sprintf("address of non 64-bit aligned field %s passed to %s", + T.Field(v.Field).Name(), + CallName(call.Instr.Common())) + call.Invalid(msg) + } +} + func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { return func(call *Call) { arg := call.Args[argN] @@ -356,6 +395,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules), Doc: docSA1024}, {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue, Doc: docSA1025}, {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: ``}, + {ID: "SA1027", FilterGenerated: false, Fn: c.callChecker(checkAtomicAlignment), Doc: ``}, {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd, Doc: docSA2000}, {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection, Doc: docSA2001}, From 803078590467fa79f60bcb1126ceffc34c54f594 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 15:36:49 +0200 Subject: [PATCH 108/254] staticcheck: documentation improvements --- staticcheck/doc.go | 41 +++++++++++++++++++++++++++++++++++++++-- staticcheck/lint.go | 4 ++-- 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 76f980c69..efee240f6 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -26,6 +26,19 @@ Available since var docSA1004 = `Suspiciously small untyped constant in time.Sleep +The time.Sleep function takes a time.Duration as its only argument. +Durations are expressed in nanoseconds. Thus, calling time.Sleep(1) +will sleep for 1 nanosecond. This is a common source of bugs, as sleep +functions in other languages often accept seconds or milliseconds. + +The time package provides constants such as time.Second to express +large durations. These can be combined with arithmetic to express +arbitrary durations, for example '5 * time.Second' for 5 seconds. + +If you truly meant to sleep for a tiny amount of time, use +'n * time.Nanosecond" to signal to staticcheck that you did mean to sleep +for some amount of nanoseconds. + Available since 2017.1 ` @@ -183,12 +196,32 @@ Available since 2017.1 ` -var docSA1025 = `it is not possible to use Reset's return value correctly +var docSA1025 = `It is not possible to use Reset's return value correctly Available since 2019.1 ` +var docSA1026 = `Cannot marshal channels or functions + +Available since + Unreleased +` + +var docSA1027 = `Atomic access to 64-bit variable must be 64-bit aligned + +On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to +arrange for 64-bit alignment of 64-bit words accessed atomically. The +first word in a variable or in an allocated struct, array, or slice +can be relied upon to be 64-bit aligned. + +You can use the structlayout tool to inspect the alignment of fields +in a struct. + +Available since + Unreleased +` + var docSA2000 = `sync.WaitGroup.Add called inside the goroutine, leading to a race condition Available since @@ -438,6 +471,10 @@ Available since var docSA5003 = `Defers in infinite loops will never execute +Defers are scoped to the surrounding function, not the surrounding +block. In a function that never returns, i.e. 
one containing an +infinite loop, defers will never execute. + Available since 2017.1 ` @@ -594,7 +631,7 @@ Available since Unreleased ` -var docSA9001 = `defers in for range loops may not run when you expect them to +var docSA9001 = `Defers in 'for range' loops may not run when you expect them to Available since 2017.1 diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 918490889..0c5612f5a 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -394,8 +394,8 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified, Doc: docSA1023}, {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules), Doc: docSA1024}, {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue, Doc: docSA1025}, - {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: ``}, - {ID: "SA1027", FilterGenerated: false, Fn: c.callChecker(checkAtomicAlignment), Doc: ``}, + {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: docSA1026}, + {ID: "SA1027", FilterGenerated: false, Fn: c.callChecker(checkAtomicAlignment), Doc: docSA1027}, {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd, Doc: docSA2000}, {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection, Doc: docSA2001}, From e63a39a5b848a1b45f42e36b4e49d8ed3fa69d66 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 15:45:53 +0200 Subject: [PATCH 109/254] unused: don't discard cross-package edges in whole program mode Closes gh-443 --- unused/unused.go | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 63da965d0..c25cdb49b 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -861,14 +861,16 @@ func (g *Graph) use(used, by interface{}, reason string) { if _, ok := by.(*types.Func); ok { assert(g.pkg.Prog.FuncValue(by.(*types.Func)) == nil) } - if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { - if obj.Pkg() != g.pkg.Pkg { - return + if !g.wholeProgram { + if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { + if obj.Pkg() != g.pkg.Pkg { + return + } } - } - if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { - if obj.Pkg() != g.pkg.Pkg { - return + if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { + if obj.Pkg() != g.pkg.Pkg { + return + } } } usedNode, new := g.node(used) From 5f558939d3aafbdade56e68e14c4f6b4c2df786e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 16:39:24 +0200 Subject: [PATCH 110/254] unused: fix handling of packages in whole program mode e63a39a5b848a1b45f42e36b4e49d8ed3fa69d66 introduced an incorrect fix for whole program mode. The fix was incorrect because it assumed that all packages were relevant. However, even in whole program mode, we do not care about dependencies that aren't being explicitly checked. 
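
To make the intended rule concrete, the package filter has to look roughly like the following sketch (illustrative only; the identifier names here are made up and the real change is in the diff below): a package is relevant in whole-program mode only if it is one of the packages the user explicitly asked to check, and otherwise only if it is the package currently being linted.

package sketch

import "go/types"

type graph struct {
	wholeProgram bool
	current      *types.Package   // package currently being linted
	initial      []*types.Package // packages explicitly requested by the user
}

// interestingPackage reports whether objects from pkg should be
// tracked by the unused graph.
func (g *graph) interestingPackage(pkg *types.Package) bool {
	if g.wholeProgram {
		for _, p := range g.initial {
			if p == pkg {
				return true
			}
		}
		return false
	}
	return pkg == g.current
}
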
--- unused/unused.go | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index c25cdb49b..e6676d8e7 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -833,6 +833,18 @@ func isIrrelevant(obj interface{}) bool { return false } +func (g *Graph) isInterestingPackage(pkg *types.Package) bool { + if g.wholeProgram { + for _, opkg := range g.job.Program.InitialPackages { + if opkg.Types == pkg { + return true + } + } + return false + } + return pkg == g.pkg.Pkg +} + func (g *Graph) see(obj interface{}) { if isIrrelevant(obj) { return @@ -840,7 +852,7 @@ func (g *Graph) see(obj interface{}) { assert(obj != nil) if obj, ok := obj.(types.Object); ok && obj.Pkg() != nil { - if obj.Pkg() != g.pkg.Pkg { + if !g.isInterestingPackage(obj.Pkg()) { return } } @@ -861,16 +873,14 @@ func (g *Graph) use(used, by interface{}, reason string) { if _, ok := by.(*types.Func); ok { assert(g.pkg.Prog.FuncValue(by.(*types.Func)) == nil) } - if !g.wholeProgram { - if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { - if obj.Pkg() != g.pkg.Pkg { - return - } + if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { + if !g.isInterestingPackage(obj.Pkg()) { + return } - if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { - if obj.Pkg() != g.pkg.Pkg { - return - } + } + if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { + if !g.isInterestingPackage(obj.Pkg()) { + return } } usedNode, new := g.node(used) From 3501aee4d16b97253e7e4db142425021fb1a6dfa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 17:02:14 +0200 Subject: [PATCH 111/254] staticcheck: documentation improvements --- staticcheck/doc.go | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index efee240f6..9a6c74fff 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -20,6 +20,13 @@ Available since var docSA1003 = `Unsupported argument to functions in encoding/binary +The encoding/binary package can only serialize types with known sizes. +This precludes the use of the 'int' and 'uint' types, as their sizes +differ on different architectures. Furthermore, it doesn't support +serializing maps, channels, strings, or functions. + +Before Go 1.8, bool wasn't supported, either. + Available since 2017.1 ` @@ -144,11 +151,24 @@ Available since var docSA1016 = `Trapping a signal that cannot be trapped +Not all signals can be intercepted by a process. Speficially, on +UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are +never passed to the process, but instead handled directly by the +kernel. It is therefore pointless to try and handle these signals. + Available since 2017.1 ` -var docSA1017 = `Channels used with signal.Notify should be buffered +var docSA1017 = `Channels used with os/signal.Notify should be buffered + +The os/signal package uses non-blocking channel sends when delivering +signals. If the receiving end of the channel isn't ready and the +channel is either unbuffered or full, the signal will be dropped. To +avoid missing signals, the channel should be buffered and of the +appropriate size. For a channel used for notification of just one +signal value, a buffer of size 1 is sufficient. + Available since 2017.1 @@ -186,6 +206,8 @@ Available since var docSA1023 = `Modifying the buffer in an io.Writer implementation +Write must not modify the slice data, even temporarily. 
+ Available since 2017.1 ` @@ -254,6 +276,11 @@ Available since var docSA3001 = `Assigning to b.N in benchmarks distorts the results +The testing package dynamically sets b.N to improve the reliability of +benchmarks and uses it in computations to determine the duration of a +single operation. Benchmark code must not alter b.N as this would +falsify results. + Available since 2017.1 ` From ea3d9bfaf3c63e857a636d16c7649cead11ff77c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 19:02:29 +0200 Subject: [PATCH 112/254] Run garbage collection at specific points Run garbage collection after points we know produce a lot of garbage. This should help keep memory usage slightly lower, as well as more consistent. Peak RSS drops from 5052368.4+-6% kB to 4625624+-4% kB --- lint/lint.go | 2 ++ lint/lintutil/util.go | 1 + 2 files changed, 3 insertions(+) diff --git a/lint/lint.go b/lint/lint.go index 2de8037b2..313b56f25 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -9,6 +9,7 @@ import ( "io" "os" "path/filepath" + "runtime" "sort" "strings" "sync" @@ -268,6 +269,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { if stats != nil { stats.SSABuild = time.Since(t) } + runtime.GC() t = time.Now() pkgMap := map[*ssa.Package]*Pkg{} diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 26bbe6bf5..e562a8775 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -306,6 +306,7 @@ func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, erro return nil, err } stats.PackageLoading = time.Since(t) + runtime.GC() var problems []lint.Problem workingPkgs := make([]*packages.Package, 0, len(pkgs)) From 774503cece8056799559d64fe4dd6720a0baf41c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 20:05:26 +0200 Subject: [PATCH 113/254] Code simplifications --- cmd/staticcheck/staticcheck.go | 4 +--- lint/lint.go | 5 +---- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 3bec82339..6f381850d 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -20,10 +20,8 @@ func main() { simple.NewChecker(), staticcheck.NewChecker(), stylecheck.NewChecker(), + &unused.Checker{}, } - uc := &unused.Checker{} - checkers = append(checkers, uc) - lintutil.ProcessFlagSet(checkers, fs) } diff --git a/lint/lint.go b/lint/lint.go index 313b56f25..7494dcffe 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -677,10 +677,7 @@ func (prog *Program) isGenerated(path string) bool { } func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - tf := j.Program.SSA.Fset.File(n.Pos()) - f := j.Program.tokenFileMap[tf] - pkg := j.Program.astFileMap[f] - + pkg := j.NodePackage(n) pos := j.Program.DisplayPosition(n.Pos()) if j.Program.isGenerated(pos.Filename) && j.check.FilterGenerated { return nil From 5839eaa28e1a524d8b33857573ef683c86b81257 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 20:19:51 +0200 Subject: [PATCH 114/254] lint: delete dead code --- lint/lint.go | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 7494dcffe..98539cb97 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -401,23 +401,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } } - sizes := struct { - types int - defs int - uses int - implicits int - selections int - scopes int - }{} - for _, pkg := range pkgs { - sizes.types += 
len(pkg.TypesInfo.Types) - sizes.defs += len(pkg.TypesInfo.Defs) - sizes.uses += len(pkg.TypesInfo.Uses) - sizes.implicits += len(pkg.TypesInfo.Implicits) - sizes.selections += len(pkg.TypesInfo.Selections) - sizes.scopes += len(pkg.TypesInfo.Scopes) - } - if stats != nil { stats.OtherInitWork = time.Since(t) } From 638412e38a502a220e5ad7ee997f3f161b8e859b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 20:38:41 +0200 Subject: [PATCH 115/254] lint: merge tokenFileMap and astFileMap --- lint/lint.go | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 98539cb97..2ea153e82 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -107,6 +107,11 @@ func (gi *GlobIgnore) Match(p Problem) bool { return false } +type tokenFileMapEntry struct { + af *ast.File + pkg *Pkg +} + type Program struct { SSA *ssa.Program InitialPackages []*Pkg @@ -116,8 +121,7 @@ type Program struct { Files []*ast.File GoVersion int - tokenFileMap map[*token.File]*ast.File - astFileMap map[*ast.File]*Pkg + tokenFileMap map[*token.File]tokenFileMapEntry packagesMap map[string]*packages.Package genMu sync.RWMutex @@ -209,7 +213,7 @@ func (l *Linter) ignore(p Problem) bool { } func (prog *Program) File(node Positioner) *ast.File { - return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())] + return prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())].af } func (j *Job) File(node Positioner) *ast.File { @@ -309,8 +313,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { InitialPackages: pkgs, AllPackages: allPkgs, GoVersion: l.GoVersion, - tokenFileMap: map[*token.File]*ast.File{}, - astFileMap: map[*ast.File]*Pkg{}, + tokenFileMap: map[*token.File]tokenFileMapEntry{}, generatedMap: map[string]bool{}, } prog.packagesMap = map[string]*packages.Package{} @@ -333,17 +336,16 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } for _, pkg := range pkgs { prog.Files = append(prog.Files, pkg.Syntax...) - - ssapkg := ssaprog.Package(pkg.Types) - for _, f := range pkg.Syntax { - prog.astFileMap[f] = pkgMap[ssapkg] - } } for _, pkg := range allPkgs { for _, f := range pkg.Syntax { tf := pkg.Fset.File(f.Pos()) - prog.tokenFileMap[tf] = f + ssapkg := ssaprog.Package(pkg.Types) + prog.tokenFileMap[tf] = tokenFileMapEntry{ + af: f, + pkg: pkgMap[ssapkg], + } } } @@ -489,7 +491,9 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } couldveMatched := false - for f, pkg := range prog.astFileMap { + for _, v := range prog.tokenFileMap { + f := v.af + pkg := v.pkg if prog.Fset().Position(f.Pos()).Filename != ig.File { continue } @@ -677,8 +681,7 @@ func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem } func (j *Job) NodePackage(node Positioner) *Pkg { - f := j.File(node) - return j.Program.astFileMap[f] + return j.Program.tokenFileMap[j.Program.SSA.Fset.File(node.Pos())].pkg } func allPackages(pkgs []*packages.Package) []*packages.Package { From dd935d81a4ff79f99913a674cb9391307bf045f3 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 21:09:28 +0200 Subject: [PATCH 116/254] lint/lintutil: set a default GOGC of 50 Go is a garbage collected language, its memory usage will be higher than actual memory usage. By default up to a factor of 2x. This can lead to out of memory situations even though we would technically be fine. Transient spikes and GC running at unfortunate times can lead to too high memory targets. 
If we used 3 GB of memory on the last GC, the next GC will run once we hit 6 GB. This is problematic if the system only has 5 GB and we create a lot of short-lived garbage. We can trade some CPU time for lower memory usage by setting GOGC to a lower value, forcing more frequent garbage collection. For `staticcheck std`, runtime and peak RSS change as follows: 27.89+-3% s -> 31.566+-1.5% s 4561579.2+-1.4% kB -> 4095474.4+-1% kB Considering people have more patience than memory, this seems like an okay tradeoff. Users can still set their own value for GOGC by exporting the environment variable. Closes gh-425 --- lint/lintutil/util.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index e562a8775..636025ad5 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -17,6 +17,7 @@ import ( "os" "regexp" "runtime" + "runtime/debug" "runtime/pprof" "strconv" "strings" @@ -144,6 +145,10 @@ func findCheck(cs []lint.Checker, check string) (lint.Check, bool) { } func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { + if _, ok := os.LookupEnv("GOGC"); !ok { + debug.SetGCPercent(50) + } + tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) From 27977a8536cd2ceb44b13ecdaee6a423cb6e6998 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 22:26:06 +0200 Subject: [PATCH 117/254] lint: don't build unnecessary comment maps --- lint/lint.go | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/lint/lint.go b/lint/lint.go index 2ea153e82..5f886825f 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -353,6 +353,19 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { l.automaticIgnores = nil for _, pkg := range initial { for _, f := range pkg.Syntax { + found := false + commentLoop: + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.Contains(c.Text, "//lint:") { + found = true + break commentLoop + } + } + } + if !found { + continue + } cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) for node, cgs := range cm { for _, cg := range cgs { From e87e8279b4cda00a53befd43a6c808809c80bebd Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 12 Apr 2019 22:59:16 +0200 Subject: [PATCH 118/254] Reduce garbage produced by (*types.Func).FullName --- lint/lint.go | 49 +++++++++++++++++++++++++++++++++++++++++ lint/lintdsl/lintdsl.go | 6 ++--- staticcheck/lint.go | 4 ++-- staticcheck/vrp/vrp.go | 3 ++- 4 files changed, 56 insertions(+), 6 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 5f886825f..c9ddf0601 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -2,6 +2,7 @@ package lint // import "honnef.co/go/tools/lint" import ( + "bytes" "fmt" "go/ast" "go/token" @@ -709,3 +710,51 @@ func allPackages(pkgs []*packages.Package) []*packages.Package { ) return out } + +var bufferPool = &sync.Pool{ + New: func() interface{} { + buf := bytes.NewBuffer(nil) + buf.Grow(64) + return buf + }, +} + +func FuncName(f *types.Func) string { + buf := bufferPool.Get().(*bytes.Buffer) + buf.Reset() + if f.Type() != nil { + sig := f.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + buf.WriteByte('(') + if _, ok := recv.Type().(*types.Interface); ok { + // gcimporter creates abstract methods of + // named interfaces using the interface type + // (not the named type) as the receiver. 
+ // Don't print it in full. + buf.WriteString("interface") + } else { + types.WriteType(buf, recv.Type(), nil) + } + buf.WriteByte(')') + buf.WriteByte('.') + } else if f.Pkg() != nil { + writePackage(buf, f.Pkg()) + } + } + buf.WriteString(f.Name()) + s := buf.String() + bufferPool.Put(buf) + return s +} + +func writePackage(buf *bytes.Buffer, pkg *types.Package) { + if pkg == nil { + return + } + var s string + s = pkg.Path() + if s != "" { + buf.WriteString(s) + buf.WriteByte('.') + } +} diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 2fa5708fa..c3943bd6f 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -30,7 +30,7 @@ func CallName(call *ssa.CallCommon) string { if !ok { return "" } - return fn.FullName() + return lint.FuncName(fn) case *ssa.Builtin: return v.Name() } @@ -239,12 +239,12 @@ func CallNameAST(j *lint.Job, call *ast.CallExpr) string { if !ok { return "" } - return fn.FullName() + return lint.FuncName(fn) case *ast.Ident: obj := ObjectOf(j, fun) switch obj := obj.(type) { case *types.Func: - return obj.FullName() + return lint.FuncName(obj) case *types.Builtin: return obj.Name() default: diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 0c5612f5a..686242e21 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -771,7 +771,7 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { if !ok { return } - if fn.FullName() == "(*sync.WaitGroup).Add" { + if lint.FuncName(fn) == "(*sync.WaitGroup).Add" { j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", Render(j, stmt)) } @@ -2540,7 +2540,7 @@ func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { continue } - r, ok := rules[obj.FullName()] + r, ok := rules[lint.FuncName(obj)] if !ok { continue } diff --git a/staticcheck/vrp/vrp.go b/staticcheck/vrp/vrp.go index cb17f042a..a0011ed08 100644 --- a/staticcheck/vrp/vrp.go +++ b/staticcheck/vrp/vrp.go @@ -12,6 +12,7 @@ import ( "sort" "strings" + "honnef.co/go/tools/lint" "honnef.co/go/tools/ssa" ) @@ -291,7 +292,7 @@ func BuildGraph(f *ssa.Function) *Graph { case *ssa.Call: if static := ins.Common().StaticCallee(); static != nil { if fn, ok := static.Object().(*types.Func); ok { - switch fn.FullName() { + switch lint.FuncName(fn) { case "bytes.Index", "bytes.IndexAny", "bytes.IndexByte", "bytes.IndexFunc", "bytes.IndexRune", "bytes.LastIndex", "bytes.LastIndexAny", "bytes.LastIndexByte", "bytes.LastIndexFunc", From 4f6f8a7af5cde03bbee3b8579faf0fbb2c1057ed Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Apr 2019 18:54:02 +0200 Subject: [PATCH 119/254] staticcheck: improve documentation --- staticcheck/doc.go | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 9a6c74fff..07a39ef45 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -115,6 +115,9 @@ Available since var docSA1010 = `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results +If n >= 0, the function returns at most n matches/submatches. To +return all results, specify a negative number. + Available since 2017.1 ` @@ -176,6 +179,9 @@ Available since var docSA1018 = `strings.Replace called with n == 0, which does nothing +With n == 0, zero instances will be replaced. To replace all +instances, use a negative number, or use strings.ReplaceAll. 
+ Available since 2017.1 ` @@ -270,6 +276,13 @@ Available since var docSA3000 = `TestMain doesn't call os.Exit, hiding test failures +Test executables (and in turn 'go test') exit with a non-zero status +code if any tests failed. When specifying your own TestMain function, +it is your responsibility to arrange for this, by calling os.Exit with +the correct code. The correct code is returned by (*testing.M).Run, so +the usual way of implementing TestMain is to end it with +os.Exit(m.Run()). + Available since 2017.1 ` From 6596888f3453f7af2ffa567a917e88783a457082 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Apr 2019 19:01:47 +0200 Subject: [PATCH 120/254] Delete errcheck Writing a useful version of errcheck requires whole-program analysis and points-to analysis. It would be incredibly slow and memory intensive for real world applications. We can't possibly run it as part of staticcheck, and the Go community is not open to static analysis that needs hours or days to complete. --- cmd/errcheck-ng/README.md | 5 - cmd/errcheck-ng/errcheck-ng.go | 13 -- errcheck/errcheck.go | 158 ------------------ errcheck/errcheck_test.go | 12 -- .../src/CheckErrcheck/CheckErrcheck.go | 156 ----------------- 5 files changed, 344 deletions(-) delete mode 100644 cmd/errcheck-ng/README.md delete mode 100644 cmd/errcheck-ng/errcheck-ng.go delete mode 100644 errcheck/errcheck.go delete mode 100644 errcheck/errcheck_test.go delete mode 100644 errcheck/testdata/src/CheckErrcheck/CheckErrcheck.go diff --git a/cmd/errcheck-ng/README.md b/cmd/errcheck-ng/README.md deleted file mode 100644 index 3693e32a9..000000000 --- a/cmd/errcheck-ng/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# errcheck-ng - -_errcheck-ng_ is the next generation of errcheck. - -It is a prototype and not fit for use. diff --git a/cmd/errcheck-ng/errcheck-ng.go b/cmd/errcheck-ng/errcheck-ng.go deleted file mode 100644 index daae655ee..000000000 --- a/cmd/errcheck-ng/errcheck-ng.go +++ /dev/null @@ -1,13 +0,0 @@ -package main // import "honnef.co/go/tools/cmd/errcheck-ng" - -import ( - "os" - - "honnef.co/go/tools/errcheck" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" -) - -func main() { - lintutil.ProcessArgs("errcheck-ng", []lint.Checker{errcheck.NewChecker()}, os.Args[1:]) -} diff --git a/errcheck/errcheck.go b/errcheck/errcheck.go deleted file mode 100644 index 713b164d1..000000000 --- a/errcheck/errcheck.go +++ /dev/null @@ -1,158 +0,0 @@ -package errcheck - -import ( - "go/types" - - "honnef.co/go/tools/functions" - "honnef.co/go/tools/lint" - . 
"honnef.co/go/tools/lint/lintdsl" - "honnef.co/go/tools/ssa" -) - -type Checker struct { - funcDescs *functions.Descriptions -} - -func NewChecker() *Checker { - return &Checker{} -} - -func (*Checker) Name() string { return "errcheck" } -func (*Checker) Prefix() string { return "ERR" } - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "ERR1000", FilterGenerated: false, Fn: c.CheckErrcheck}, - } -} - -func (c *Checker) Init(prog *lint.Program) { - c.funcDescs = functions.NewDescriptions(prog.SSA) -} - -func (c *Checker) CheckErrcheck(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { - for _, b := range ssafn.Blocks { - for _, ins := range b.Instrs { - ssacall, ok := ins.(ssa.CallInstruction) - if !ok { - continue - } - - switch CallName(ssacall.Common()) { - case "fmt.Print", "fmt.Println", "fmt.Printf": - continue - } - isRecover := false - if builtin, ok := ssacall.Common().Value.(*ssa.Builtin); ok { - isRecover = ok && builtin.Name() == "recover" - } - - switch ins := ins.(type) { - case ssa.Value: - refs := ins.Referrers() - if refs == nil || len(FilterDebug(*refs)) != 0 { - continue - } - case ssa.Instruction: - // will be a 'go' or 'defer', neither of which has usable return values - default: - // shouldn't happen - continue - } - - if ssacall.Common().IsInvoke() { - if sc, ok := ssacall.Common().Value.(*ssa.Call); ok { - // TODO(dh): support multiple levels of - // interfaces, not just one - ssafn := sc.Common().StaticCallee() - if ssafn != nil { - ct := c.funcDescs.Get(ssafn).ConcreteReturnTypes - // TODO(dh): support >1 concrete types - if len(ct) == 1 { - // TODO(dh): do we have access to a - // cached method set somewhere? - ms := types.NewMethodSet(ct[0].At(ct[0].Len() - 1).Type()) - // TODO(dh): where can we get the pkg - // for Lookup? 
Passing nil works fine - // for exported methods, but will fail - // on unexported ones - // TODO(dh): holy nesting and poor - // variable names, clean this up - fn, _ := ms.Lookup(nil, ssacall.Common().Method.Name()).Obj().(*types.Func) - if fn != nil { - ssafn := j.Program.SSA.FuncValue(fn) - if ssafn != nil { - if c.funcDescs.Get(ssafn).NilError { - continue - } - } - } - } - } - } - } else { - ssafn := ssacall.Common().StaticCallee() - if ssafn != nil { - if c.funcDescs.Get(ssafn).NilError { - // Don't complain when the error is known to be nil - continue - } - } - } - switch CallName(ssacall.Common()) { - case "(*os.File).Close": - recv := ssacall.Common().Args[0] - if isReadOnlyFile(recv, nil) { - continue - } - } - - res := ssacall.Common().Signature().Results() - if res.Len() == 0 { - continue - } - if !isRecover { - last := res.At(res.Len() - 1) - if types.TypeString(last.Type(), nil) != "error" { - continue - } - } - j.Errorf(ins, "unchecked error") - } - } - } -} - -func isReadOnlyFile(val ssa.Value, seen map[ssa.Value]bool) bool { - if seen == nil { - seen = map[ssa.Value]bool{} - } - if seen[val] { - return true - } - seen[val] = true - switch val := val.(type) { - case *ssa.Phi: - for _, edge := range val.Edges { - if !isReadOnlyFile(edge, seen) { - return false - } - } - return true - case *ssa.Extract: - call, ok := val.Tuple.(*ssa.Call) - if !ok { - return false - } - switch CallName(call.Common()) { - case "os.Open": - return true - case "os.OpenFile": - flags, ok := call.Common().Args[1].(*ssa.Const) - return ok && flags.Uint64() == 0 - } - return false - } - return false -} diff --git a/errcheck/errcheck_test.go b/errcheck/errcheck_test.go deleted file mode 100644 index 55a9e1c5b..000000000 --- a/errcheck/errcheck_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package errcheck - -import ( - "testing" - - "honnef.co/go/tools/lint/testutil" -) - -func TestAll(t *testing.T) { - c := NewChecker() - testutil.TestAll(t, c, "") -} diff --git a/errcheck/testdata/src/CheckErrcheck/CheckErrcheck.go b/errcheck/testdata/src/CheckErrcheck/CheckErrcheck.go deleted file mode 100644 index 932d0be80..000000000 --- a/errcheck/testdata/src/CheckErrcheck/CheckErrcheck.go +++ /dev/null @@ -1,156 +0,0 @@ -package pkg - -import ( - "bytes" - "crypto/md5" - "errors" - "fmt" - "io/ioutil" - "math/rand" - "os" -) - -type t struct{} - -func (x t) a() error { - fmt.Println("this method returns an error") - fmt.Println("this method also returns an error") - return errors.New("") -} - -type u struct { - t t -} - -func a() error { - fmt.Println("this function returns an error") - return errors.New("") -} - -func b() (int, error) { - fmt.Println("this function returns an int and an error") - return 0, errors.New("") -} - -func c() int { - fmt.Println("this function returns an int") - return 7 -} - -func rec() { - defer func() { - recover() // MATCH /unchecked error/ - _ = recover() - }() - defer recover() // MATCH /unchecked error/ -} - -func nilError() error { - println("") - return nil -} - -type MyError string - -func (e MyError) Error() string { - return string(e) -} - -func customError() error { - println() // not pure - return MyError("an error occurred") -} - -type MyPointerError string - -func (e *MyPointerError) Error() string { - return string(*e) -} - -func main() { - // Single error return - _ = a() - a() // MATCH /unchecked error/ - - // Return another value and an error - _, _ = b() - b() // MATCH /unchecked error/ - - // Return a custom error type - _ = customError() - customError() // MATCH 
/unchecked error/ - - // Method with a single error return - x := t{} - _ = x.a() - x.a() // MATCH /unchecked error/ - - // Method call on a struct member - y := u{x} - _ = y.t.a() - y.t.a() // MATCH /unchecked error/ - - m1 := map[string]func() error{"a": a} - _ = m1["a"]() - m1["a"]() // MATCH /unchecked error/ - - // Additional cases for assigning errors to blank identifier - z, _ := b() - _, w := a(), 5 - - // Assign non error to blank identifier - _ = c() - - _ = z + w // Avoid complaints about unused variables - - // Goroutine - go a() // MATCH /unchecked error/ - defer a() // MATCH /unchecked error/ - - b1 := bytes.Buffer{} - b2 := &bytes.Buffer{} - b1.Write(nil) - b2.Write(nil) - rand.Read(nil) - - ioutil.ReadFile("main.go") // MATCH /unchecked error/ - - nilError() - - err := customError() // MATCH /unchecked error/ - err = customError() - if err != nil { - println() - } - - f1, _ := os.Open("") - f1.Close() - - f2, _ := os.OpenFile("", os.O_RDONLY, 0) - f2.Close() - - f3, _ := os.Create("") - f3.Close() // MATCH /unchecked error/ - - f4, _ := os.OpenFile("", os.O_WRONLY, 0) - f4.Close() // MATCH /unchecked error/ - - var f5 *os.File - if true { - f5, _ = os.Open("a") - } else { - f5, _ = os.Open("b") - } - f5.Close() - - var f6 *os.File - if true { - f6, _ = os.Open("a") - } else { - f6, _ = os.Create("b") - } - f6.Close() // MATCH /unchecked error/ - - h := md5.New() - h.Write(nil) -} From f988b0754fd1a5d20212ad6d8506000f1932ea8b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 14 Apr 2019 05:28:27 +0200 Subject: [PATCH 121/254] lint: remove map from import paths to packages This mapping is not actually 1 to 1. Test variants and vendoring mean that the same path can refer to many different packages. Especially test variants in the standard library make this very complex because of transitive test dependencies. Especially our only use of the map, an Implements function, is inherently unreliable. We can't go from "fmt.Formatter" to the correct package and type. We don't know _which_ fmt package a value may end up in, which means we don't know which fmt.Formatter to apply. These checks will have to be implemented differently. For example, fmt.Stringer can be implemented soundly by looking for a correctly formed String method. fmt.Formatter, on the other hand, will have to rely on less precise string matching and risk matching too many types. --- lint/lint.go | 9 --------- simple/lint.go | 41 ++++++++++++++++++----------------------- 2 files changed, 18 insertions(+), 32 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index c9ddf0601..8000f8d6d 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -123,7 +123,6 @@ type Program struct { GoVersion int tokenFileMap map[*token.File]tokenFileMapEntry - packagesMap map[string]*packages.Package genMu sync.RWMutex generatedMap map[string]bool @@ -317,10 +316,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { tokenFileMap: map[*token.File]tokenFileMapEntry{}, generatedMap: map[string]bool{}, } - prog.packagesMap = map[string]*packages.Package{} - for _, pkg := range allPkgs { - prog.packagesMap[pkg.Types.Path()] = pkg - } isInitial := map[*types.Package]struct{}{} for _, pkg := range pkgs { @@ -618,10 +613,6 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { return allowedChecks } -func (prog *Program) Package(path string) *packages.Package { - return prog.packagesMap[path] -} - // Pkg represents a package being linted. 
type Pkg struct { SSA *ssa.Package diff --git a/simple/lint.go b/simple/lint.go index 37103f5fc..bf7da38d2 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -1496,33 +1496,28 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { InspectPreorder(j, []ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) } -func (c *Checker) Implements(j *lint.Job, typ types.Type, iface string) bool { - // OPT(dh): we can cache the type lookup - idx := strings.IndexRune(iface, '.') - var scope *types.Scope - var ifaceName string - if idx == -1 { - scope = types.Universe - ifaceName = iface - } else { - pkgName := iface[:idx] - pkg := j.Program.Package(pkgName) - if pkg == nil { - return false - } - scope = pkg.Types.Scope() - ifaceName = iface[idx+1:] - } - - obj := scope.Lookup(ifaceName) - if obj == nil { +func isStringer(T types.Type) bool { + ms := types.NewMethodSet(T) + sel := ms.Lookup(nil, "String") + if sel == nil { return false } - i, ok := obj.Type().Underlying().(*types.Interface) + fn, ok := sel.Obj().(*types.Func) if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 { + return false + } + if sig.Results().Len() != 1 { + return false + } + if !IsType(sig.Results().At(0).Type(), "string") { return false } - return types.Implements(typ, i) + return true } func (c *Checker) LintRedundantSprintf(j *lint.Job) { @@ -1540,7 +1535,7 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { arg := call.Args[Arg("fmt.Sprintf.a[0]")] typ := TypeOf(j, arg) - if c.Implements(j, typ, "fmt.Stringer") { + if isStringer(typ) { j.Errorf(call, "should use String() instead of fmt.Sprintf") return } From 896e58ba2df136bf605eb64a83bd3fd7c99824b7 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 13 Apr 2019 19:39:29 +0200 Subject: [PATCH 122/254] staticcheck: check printf format strings and arguments --- lint/lintdsl/lintdsl.go | 2 +- ssautil/ssautil.go | 17 + staticcheck/lint.go | 378 +++++++++++++++++ .../testdata/src/CheckPrintf/CheckPrintf.go | 397 ++++++++++++++++++ .../CheckUnsafePrintf/CheckUnsafePrintf.go | 5 +- 5 files changed, 795 insertions(+), 4 deletions(-) create mode 100644 staticcheck/testdata/src/CheckPrintf/CheckPrintf.go diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index c3943bd6f..453e09e24 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -63,7 +63,7 @@ func IsExample(fn *ssa.Function) bool { func IsPointerLike(T types.Type) bool { switch T := T.Underlying().(type) { - case *types.Interface, *types.Chan, *types.Map, *types.Pointer: + case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer: return true case *types.Basic: return T.Kind() == types.UnsafePointer diff --git a/ssautil/ssautil.go b/ssautil/ssautil.go index a18f849ec..72c3c919d 100644 --- a/ssautil/ssautil.go +++ b/ssautil/ssautil.go @@ -39,3 +39,20 @@ func Walk(b *ssa.BasicBlock, fn func(*ssa.BasicBlock) bool) { wl = append(wl, b.Succs...) 
} } + +func Vararg(x *ssa.Slice) ([]ssa.Value, bool) { + var out []ssa.Value + slice, ok := x.X.(*ssa.Alloc) + if !ok || slice.Comment != "varargs" { + return nil, false + } + for _, ref := range *slice.Referrers() { + idx, ok := ref.(*ssa.IndexAddr) + if !ok { + continue + } + v := (*idx.Referrers())[0].(*ssa.Store).Val + out = append(out, v) + } + return out, true +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 686242e21..637093831 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -25,6 +25,7 @@ import ( "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . "honnef.co/go/tools/lint/lintdsl" + "honnef.co/go/tools/printf" "honnef.co/go/tools/ssa" "honnef.co/go/tools/ssautil" "honnef.co/go/tools/staticcheck/vrp" @@ -243,8 +244,384 @@ var ( "sync/atomic.SwapInt64": checkAtomicAlignmentImpl, "sync/atomic.SwapUint64": checkAtomicAlignmentImpl, } + + // TODO(dh): detect printf wrappers + checkPrintfRules = map[string]CallCheck{ + "fmt.Errorf": func(call *Call) { checkPrintfCall(call, 0, 1) }, + "fmt.Printf": func(call *Call) { checkPrintfCall(call, 0, 1) }, + "fmt.Sprintf": func(call *Call) { checkPrintfCall(call, 0, 1) }, + "fmt.Fprintf": func(call *Call) { checkPrintfCall(call, 1, 2) }, + } ) +func checkPrintfCall(call *Call, fIdx, vIdx int) { + f := call.Args[fIdx] + var args []ssa.Value + switch v := call.Args[vIdx].Value.Value.(type) { + case *ssa.Slice: + var ok bool + args, ok = ssautil.Vararg(v) + if !ok { + // We don't know what the actual arguments to the function are + return + } + case *ssa.Const: + // nil, i.e. no arguments + default: + // We don't know what the actual arguments to the function are + return + } + checkPrintfCallImpl(call, f.Value.Value, args) +} + +type verbFlag int + +const ( + isInt verbFlag = 1 << iota + isBool + isFP + isString + isPointer + isPseudoPointer + isSlice + isAny + noRecurse +) + +var verbs = [...]verbFlag{ + 'b': isPseudoPointer | isInt | isFP, + 'c': isInt, + 'd': isPseudoPointer | isInt, + 'e': isFP, + 'E': isFP, + 'f': isFP, + 'F': isFP, + 'g': isFP, + 'G': isFP, + 'o': isPseudoPointer | isInt, + 'p': isSlice | isPointer | noRecurse, + 'q': isInt | isString, + 's': isString, + 't': isBool, + 'T': isAny, + 'U': isInt, + 'v': isAny, + 'X': isPseudoPointer | isInt | isString, + 'x': isPseudoPointer | isInt | isString, +} + +func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { + var elem func(T types.Type, verb rune) ([]types.Type, bool) + elem = func(T types.Type, verb rune) ([]types.Type, bool) { + if verbs[verb]&noRecurse != 0 { + return []types.Type{T}, false + } + switch T := T.(type) { + case *types.Slice: + if verbs[verb]&isSlice != 0 { + return []types.Type{T}, false + } + if verbs[verb]&isString != 0 && IsType(T.Elem().Underlying(), "byte") { + return []types.Type{T}, false + } + return []types.Type{T.Elem()}, true + case *types.Map: + key := T.Key() + val := T.Elem() + return []types.Type{key, val}, true + case *types.Struct: + out := make([]types.Type, 0, T.NumFields()) + for i := 0; i < T.NumFields(); i++ { + out = append(out, T.Field(i).Type()) + } + return out, true + case *types.Array: + return []types.Type{T.Elem()}, true + default: + return []types.Type{T}, false + } + } + isInfo := func(T types.Type, info types.BasicInfo) bool { + basic, ok := T.Underlying().(*types.Basic) + return ok && basic.Info()&info != 0 + } + + isStringer := func(T types.Type, ms *types.MethodSet) bool { + sel := ms.Lookup(nil, "String") + if sel == nil { + return false + } + fn, ok := 
sel.Obj().(*types.Func) + if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 { + return false + } + if sig.Results().Len() != 1 { + return false + } + if !IsType(sig.Results().At(0).Type(), "string") { + return false + } + return true + } + isError := func(T types.Type, ms *types.MethodSet) bool { + sel := ms.Lookup(nil, "Error") + if sel == nil { + return false + } + fn, ok := sel.Obj().(*types.Func) + if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 0 { + return false + } + if sig.Results().Len() != 1 { + return false + } + if !IsType(sig.Results().At(0).Type(), "string") { + return false + } + return true + } + + isFormatter := func(T types.Type, ms *types.MethodSet) bool { + sel := ms.Lookup(nil, "Format") + if sel == nil { + return false + } + fn, ok := sel.Obj().(*types.Func) + if !ok { + // should be unreachable + return false + } + sig := fn.Type().(*types.Signature) + if sig.Params().Len() != 2 { + return false + } + // TODO(dh): check the types of the arguments for more + // precision + if sig.Results().Len() != 0 { + return false + } + return true + } + + seen := map[types.Type]bool{} + var checkType func(verb rune, T types.Type, top bool) bool + checkType = func(verb rune, T types.Type, top bool) bool { + if top { + for k := range seen { + delete(seen, k) + } + } + if seen[T] { + return true + } + seen[T] = true + if int(verb) >= len(verbs) { + // Unknown verb + return true + } + + flags := verbs[verb] + if flags == 0 { + // Unknown verb + return true + } + + ms := types.NewMethodSet(T) + if isFormatter(T, ms) { + // the value is responsible for formatting itself + return true + } + + if flags&isString != 0 && (isStringer(T, ms) || isError(T, ms)) { + // Check for stringer early because we're about to dereference + return true + } + + T = T.Underlying() + if flags&(isPointer|isPseudoPointer) == 0 && top { + T = Dereference(T) + } + if flags&isPseudoPointer != 0 && top { + t := Dereference(T) + if _, ok := t.Underlying().(*types.Struct); ok { + T = t + } + } + + if _, ok := T.(*types.Interface); ok { + // We don't know what's in the interface + return true + } + + var info types.BasicInfo + if flags&isInt != 0 { + info |= types.IsInteger + } + if flags&isBool != 0 { + info |= types.IsBoolean + } + if flags&isFP != 0 { + info |= types.IsFloat | types.IsComplex + } + if flags&isString != 0 { + info |= types.IsString + } + + if info != 0 && isInfo(T, info) { + return true + } + + if flags&isString != 0 && (IsType(T, "[]byte") || isStringer(T, ms) || isError(T, ms)) { + return true + } + + if flags&isPointer != 0 && IsPointerLike(T) { + return true + } + if flags&isPseudoPointer != 0 { + switch U := T.Underlying().(type) { + case *types.Pointer: + if !top { + return true + } + + if _, ok := U.Elem().Underlying().(*types.Struct); !ok { + return true + } + case *types.Chan, *types.Signature: + return true + } + } + + if flags&isSlice != 0 { + if _, ok := T.(*types.Slice); ok { + return true + } + } + + if flags&isAny != 0 { + return true + } + + elems, ok := elem(T.Underlying(), verb) + if !ok { + return false + } + for _, elem := range elems { + if !checkType(verb, elem, false) { + return false + } + } + + return true + } + + k, ok := f.(*ssa.Const) + if !ok { + return + } + actions, err := printf.Parse(constant.StringVal(k.Value)) + if err != nil { + call.Invalid("couldn't parse format string") + return + } + + ptr := 1 + hasExplicit := false 
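+	// ptr tracks the 1-based index of the next argument that an
+	// implicit verb will consume. hasExplicit is set once a verb or a
+	// star (width/precision) uses an explicit argument index; in that
+	// case the final check for unconsumed arguments is skipped.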
+ + checkStar := func(verb printf.Verb, star printf.Argument) bool { + if star, ok := star.(printf.Star); ok { + idx := 0 + if star.Index == -1 { + idx = ptr + ptr++ + } else { + hasExplicit = true + idx = star.Index + ptr = star.Index + 1 + } + if idx == 0 { + call.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw)) + return false + } + if idx > len(args) { + call.Invalid( + fmt.Sprintf("Printf format %s reads arg #%d, but call has only %d args", + verb.Raw, idx, len(args))) + return false + } + if arg, ok := args[idx-1].(*ssa.MakeInterface); ok { + if !isInfo(arg.X.Type(), types.IsInteger) { + call.Invalid(fmt.Sprintf("Printf format %s reads non-int arg #%d as argument of *", verb.Raw, idx)) + } + } + } + return true + } + + // We only report one problem per format string. Making a + // mistake with an index tends to invalidate all future + // implicit indices. + for _, action := range actions { + verb, ok := action.(printf.Verb) + if !ok { + continue + } + + if !checkStar(verb, verb.Width) || !checkStar(verb, verb.Precision) { + return + } + + off := ptr + if verb.Value != -1 { + hasExplicit = true + off = verb.Value + } + if off > len(args) { + call.Invalid( + fmt.Sprintf("Printf format %s reads arg #%d, but call has only %d args", + verb.Raw, off, len(args))) + return + } else if verb.Value == 0 && verb.Letter != '%' { + call.Invalid(fmt.Sprintf("Printf format %s reads invalid arg 0; indices are 1-based", verb.Raw)) + return + } else if off != 0 { + arg, ok := args[off-1].(*ssa.MakeInterface) + if ok { + if !checkType(verb.Letter, arg.X.Type(), true) { + call.Invalid(fmt.Sprintf("Printf format %s has arg #%d of wrong type %s", + verb.Raw, ptr, args[ptr-1].(*ssa.MakeInterface).X.Type())) + return + } + } + } + + switch verb.Value { + case -1: + // Consume next argument + ptr++ + case 0: + // Don't consume any arguments + default: + ptr = verb.Value + 1 + } + } + + if !hasExplicit && ptr <= len(args) { + call.Invalid(fmt.Sprintf("Printf call needs %d args but has %d args", ptr-1, len(args))) + } +} + func checkAtomicAlignmentImpl(call *Call) { sizes := call.Job.Program.InitialPackages[0].TypesSizes if sizes.Sizeof(types.Typ[types.Uintptr]) != 4 { @@ -434,6 +811,7 @@ func (c *Checker) Checks() []lint.Check { {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer, Doc: docSA5005}, {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion, Doc: docSA5007}, {ID: "SA5008", FilterGenerated: false, Fn: c.CheckStructTags, Doc: ``}, + {ID: "SA5009", FilterGenerated: false, Fn: c.callChecker(checkPrintfRules), Doc: ``}, {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules), Doc: docSA6000}, {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey, Doc: docSA6001}, diff --git a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go new file mode 100644 index 000000000..1433dc1c2 --- /dev/null +++ b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go @@ -0,0 +1,397 @@ +// Package pkg is amazing. 
+package pkg + +import ( + "fmt" + "math/big" + "os" +) + +type Error int + +func (Error) Error() string { return "" } + +func fn() { + var b bool + var i int + var r rune + var s string + var x float64 + var p *int + var imap map[int]int + var fslice []float64 + var c complex64 + // Some good format/argtypes + fmt.Printf("") + fmt.Printf("%b %b %b", 3, i, x) + fmt.Printf("%c %c %c %c", 3, i, 'x', r) + fmt.Printf("%d %d %d", 3, i, imap) + fmt.Printf("%e %e %e %e", 3e9, x, fslice, c) + fmt.Printf("%E %E %E %E", 3e9, x, fslice, c) + fmt.Printf("%f %f %f %f", 3e9, x, fslice, c) + fmt.Printf("%F %F %F %F", 3e9, x, fslice, c) + fmt.Printf("%g %g %g %g", 3e9, x, fslice, c) + fmt.Printf("%G %G %G %G", 3e9, x, fslice, c) + fmt.Printf("%b %b %b %b", 3e9, x, fslice, c) + fmt.Printf("%o %o", 3, i) + fmt.Printf("%p", p) + fmt.Printf("%q %q %q %q", 3, i, 'x', r) + fmt.Printf("%s %s %s", "hi", s, []byte{65}) + fmt.Printf("%t %t", true, b) + fmt.Printf("%T %T", 3, i) + fmt.Printf("%U %U", 3, i) + fmt.Printf("%v %v", 3, i) + fmt.Printf("%x %x %x %x", 3, i, "hi", s) + fmt.Printf("%X %X %X %X", 3, i, "hi", s) + fmt.Printf("%.*s %d %g", 3, "hi", 23, 2.3) + fmt.Printf("%s", &stringerv) + fmt.Printf("%v", &stringerv) + fmt.Printf("%T", &stringerv) + fmt.Printf("%s", &embeddedStringerv) + fmt.Printf("%v", &embeddedStringerv) + fmt.Printf("%T", &embeddedStringerv) + fmt.Printf("%v", notstringerv) + fmt.Printf("%T", notstringerv) + fmt.Printf("%q", stringerarrayv) + fmt.Printf("%v", stringerarrayv) + fmt.Printf("%s", stringerarrayv) + fmt.Printf("%v", notstringerarrayv) + fmt.Printf("%T", notstringerarrayv) + fmt.Printf("%d", new(fmt.Formatter)) + fmt.Printf("%f", new(big.Float)) + fmt.Printf("%*%", 2) // Ridiculous but allowed. + fmt.Printf("%s", interface{}(nil)) // Nothing useful we can say. 
+ + fmt.Printf("%g", 1+2i) + fmt.Printf("%#e %#E %#f %#F %#g %#G", 1.2, 1.2, 1.2, 1.2, 1.2, 1.2) // OK since Go 1.9 + // Some bad format/argTypes + fmt.Printf("%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" + _ = fmt.Sprintf("%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" + fmt.Fprintf(os.Stdout, "%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" + fmt.Printf("%t", c) // MATCH "Printf format %t has arg #1 of wrong type complex64" + fmt.Printf("%t", 1+2i) // MATCH "Printf format %t has arg #1 of wrong type complex128" + fmt.Printf("%c", 2.3) // MATCH "Printf format %c has arg #1 of wrong type float64" + fmt.Printf("%d", 2.3) // MATCH "Printf format %d has arg #1 of wrong type float64" + fmt.Printf("%e", "hi") // MATCH "Printf format %e has arg #1 of wrong type string" + fmt.Printf("%E", true) // MATCH "Printf format %E has arg #1 of wrong type bool" + fmt.Printf("%f", "hi") // MATCH "Printf format %f has arg #1 of wrong type string" + fmt.Printf("%F", 'x') // MATCH "Printf format %F has arg #1 of wrong type rune" + fmt.Printf("%g", "hi") // MATCH "Printf format %g has arg #1 of wrong type string" + fmt.Printf("%g", imap) // MATCH "Printf format %g has arg #1 of wrong type map[int]int" + fmt.Printf("%G", i) // MATCH "Printf format %G has arg #1 of wrong type int" + fmt.Printf("%o", x) // MATCH "Printf format %o has arg #1 of wrong type float64" + fmt.Printf("%p", 23) // MATCH "Printf format %p has arg #1 of wrong type int" + fmt.Printf("%q", x) // MATCH "Printf format %q has arg #1 of wrong type float64" + fmt.Printf("%s", b) // MATCH "Printf format %s has arg #1 of wrong type bool" + fmt.Printf("%s", byte(65)) // MATCH "Printf format %s has arg #1 of wrong type byte" + fmt.Printf("%t", 23) // MATCH "Printf format %t has arg #1 of wrong type int" + fmt.Printf("%U", x) // MATCH "Printf format %U has arg #1 of wrong type float64" + fmt.Printf("%X", 2.3) // MATCH "Printf format %X has arg #1 of wrong type float64" + fmt.Printf("%s", stringerv) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.ptrStringer" + fmt.Printf("%t", stringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.ptrStringer" + fmt.Printf("%s", embeddedStringerv) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.embeddedStringer" + fmt.Printf("%t", embeddedStringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.embeddedStringer" + fmt.Printf("%q", notstringerv) // MATCH "Printf format %q has arg #1 of wrong type CheckPrintf.notstringer" + fmt.Printf("%t", notstringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.notstringer" + fmt.Printf("%t", stringerarrayv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.stringerarray" + fmt.Printf("%t", notstringerarrayv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.notstringerarray" + fmt.Printf("%q", notstringerarrayv) // MATCH "Printf format %q has arg #1 of wrong type CheckPrintf.notstringerarray" + fmt.Printf("%d", BoolFormatter(true)) // MATCH "Printf format %d has arg #1 of wrong type CheckPrintf.BoolFormatter" + fmt.Printf("%z", FormatterVal(true)) // correct (the type is responsible for formatting) + fmt.Printf("%d", FormatterVal(true)) // correct (the type is responsible for formatting) + fmt.Printf("%s", nonemptyinterface) // correct (the type is responsible for formatting) + fmt.Printf("%.*s %d %6g", 3, "hi", 23, 'x') // MATCH "Printf format %6g has arg #4 of wrong type rune" + fmt.Printf("%s", 
"hi", 3) // MATCH "Printf call needs 1 args but has 2 args" + fmt.Printf("%"+("s"), "hi", 3) // MATCH "Printf call needs 1 args but has 2 args" + fmt.Printf("%s%%%d", "hi", 3) // correct + fmt.Printf("%08s", "woo") // correct + fmt.Printf("% 8s", "woo") // correct + fmt.Printf("%.*d", 3, 3) // correct + fmt.Printf("%.*d x", 3, 3, 3, 3) // MATCH "Printf call needs 2 args but has 4 args" + fmt.Printf("%.*d x", "hi", 3) // MATCH "Printf format %.*d reads non-int arg #1 as argument of *" + fmt.Printf("%.*d x", i, 3) // correct + fmt.Printf("%.*d x", s, 3) // MATCH "Printf format %.*d reads non-int arg #1 as argument of *" + fmt.Printf("%*% x", 0.22) // MATCH "Printf format %*% reads non-int arg #1 as argument of *" + fmt.Printf("%q %q", multi()...) // ok + fmt.Printf("%#q", `blah`) // ok + const format = "%s %s\n" + fmt.Printf(format, "hi", "there") + fmt.Printf(format, "hi") // MATCH "Printf format %s reads arg #2, but call has only 1 args" + fmt.Printf("%s %d %.3v %q", "str", 4) // MATCH "Printf format %.3v reads arg #3, but call has only 2 args" + + fmt.Printf("%#s", FormatterVal(true)) // correct (the type is responsible for formatting) + fmt.Printf("d%", 2) // MATCH "couldn't parse format string" + fmt.Printf("%d", percentDV) + fmt.Printf("%d", &percentDV) + fmt.Printf("%d", notPercentDV) // MATCH "Printf format %d has arg #1 of wrong type CheckPrintf.notPercentDStruct" + fmt.Printf("%d", ¬PercentDV) // MATCH "Printf format %d has arg #1 of wrong type *CheckPrintf.notPercentDStruct" + fmt.Printf("%p", ¬PercentDV) // Works regardless: we print it as a pointer. + fmt.Printf("%q", &percentDV) // MATCH "Printf format %q has arg #1 of wrong type *CheckPrintf.percentDStruct" + fmt.Printf("%s", percentSV) + fmt.Printf("%s", &percentSV) + // Good argument reorderings. + fmt.Printf("%[1]d", 3) + fmt.Printf("%[1]*d", 3, 1) + fmt.Printf("%[2]*[1]d", 1, 3) + fmt.Printf("%[2]*.[1]*[3]d", 2, 3, 4) + fmt.Fprintf(os.Stderr, "%[2]*.[1]*[3]d", 2, 3, 4) // Use Fprintf to make sure we count arguments correctly. + // Bad argument reorderings. + fmt.Printf("%[xd", 3) // MATCH "couldn't parse format string" + fmt.Printf("%[x]d x", 3) // MATCH "couldn't parse format string" + fmt.Printf("%[3]*s x", "hi", 2) // MATCH "Printf format %[3]*s reads arg #3, but call has only 2 args" + fmt.Printf("%[3]d x", 2) // MATCH "Printf format %[3]d reads arg #3, but call has only 1 args" + fmt.Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // MATCH "Printf format %[2]*.[1]*[3]d reads non-int arg #2 as argument of *" + fmt.Printf("%[0]s x", "arg1") // MATCH "Printf format %[0]s reads invalid arg 0; indices are 1-based" + fmt.Printf("%[0]d x", 1) // MATCH "Printf format %[0]d reads invalid arg 0; indices are 1-based" + + // Interfaces can be used with any verb. + var iface interface { + ToTheMadness() bool // Method ToTheMadness usually returns false + } + fmt.Printf("%f", iface) // ok: fmt treats interfaces as transparent and iface may well have a float concrete type + // Can print functions in many ways + fmt.Printf("%s", someFunction) // MATCH "Printf format %s has arg #1 of wrong type func()" + fmt.Printf("%d", someFunction) // ok: maybe someone wants to see the pointer + fmt.Printf("%v", someFunction) // ok: maybe someone wants to see the pointer in decimal + fmt.Printf("%p", someFunction) // ok: maybe someone wants to see the pointer + fmt.Printf("%T", someFunction) // ok: maybe someone wants to see the type + // Bug: used to recur forever. 
+ fmt.Printf("%p %x", recursiveStructV, recursiveStructV.next) + fmt.Printf("%p %x", recursiveStruct1V, recursiveStruct1V.next) + fmt.Printf("%p %x", recursiveSliceV, recursiveSliceV) + //fmt.Printf("%p %x", recursiveMapV, recursiveMapV) + + // indexed arguments + fmt.Printf("%d %[3]d %d %[2]d x", 1, 2, 3, 4) // OK + fmt.Printf("%d %[0]d %d %[2]d x", 1, 2, 3, 4) // MATCH "Printf format %[0]d reads invalid arg 0; indices are 1-based" + fmt.Printf("%d %[3]d %d %[-2]d x", 1, 2, 3, 4) // MATCH "couldn't parse format string" + fmt.Printf("%d %[3]d %d %[2234234234234]d x", 1, 2, 3, 4) // MATCH "Printf format %[2234234234234]d reads arg #2234234234234, but call has only 4 args" + fmt.Printf("%d %[3]d %-10d %[2]d x", 1, 2, 3) // MATCH "Printf format %-10d reads arg #4, but call has only 3 args" + fmt.Printf("%[1][3]d x", 1, 2) // MATCH "couldn't parse format string" + fmt.Printf("%[1]d x", 1, 2) // OK + fmt.Printf("%d %[3]d %d %[2]d x", 1, 2, 3, 4, 5) // OK + + fmt.Printf(someString(), "hello") // OK + + // d accepts pointers as long as they're not to structs. + // pointers to structs are dereferencd and walked. + fmt.Printf("%d", &s) + + // staticcheck's own checks, based on bugs in go vet; see https://github.com/golang/go/issues/27672 + { + + type T2 struct { + X string + } + + type T1 struct { + X *T2 + } + x1 := []string{"hi"} + t1 := T1{&T2{"hi"}} + + fmt.Printf("%s\n", &x1) + fmt.Printf("%s\n", t1) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.T1" + var x2 struct{ A *int } + fmt.Printf("%p\n", x2) // MATCH "Printf format %p has arg #1 of wrong type struct{A *int}" + var x3 [2]int + fmt.Printf("%p", x3) // MATCH "Printf format %p has arg #1 of wrong type [2]int" + + ue := unexportedError{nil} + fmt.Printf("%s", ue) + } + + fmt.Printf("%s", Error(0)) +} + +func someString() string { return "X" } + +// A function we use as a function value; it has no other purpose. +func someFunction() {} + +// multi is used by the test. +func multi() []interface{} { + panic("don't call - testing only") +} + +type stringer int + +func (stringer) String() string { return "string" } + +type ptrStringer float64 + +var stringerv ptrStringer + +func (*ptrStringer) String() string { + return "string" +} + +type embeddedStringer struct { + foo string + ptrStringer + bar int +} + +var embeddedStringerv embeddedStringer + +type notstringer struct { + f float64 +} + +var notstringerv notstringer + +type stringerarray [4]float64 + +func (stringerarray) String() string { + return "string" +} + +var stringerarrayv stringerarray + +type notstringerarray [4]float64 + +var notstringerarrayv notstringerarray + +var nonemptyinterface = interface { + f() +}(nil) + +// A data type we can print with "%d". +type percentDStruct struct { + a int + b []byte + c *float64 +} + +var percentDV percentDStruct + +// A data type we cannot print correctly with "%d". +type notPercentDStruct struct { + a int + b []byte + c bool +} + +var notPercentDV notPercentDStruct + +// A data type we can print with "%s". 
+type percentSStruct struct { + a string + b []byte + C stringerarray +} + +var percentSV percentSStruct + +type BoolFormatter bool + +func (*BoolFormatter) Format(fmt.State, rune) { +} + +// Formatter with value receiver +type FormatterVal bool + +func (FormatterVal) Format(fmt.State, rune) { +} + +type RecursiveSlice []RecursiveSlice + +var recursiveSliceV = &RecursiveSlice{} + +type RecursiveMap map[int]RecursiveMap + +var recursiveMapV = make(RecursiveMap) + +type RecursiveStruct struct { + next *RecursiveStruct +} + +var recursiveStructV = &RecursiveStruct{} + +type RecursiveStruct1 struct { + next *RecursiveStruct2 +} + +type RecursiveStruct2 struct { + next *RecursiveStruct1 +} + +var recursiveStruct1V = &RecursiveStruct1{} + +type unexportedInterface struct { + f interface{} +} + +// Issue 17798: unexported ptrStringer cannot be formatted. +type unexportedStringer struct { + t ptrStringer +} +type unexportedStringerOtherFields struct { + s string + t ptrStringer + S string +} + +// Issue 17798: unexported error cannot be formatted. +type unexportedError struct { + e error +} +type unexportedErrorOtherFields struct { + s string + e error + S string +} + +type errorer struct{} + +func (e errorer) Error() string { return "errorer" } + +type unexportedCustomError struct { + e errorer +} + +type errorInterface interface { + error + ExtraMethod() +} + +type unexportedErrorInterface struct { + e errorInterface +} + +func UnexportedStringerOrError() { + fmt.Printf("%s", unexportedInterface{"foo"}) // ok; prints {foo} + fmt.Printf("%s", unexportedInterface{3}) // ok; we can't see the problem + + us := unexportedStringer{} + fmt.Printf("%s", us) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.unexportedStringer" + fmt.Printf("%s", &us) // MATCH "Printf format %s has arg #1 of wrong type *CheckPrintf.unexportedStringer" + + usf := unexportedStringerOtherFields{ + s: "foo", + S: "bar", + } + fmt.Printf("%s", usf) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.unexportedStringerOtherFields" + fmt.Printf("%s", &usf) // MATCH "Printf format %s has arg #1 of wrong type *CheckPrintf.unexportedStringerOtherFields" + + intSlice := []int{3, 4} + fmt.Printf("%s", intSlice) // MATCH "Printf format %s has arg #1 of wrong type []int" + nonStringerArray := [1]unexportedStringer{{}} + fmt.Printf("%s", nonStringerArray) // MATCH "Printf format %s has arg #1 of wrong type [1]CheckPrintf.unexportedStringer" + fmt.Printf("%s", []stringer{3, 4}) // not an error + fmt.Printf("%s", [2]stringer{3, 4}) // not an error +} + +// TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11. +// See issues 23598 and 23605. +func DisableErrorForFlag0() { + fmt.Printf("%0t", true) +} + +// Issue 26486. +func dbg(format string, args ...interface{}) { + if format == "" { + format = "%v" + } + fmt.Printf(format, args...) 
+} diff --git a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go index a573f072c..48cad7933 100644 --- a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go +++ b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go @@ -6,8 +6,7 @@ import ( "os" ) -func fn() { - var s string +func fn(s string) { fn2 := func() string { return "" } fmt.Printf(fn2()) // MATCH /should use print-style function/ _ = fmt.Sprintf(fn2()) // MATCH /should use print-style function/ @@ -19,5 +18,5 @@ func fn() { fmt.Printf(fn2(), "") fmt.Printf("") - fmt.Printf("", "") + fmt.Printf("%s", "") } From 5d01066784d46e200226e4f237f5cd711bcdf3d5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 15 Apr 2019 03:42:48 +0200 Subject: [PATCH 123/254] Change framework to one job per check per package This changes our checks from "one job per check" to "one job per check per package". This brings us closer to the way go/analysis works and should make migrating in the future easier. In the process some now useless lintdsl helpers were removed. --- internal/sharedcheck/lint.go | 2 +- lint/generated.go | 10 +- lint/lint.go | 202 +++++---------- lint/lint_test.go | 2 +- lint/lintdsl/lintdsl.go | 60 ++--- simple/lint.go | 134 +++++----- staticcheck/lint.go | 199 ++++++++------- stylecheck/lint.go | 268 ++++++++++---------- stylecheck/names.go | 180 +++++++------- unused/unused.go | 465 +++++++++++++++++++---------------- 10 files changed, 725 insertions(+), 797 deletions(-) diff --git a/internal/sharedcheck/lint.go b/internal/sharedcheck/lint.go index cbbafbcdf..cf797fb1b 100644 --- a/internal/sharedcheck/lint.go +++ b/internal/sharedcheck/lint.go @@ -10,7 +10,7 @@ import ( ) func CheckRangeStringRunes(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { fn := func(node ast.Node) bool { rng, ok := node.(*ast.RangeStmt) if !ok || !IsBlank(rng.Key) { diff --git a/lint/generated.go b/lint/generated.go index 58b23f68f..655328296 100644 --- a/lint/generated.go +++ b/lint/generated.go @@ -4,6 +4,7 @@ import ( "bufio" "bytes" "io" + "os" ) var ( @@ -15,8 +16,13 @@ var ( crnl = []byte("\r\n") ) -func isGenerated(r io.Reader) bool { - br := bufio.NewReader(r) +func isGenerated(path string) bool { + f, err := os.Open(path) + if err != nil { + return false + } + defer f.Close() + br := bufio.NewReader(f) for { s, err := br.ReadBytes('\n') if err != nil && err != io.EOF { diff --git a/lint/lint.go b/lint/lint.go index 8000f8d6d..5838676bb 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -25,7 +25,8 @@ import ( ) type Job struct { - Program *Program + Pkg *Pkg + GoVersion int checker string check Check @@ -108,24 +109,11 @@ func (gi *GlobIgnore) Match(p Problem) bool { return false } -type tokenFileMapEntry struct { - af *ast.File - pkg *Pkg -} - type Program struct { - SSA *ssa.Program - InitialPackages []*Pkg - InitialFunctions []*ssa.Function - AllPackages []*packages.Package - AllFunctions []*ssa.Function - Files []*ast.File - GoVersion int - - tokenFileMap map[*token.File]tokenFileMapEntry - - genMu sync.RWMutex - generatedMap map[string]bool + SSA *ssa.Program + InitialPackages []*Pkg + AllPackages []*packages.Package + AllFunctions []*ssa.Function } func (prog *Program) Fset() *token.FileSet { @@ -212,12 +200,8 @@ func (l *Linter) ignore(p Problem) bool { return false } -func (prog *Program) File(node Positioner) *ast.File { - return 
prog.tokenFileMap[prog.SSA.Fset.File(node.Pos())].af -} - func (j *Job) File(node Positioner) *ast.File { - return j.Program.File(node) + return j.Pkg.tokenFileMap[j.Pkg.Fset.File(node.Pos())] } func parseDirective(s string) (cmd string, args []string) { @@ -299,11 +283,20 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } pkg := &Pkg{ - SSA: ssapkg, - Package: pkg, - Config: cfg, + SSA: ssapkg, + Package: pkg, + Config: cfg, + Generated: map[string]bool{}, + tokenFileMap: map[*token.File]*ast.File{}, } pkg.Inspector = inspector.New(pkg.Syntax) + for _, f := range pkg.Syntax { + tf := pkg.Fset.File(f.Pos()) + pkg.tokenFileMap[tf] = f + + path := DisplayPosition(pkg.Fset, f.Pos()).Filename + pkg.Generated[path] = isGenerated(path) + } pkgMap[ssapkg] = pkg pkgs = append(pkgs, pkg) } @@ -312,36 +305,15 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { SSA: ssaprog, InitialPackages: pkgs, AllPackages: allPkgs, - GoVersion: l.GoVersion, - tokenFileMap: map[*token.File]tokenFileMapEntry{}, - generatedMap: map[string]bool{}, } - isInitial := map[*types.Package]struct{}{} - for _, pkg := range pkgs { - isInitial[pkg.Types] = struct{}{} - } for fn := range ssautil.AllFunctions(ssaprog) { + prog.AllFunctions = append(prog.AllFunctions, fn) if fn.Pkg == nil { continue } - prog.AllFunctions = append(prog.AllFunctions, fn) - if _, ok := isInitial[fn.Pkg.Pkg]; ok { - prog.InitialFunctions = append(prog.InitialFunctions, fn) - } - } - for _, pkg := range pkgs { - prog.Files = append(prog.Files, pkg.Syntax...) - } - - for _, pkg := range allPkgs { - for _, f := range pkg.Syntax { - tf := pkg.Fset.File(f.Pos()) - ssapkg := ssaprog.Package(pkg.Types) - prog.tokenFileMap[tf] = tokenFileMapEntry{ - af: f, - pkg: pkgMap[ssapkg], - } + if pkg, ok := pkgMap[fn.Pkg]; ok { + pkg.InitialFunctions = append(pkg.InitialFunctions, fn) } } @@ -375,7 +347,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { if len(args) < 2 { // FIXME(dh): this causes duplicated warnings when using megacheck p := Problem{ - Position: prog.DisplayPosition(c.Pos()), + Position: DisplayPosition(prog.Fset(), c.Pos()), Text: "malformed linter directive; missing the required reason field?", Check: "", Checker: "lint", @@ -389,7 +361,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { continue } checks := strings.Split(args[0], ",") - pos := prog.DisplayPosition(node.Pos()) + pos := DisplayPosition(prog.Fset(), node.Pos()) var ig Ignore switch cmd { case "ignore": @@ -427,41 +399,32 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { var jobs []*Job var allChecks []string + var wg sync.WaitGroup for _, checker := range l.Checkers { - checks := checker.Checks() - for _, check := range checks { + for _, check := range checker.Checks() { allChecks = append(allChecks, check.ID) - j := &Job{ - Program: prog, - checker: checker.Name(), - check: check, + if check.Fn == nil { + continue + } + for _, pkg := range pkgs { + j := &Job{ + Pkg: pkg, + checker: checker.Name(), + check: check, + GoVersion: l.GoVersion, + } + jobs = append(jobs, j) + wg.Add(1) + go func(check Check, j *Job) { + t := time.Now() + check.Fn(j) + j.duration = time.Since(t) + wg.Done() + }(check, j) } - jobs = append(jobs, j) } } - max := len(jobs) - if l.MaxConcurrentJobs > 0 { - max = l.MaxConcurrentJobs - } - - sem := make(chan struct{}, max) - wg := &sync.WaitGroup{} - for _, j := range jobs { - wg.Add(1) - go 
func(j *Job) { - defer wg.Done() - sem <- struct{}{} - defer func() { <-sem }() - fn := j.check.Fn - if fn == nil { - return - } - t := time.Now() - fn(j) - j.duration = time.Since(t) - }(j) - } wg.Wait() for _, j := range jobs { @@ -500,21 +463,21 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { } couldveMatched := false - for _, v := range prog.tokenFileMap { - f := v.af - pkg := v.pkg - if prog.Fset().Position(f.Pos()).Filename != ig.File { - continue - } - allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) - for _, c := range ig.Checks { - if !allowedChecks[c] { + for _, pkg := range pkgs { + for _, f := range pkg.tokenFileMap { + if prog.Fset().Position(f.Pos()).Filename != ig.File { continue } - couldveMatched = true + allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) + for _, c := range ig.Checks { + if !allowedChecks[c] { + continue + } + couldveMatched = true + break + } break } - break } if !couldveMatched { @@ -523,7 +486,7 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { continue } p := Problem{ - Position: prog.DisplayPosition(ig.pos), + Position: DisplayPosition(prog.Fset(), ig.pos), Text: "this linter directive didn't match anything; should it be removed?", Check: "", Checker: "lint", @@ -615,23 +578,28 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { // Pkg represents a package being linted. type Pkg struct { - SSA *ssa.Package + SSA *ssa.Package + InitialFunctions []*ssa.Function *packages.Package Config config.Config Inspector *inspector.Inspector + // TODO(dh): this map should probably map from *ast.File, not string + Generated map[string]bool + + tokenFileMap map[*token.File]*ast.File } type Positioner interface { Pos() token.Pos } -func (prog *Program) DisplayPosition(p token.Pos) token.Position { +func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { // Only use the adjusted position if it points to another Go file. // This means we'll point to the original file for cgo files, but // we won't point to a YACC grammar file. - pos := prog.Fset().PositionFor(p, false) - adjPos := prog.Fset().PositionFor(p, true) + pos := fset.PositionFor(p, false) + adjPos := fset.PositionFor(p, true) if filepath.Ext(adjPos.Filename) == ".go" { return adjPos @@ -639,39 +607,9 @@ func (prog *Program) DisplayPosition(p token.Pos) token.Position { return pos } -func (prog *Program) isGenerated(path string) bool { - // This function isn't very efficient in terms of lock contention - // and lack of parallelism, but it really shouldn't matter. - // Projects consists of thousands of files, and have hundreds of - // errors. That's not a lot of calls to isGenerated. 
- - prog.genMu.RLock() - if b, ok := prog.generatedMap[path]; ok { - prog.genMu.RUnlock() - return b - } - prog.genMu.RUnlock() - prog.genMu.Lock() - defer prog.genMu.Unlock() - // recheck to avoid doing extra work in case of race - if b, ok := prog.generatedMap[path]; ok { - return b - } - - f, err := os.Open(path) - if err != nil { - return false - } - defer f.Close() - b := isGenerated(f) - prog.generatedMap[path] = b - return b -} - func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - pkg := j.NodePackage(n) - pos := j.Program.DisplayPosition(n.Pos()) - if j.Program.isGenerated(pos.Filename) && j.check.FilterGenerated { + pos := DisplayPosition(j.Pkg.Fset, n.Pos()) + if j.Pkg.Generated[pos.Filename] && j.check.FilterGenerated { return nil } problem := Problem{ @@ -679,16 +617,12 @@ func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem Text: fmt.Sprintf(format, args...), Check: j.check.ID, Checker: j.checker, - Package: pkg, + Package: j.Pkg, } j.problems = append(j.problems, problem) return &j.problems[len(j.problems)-1] } -func (j *Job) NodePackage(node Positioner) *Pkg { - return j.Program.tokenFileMap[j.Program.SSA.Fset.File(node.Pos())].pkg -} - func allPackages(pkgs []*packages.Package) []*packages.Package { var out []*packages.Package packages.Visit( diff --git a/lint/lint_test.go b/lint/lint_test.go index 48dbaa79b..437e75015 100644 --- a/lint/lint_test.go +++ b/lint/lint_test.go @@ -21,7 +21,7 @@ func (testChecker) Checks() []Check { func testLint(j *Job) { // Flag all functions - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if fn.Synthetic == "" { j.Errorf(fn, "This is a test problem") } diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 453e09e24..ab6800ab9 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -103,26 +103,14 @@ func IsZero(expr ast.Expr) bool { return IsIntLiteral(expr, "0") } -func TypeOf(j *lint.Job, expr ast.Expr) types.Type { - if expr == nil { - return nil - } - return j.NodePackage(expr).TypesInfo.TypeOf(expr) -} - -func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { return IsType(TypeOf(j, expr), name) } - -func ObjectOf(j *lint.Job, ident *ast.Ident) types.Object { - if ident == nil { - return nil - } - return j.NodePackage(ident).TypesInfo.ObjectOf(ident) +func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { + return IsType(j.Pkg.TypesInfo.TypeOf(expr), name) } func IsInTest(j *lint.Job, node lint.Positioner) bool { // FIXME(dh): this doesn't work for global variables with // initializers - f := j.Program.SSA.Fset.File(node.Pos()) + f := j.Pkg.Fset.File(node.Pos()) return f != nil && strings.HasSuffix(f.Name(), "_test.go") } @@ -130,15 +118,11 @@ func IsInMain(j *lint.Job, node lint.Positioner) bool { if node, ok := node.(packager); ok { return node.Package().Pkg.Name() == "main" } - pkg := j.NodePackage(node) - if pkg == nil { - return false - } - return pkg.Types.Name() == "main" + return j.Pkg.Types.Name() == "main" } func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { - info := j.NodePackage(expr).TypesInfo + info := j.Pkg.TypesInfo sel := info.Selections[expr] if sel == nil { if x, ok := expr.X.(*ast.Ident); ok { @@ -155,11 +139,11 @@ func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { } func IsNil(j *lint.Job, expr ast.Expr) bool { - return j.NodePackage(expr).TypesInfo.Types[expr].IsNil() + return j.Pkg.TypesInfo.Types[expr].IsNil() } func BoolConst(j *lint.Job, 
expr ast.Expr) bool { - val := j.NodePackage(expr).TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() + val := j.Pkg.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() return constant.BoolVal(val) } @@ -172,7 +156,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { if !ok { return false } - obj := j.NodePackage(expr).TypesInfo.ObjectOf(ident) + obj := j.Pkg.TypesInfo.ObjectOf(ident) c, ok := obj.(*types.Const) if !ok { return false @@ -188,7 +172,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { } func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { - tv := j.NodePackage(expr).TypesInfo.Types[expr] + tv := j.Pkg.TypesInfo.Types[expr] if tv.Value == nil { return 0, false } @@ -199,7 +183,7 @@ func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { } func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) { - val := j.NodePackage(expr).TypesInfo.Types[expr].Value + val := j.Pkg.TypesInfo.Types[expr].Value if val == nil { return "", false } @@ -229,19 +213,19 @@ func DereferenceR(T types.Type) types.Type { } func IsGoVersion(j *lint.Job, minor int) bool { - return j.Program.GoVersion >= minor + return j.GoVersion >= minor } func CallNameAST(j *lint.Job, call *ast.CallExpr) string { switch fun := call.Fun.(type) { case *ast.SelectorExpr: - fn, ok := ObjectOf(j, fun.Sel).(*types.Func) + fn, ok := j.Pkg.TypesInfo.ObjectOf(fun.Sel).(*types.Func) if !ok { return "" } return lint.FuncName(fn) case *ast.Ident: - obj := ObjectOf(j, fun) + obj := j.Pkg.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: return lint.FuncName(obj) @@ -273,9 +257,8 @@ func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool { } func Render(j *lint.Job, x interface{}) string { - fset := j.Program.SSA.Fset var buf bytes.Buffer - if err := printer.Fprint(&buf, fset, x); err != nil { + if err := printer.Fprint(&buf, j.Pkg.Fset, x); err != nil { panic(err) } return buf.String() @@ -311,11 +294,10 @@ func Inspect(node ast.Node, fn func(node ast.Node) bool) { ast.Inspect(node, fn) } -func GroupSpecs(j *lint.Job, specs []ast.Spec) [][]ast.Spec { +func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec { if len(specs) == 0 { return nil } - fset := j.Program.SSA.Fset groups := make([][]ast.Spec, 1) groups[0] = append(groups[0], specs[0]) @@ -377,15 +359,3 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel } return out } - -func InspectPreorder(j *lint.Job, types []ast.Node, fn func(ast.Node)) { - for _, pkg := range j.Program.InitialPackages { - pkg.Inspector.Preorder(types, fn) - } -} - -func InspectNodes(j *lint.Job, types []ast.Node, fn func(node ast.Node, push bool) (prune bool)) { - for _, pkg := range j.Program.InitialPackages { - pkg.Inspector.Nodes(types, fn) - } -} diff --git a/simple/lint.go b/simple/lint.go index bf7da38d2..db805770c 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -103,7 +103,7 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") } } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) } func (c *Checker) LintLoopCopy(j *lint.Job) { @@ -128,7 +128,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { return } - if _, ok := TypeOf(j, lhs.X).(*types.Slice); !ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok { return } lidx, ok := lhs.Index.(*ast.Ident) @@ 
-139,16 +139,16 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if TypeOf(j, lhs) == nil || TypeOf(j, stmt.Rhs[0]) == nil { + if j.Pkg.TypesInfo.TypeOf(lhs) == nil || j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0]) == nil { return } - if ObjectOf(j, lidx) != ObjectOf(j, key) { + if j.Pkg.TypesInfo.ObjectOf(lidx) != j.Pkg.TypesInfo.ObjectOf(key) { return } - if !types.Identical(TypeOf(j, lhs), TypeOf(j, stmt.Rhs[0])) { + if !types.Identical(j.Pkg.TypesInfo.TypeOf(lhs), j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0])) { return } - if _, ok := TypeOf(j, loop.X).(*types.Slice); !ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok { return } @@ -162,7 +162,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if ObjectOf(j, ridx) != ObjectOf(j, key) { + if j.Pkg.TypesInfo.ObjectOf(ridx) != j.Pkg.TypesInfo.ObjectOf(key) { return } } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { @@ -170,7 +170,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if ObjectOf(j, rhs) != ObjectOf(j, value) { + if j.Pkg.TypesInfo.ObjectOf(rhs) != j.Pkg.TypesInfo.ObjectOf(value) { return } } else { @@ -178,7 +178,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { } j.Errorf(loop, "should use copy() instead of a loop") } - InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintIfBoolCmp(j *lint.Job) { @@ -201,7 +201,7 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { val = BoolConst(j, expr.Y) other = expr.X } - basic, ok := TypeOf(j, other).Underlying().(*types.Basic) + basic, ok := j.Pkg.TypesInfo.TypeOf(other).Underlying().(*types.Basic) if !ok || basic.Kind() != types.Bool { return } @@ -220,7 +220,7 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { } j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesBufferConversions(j *lint.Job) { @@ -239,7 +239,7 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { return } - typ := TypeOf(j, call.Fun) + typ := j.Pkg.TypesInfo.TypeOf(call.Fun) if typ == types.Universe.Lookup("string").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).Bytes") { j.Errorf(call, "should use %v.String() instead of %v", Render(j, sel.X), Render(j, call)) } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).String") { @@ -247,7 +247,7 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { } } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintStringsContains(j *lint.Job) { @@ -312,7 +312,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { } j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintBytesCompare(j *lint.Job) { @@ -339,7 +339,7 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { } j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintForTrue(j *lint.Job) { @@ -353,7 
+353,7 @@ func (c *Checker) LintForTrue(j *lint.Job) { } j.Errorf(loop, "should use for {} instead of for true {}") } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintRegexpRaw(j *lint.Job) { @@ -410,7 +410,7 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintIfReturn(j *lint.Job) { @@ -471,7 +471,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { } j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") } - InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } // LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: @@ -492,7 +492,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return false, false } - c, ok := ObjectOf(j, id).(*types.Const) + c, ok := j.Pkg.TypesInfo.ObjectOf(id).(*types.Const) if !ok { return false, false } @@ -585,7 +585,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array var nilType string - switch TypeOf(j, xx).(type) { + switch j.Pkg.TypesInfo.TypeOf(xx).(type) { case *types.Slice: nilType = "nil slices" case *types.Map: @@ -597,7 +597,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { } j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) LintSlicing(j *lint.Job) { @@ -618,7 +618,7 @@ func (c *Checker) LintSlicing(j *lint.Job) { if !ok || fun.Name != "len" { return } - if _, ok := ObjectOf(j, fun).(*types.Builtin); !ok { + if _, ok := j.Pkg.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok { return } arg, ok := call.Args[Arg("len.v")].(*ast.Ident) @@ -627,7 +627,7 @@ func (c *Checker) LintSlicing(j *lint.Job) { } j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") } - InspectPreorder(j, []ast.Node{(*ast.SliceExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) } func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { @@ -637,7 +637,7 @@ func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { if !ok { return true } - if ObjectOf(j, ident) == ObjectOf(j, ident2) { + if j.Pkg.TypesInfo.ObjectOf(ident) == j.Pkg.TypesInfo.ObjectOf(ident2) { found = true return false } @@ -681,14 +681,14 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return } - obj := ObjectOf(j, fun) + obj := j.Pkg.TypesInfo.ObjectOf(fun) fn, ok := obj.(*types.Builtin) if !ok || fn.Name() != "append" { return } - src := TypeOf(j, loop.X) - dst := TypeOf(j, call.Args[Arg("append.slice")]) + src := j.Pkg.TypesInfo.TypeOf(loop.X) + dst := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("append.slice")]) // TODO(dominikh) remove nil check once Go issue #15173 has // been fixed if src == nil { @@ -706,13 +706,13 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return } - if ObjectOf(j, val) != ObjectOf(j, el) { + if j.Pkg.TypesInfo.ObjectOf(val) != j.Pkg.TypesInfo.ObjectOf(el) { return } 
j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) } - InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) LintTimeSince(j *lint.Job) { @@ -730,7 +730,7 @@ func (c *Checker) LintTimeSince(j *lint.Job) { } j.Errorf(call, "should use time.Since instead of time.Now().Sub") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintTimeUntil(j *lint.Job) { @@ -747,7 +747,7 @@ func (c *Checker) LintTimeUntil(j *lint.Job) { } j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { @@ -763,7 +763,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { case *ast.IndexExpr: // The type-checker should make sure that it's a map, but // let's be safe. - if _, ok := TypeOf(j, rhs.X).Underlying().(*types.Map); !ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok { return } case *ast.UnaryExpr: @@ -812,10 +812,10 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } } - InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn1) - InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn2) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) if IsGoVersion(j, 4) { - InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn3) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) } } @@ -838,7 +838,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return } - typ1, _ := TypeOf(j, lit.Type).(*types.Named) + typ1, _ := j.Pkg.TypesInfo.TypeOf(lit.Type).(*types.Named) if typ1 == nil { return } @@ -858,7 +858,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return nil, nil, false } - typ := TypeOf(j, sel.X) + typ := j.Pkg.TypesInfo.TypeOf(sel.X) return typ, ident, typ != nil } if len(lit.Elts) == 0 { @@ -938,7 +938,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) } - InspectPreorder(j, []ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) } func (c *Checker) LintTrim(j *lint.Job) { @@ -1139,7 +1139,7 @@ func (c *Checker) LintTrim(j *lint.Job) { j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) } } - InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintLoopSlide(j *lint.Job) { @@ -1176,7 +1176,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } postvar, ok := post.X.(*ast.Ident) - if !ok || ObjectOf(j, postvar) != ObjectOf(j, initvar) { + if !ok || j.Pkg.TypesInfo.ObjectOf(postvar) != j.Pkg.TypesInfo.ObjectOf(initvar) { return } bin, ok := loop.Cond.(*ast.BinaryExpr) @@ -1184,7 +1184,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } binx, ok := bin.X.(*ast.Ident) - if !ok || ObjectOf(j, binx) != ObjectOf(j, initvar) { + if !ok || j.Pkg.TypesInfo.ObjectOf(binx) 
!= j.Pkg.TypesInfo.ObjectOf(initvar) { return } biny, ok := bin.Y.(*ast.Ident) @@ -1213,8 +1213,8 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { if !ok { return } - obj1 := ObjectOf(j, bs1) - obj2 := ObjectOf(j, bs2) + obj1 := j.Pkg.TypesInfo.ObjectOf(bs1) + obj2 := j.Pkg.TypesInfo.ObjectOf(bs2) if obj1 != obj2 { return } @@ -1223,7 +1223,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { } index1, ok := lhs.Index.(*ast.Ident) - if !ok || ObjectOf(j, index1) != ObjectOf(j, initvar) { + if !ok || j.Pkg.TypesInfo.ObjectOf(index1) != j.Pkg.TypesInfo.ObjectOf(initvar) { return } index2, ok := rhs.Index.(*ast.BinaryExpr) @@ -1235,13 +1235,13 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } add2, ok := index2.Y.(*ast.Ident) - if !ok || ObjectOf(j, add2) != ObjectOf(j, initvar) { + if !ok || j.Pkg.TypesInfo.ObjectOf(add2) != j.Pkg.TypesInfo.ObjectOf(initvar) { return } j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) LintMakeLenCap(j *lint.Job) { @@ -1254,7 +1254,7 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { switch len(call.Args) { case 2: // make(T, len) - if _, ok := TypeOf(j, call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { break } if IsZero(call.Args[Arg("make.size[0]")]) { @@ -1269,7 +1269,7 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintAssertNotNil(j *lint.Job) { @@ -1378,8 +1378,8 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { } j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) } - InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn1) - InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn2) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) } func (c *Checker) LintDeclareAssign(j *lint.Job) { @@ -1450,7 +1450,7 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { j.Errorf(decl, "should merge variable declaration with assignment on next line") } } - InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } func (c *Checker) LintRedundantBreak(j *lint.Job) { @@ -1492,8 +1492,8 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { // checked x.Type.Results to be nil. 
j.Errorf(rst, "redundant return statement") } - InspectPreorder(j, []ast.Node{(*ast.CaseClause)(nil)}, fn1) - InspectPreorder(j, []ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) } func isStringer(T types.Type) bool { @@ -1533,7 +1533,7 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { return } arg := call.Args[Arg("fmt.Sprintf.a[0]")] - typ := TypeOf(j, arg) + typ := j.Pkg.TypesInfo.TypeOf(arg) if isStringer(typ) { j.Errorf(call, "should use String() instead of fmt.Sprintf") @@ -1548,7 +1548,7 @@ func (c *Checker) LintRedundantSprintf(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { @@ -1562,7 +1562,7 @@ func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { } j.Errorf(node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) LintRangeStringRunes(j *lint.Job) { @@ -1596,12 +1596,12 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { if ifXIdent.Obj != rangeXIdent.Obj { return } - switch TypeOf(j, rangeXIdent).(type) { + switch j.Pkg.TypesInfo.TypeOf(rangeXIdent).(type) { case *types.Slice, *types.Map: j.Errorf(node, "unnecessary nil check around range") } } - InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func isPermissibleSort(j *lint.Job, node ast.Node) bool { @@ -1682,7 +1682,7 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { } return } - InspectPreorder(j, []ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) } func (c *Checker) LintGuardedDelete(j *lint.Job) { @@ -1707,7 +1707,7 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return nil, nil, nil, false } - if _, ok := TypeOf(j, index.X).(*types.Map); !ok { + if _, ok := j.Pkg.TypesInfo.TypeOf(index.X).(*types.Map); !ok { return nil, nil, nil, false } key = index.Index @@ -1736,7 +1736,7 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return } - if cond, ok := stmt.Cond.(*ast.Ident); !ok || ObjectOf(j, cond) != ObjectOf(j, b) { + if cond, ok := stmt.Cond.(*ast.Ident); !ok || j.Pkg.TypesInfo.ObjectOf(cond) != j.Pkg.TypesInfo.ObjectOf(b) { return } if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { @@ -1744,7 +1744,7 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { } j.Errorf(stmt, "unnecessary guard around call to delete") } - InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { @@ -1764,7 +1764,7 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if !ok { return } - x := ObjectOf(j, ident) + x := j.Pkg.TypesInfo.ObjectOf(ident) var allOffenders []ast.Node for _, clause := range stmt.Body.List { clause := clause.(*ast.CaseClause) @@ -1783,12 +1783,12 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { hasUnrelatedAssertion = true return false } - if ObjectOf(j, ident) != x { + if j.Pkg.TypesInfo.ObjectOf(ident) != x { hasUnrelatedAssertion = true return false } - if 
!types.Identical(TypeOf(j, clause.List[0]), TypeOf(j, assert2.Type)) { + if !types.Identical(j.Pkg.TypesInfo.TypeOf(clause.List[0]), j.Pkg.TypesInfo.TypeOf(assert2.Type)) { hasUnrelatedAssertion = true return false } @@ -1807,11 +1807,11 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if len(allOffenders) != 0 { at := "" for _, offender := range allOffenders { - pos := j.Program.DisplayPosition(offender.Pos()) + pos := lint.DisplayPosition(j.Pkg.Fset, offender.Pos()) at += "\n\t" + pos.String() } j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) } } - InspectPreorder(j, []ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 637093831..bad1efb1e 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -623,7 +623,7 @@ func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { } func checkAtomicAlignmentImpl(call *Call) { - sizes := call.Job.Program.InitialPackages[0].TypesSizes + sizes := call.Job.Pkg.TypesSizes if sizes.Sizeof(types.Typ[types.Uintptr]) != 4 { // Not running on a 32-bit platform return @@ -1024,11 +1024,6 @@ func applyStdlibKnowledge(fn *ssa.Function) { } } -func hasType(j *lint.Job, expr ast.Expr, name string) bool { - T := TypeOf(j, expr) - return IsType(T, name) -} - func (c *Checker) CheckUntrappableSignal(j *lint.Job) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) @@ -1049,7 +1044,7 @@ func (c *Checker) CheckUntrappableSignal(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTemplate(j *lint.Job) { @@ -1090,7 +1085,7 @@ func (c *Checker) CheckTemplate(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { @@ -1120,7 +1115,7 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { j.Errorf(call.Args[Arg("time.Sleep.d")], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { @@ -1145,7 +1140,7 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { if !ok { return } - fn, ok := ObjectOf(j, sel.Sel).(*types.Func) + fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return } @@ -1154,7 +1149,7 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { Render(j, stmt)) } } - InspectPreorder(j, []ast.Node{(*ast.GoStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) } func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { @@ -1186,7 +1181,7 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { return } if ident, ok := loop.Cond.(*ast.Ident); ok { - if k, ok := ObjectOf(j, ident).(*types.Const); ok { + if k, ok := j.Pkg.TypesInfo.ObjectOf(ident).(*types.Const); ok { if !constant.BoolVal(k.Val()) { // don't flag `for false {}` loops. They're a debug aid. 
return @@ -1197,7 +1192,7 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { } j.Errorf(loop, "this loop will spin, using 100%% CPU") } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { @@ -1236,13 +1231,13 @@ func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { j.Errorf(stmt, "defers in this infinite loop will never run") } } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) - typ := TypeOf(j, loop.X) + typ := j.Pkg.TypesInfo.TypeOf(loop.X) _, ok := typ.Underlying().(*types.Chan) if !ok { return @@ -1259,7 +1254,7 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { } ast.Inspect(loop.Body, fn2) } - InspectPreorder(j, []ast.Node{(*ast.RangeStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckTestMainExit(j *lint.Job) { @@ -1268,7 +1263,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { return } - arg := ObjectOf(j, node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) + arg := j.Pkg.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) callsRun := false fn2 := func(node ast.Node) bool { call, ok := node.(*ast.CallExpr) @@ -1283,7 +1278,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !ok { return true } - if arg != ObjectOf(j, ident) { + if arg != j.Pkg.TypesInfo.ObjectOf(ident) { return true } if sel.Sel.Name == "Run" { @@ -1307,7 +1302,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { j.Errorf(node, "TestMain should call os.Exit to set exit code") } } - InspectPreorder(j, nil, fn) + j.Pkg.Inspector.Preorder(nil, fn) } func isTestMain(j *lint.Job, node ast.Node) bool { @@ -1344,7 +1339,7 @@ func (c *Checker) CheckExec(j *lint.Job) { j.Errorf(call.Args[Arg("os/exec.Command.name")], "first argument to exec.Command looks like a shell command, but a program name or path are expected") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { @@ -1363,7 +1358,7 @@ func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) } func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { @@ -1371,7 +1366,7 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { op := node.(*ast.BinaryExpr) switch op.Op { case token.EQL, token.NEQ: - if basic, ok := TypeOf(j, op.X).Underlying().(*types.Basic); ok { + if basic, ok := j.Pkg.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN return @@ -1405,7 +1400,7 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { } j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckScopedBreak(j *lint.Job) { @@ -1463,7 +1458,7 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) + 
j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) } func (c *Checker) CheckUnsafePrintf(j *lint.Job) { @@ -1488,7 +1483,7 @@ func (c *Checker) CheckUnsafePrintf(j *lint.Job) { j.Errorf(call.Args[arg], "printf-style function with dynamic format string and no further arguments should use print-style function instead") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckEarlyDefer(j *lint.Job) { @@ -1518,7 +1513,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if !ok { continue } - sig, ok := TypeOf(j, call.Fun).(*types.Signature) + sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { continue } @@ -1556,7 +1551,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { j.Errorf(def, "should check returned error before deferring %s", Render(j, def.Call)) } } - InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } func selectorX(sel *ast.SelectorExpr) ast.Node { @@ -1594,7 +1589,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return nil, "", false } - fn, ok := ObjectOf(j, sel.Sel).(*types.Func) + fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return nil, "", false } @@ -1624,7 +1619,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.BlockStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) } // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't @@ -1648,11 +1643,11 @@ func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) } func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, b := range ssafn.Blocks { for _, ins := range b.Instrs { binop, ok := ins.(*ssa.BinOp) @@ -1693,7 +1688,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { continue } - if hasType(j, op.X, "net/http.Header") { + if IsOfType(j, op.X, "net/http.Header") { return false } } @@ -1703,7 +1698,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { return true } - if !hasType(j, op.X, "net/http.Header") { + if !IsOfType(j, op.X, "net/http.Header") { return true } s, ok := ExprToString(j, op.Index) @@ -1716,7 +1711,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } - InspectNodes(j, []ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) + j.Pkg.Inspector.Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) } func (c *Checker) CheckBenchmarkN(j *lint.Job) { @@ -1732,16 +1727,16 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) { if sel.Sel.Name != "N" { return } - if !hasType(j, sel.X, "*testing.B") { + if !IsOfType(j, sel.X, "*testing.B") { return } j.Errorf(assign, "should not assign to %s", Render(j, sel)) } - InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range 
j.Pkg.InitialFunctions { if IsExample(ssafn) { continue } @@ -1811,7 +1806,7 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { } func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ssabinop, ok := ins.(*ssa.BinOp) @@ -1855,7 +1850,7 @@ func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { } func (c *Checker) CheckNilMaps(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { mu, ok := ins.(*ssa.MapUpdate) @@ -1881,12 +1876,12 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { if !ok { return false } - return IsObject(ObjectOf(j, sel.Sel), name) + return IsObject(j.Pkg.TypesInfo.ObjectOf(sel.Sel), name) } fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) - tx := TypeOf(j, expr.X) + tx := j.Pkg.TypesInfo.TypeOf(expr.X) basic, ok := tx.Underlying().(*types.Basic) if !ok { return @@ -1954,7 +1949,7 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { } } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { @@ -1999,7 +1994,7 @@ func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ss } func (c *Checker) CheckLoopCondition(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { fn := func(node ast.Node) bool { loop, ok := node.(*ast.ForStmt) if !ok { @@ -2064,7 +2059,7 @@ func (c *Checker) CheckLoopCondition(j *lint.Job) { } func (c *Checker) CheckArgOverwritten(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { fn := func(node ast.Node) bool { var typ *ast.FuncType var body *ast.BlockStmt @@ -2084,7 +2079,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { } for _, field := range typ.Params.List { for _, arg := range field.Names { - obj := ObjectOf(j, arg) + obj := j.Pkg.TypesInfo.ObjectOf(arg) var ssaobj *ssa.Parameter for _, param := range ssafn.Params { if param.Object() == obj { @@ -2114,7 +2109,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { if !ok { continue } - if ObjectOf(j, ident) == obj { + if j.Pkg.TypesInfo.ObjectOf(ident) == obj { assigned = true return false } @@ -2172,7 +2167,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { body = node.Body loop = node case *ast.RangeStmt: - typ := TypeOf(j, node.X) + typ := j.Pkg.TypesInfo.TypeOf(node.X) if _, ok := typ.Underlying().(*types.Map); ok { // looping once over a map is a valid pattern for // getting an arbitrary element. 
@@ -2237,7 +2232,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { return true }) } - InspectPreorder(j, []ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) } func (c *Checker) CheckNilContext(j *lint.Job) { @@ -2246,10 +2241,10 @@ func (c *Checker) CheckNilContext(j *lint.Job) { if len(call.Args) == 0 { return } - if typ, ok := TypeOf(j, call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { + if typ, ok := j.Pkg.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { return } - sig, ok := TypeOf(j, call.Fun).(*types.Signature) + sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { return } @@ -2262,7 +2257,7 @@ func (c *Checker) CheckNilContext(j *lint.Job) { j.Errorf(call.Args[0], "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckSeeker(j *lint.Job) { @@ -2296,7 +2291,7 @@ func (c *Checker) CheckSeeker(j *lint.Job) { } j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { @@ -2314,7 +2309,7 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { return true } - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { val, ok := ins.(ssa.Value) @@ -2366,7 +2361,7 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { } func (c *Checker) CheckConcurrentTesting(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { gostmt, ok := ins.(*ssa.Go) @@ -2424,7 +2419,7 @@ func (c *Checker) CheckConcurrentTesting(j *lint.Job) { } func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { node := c.funcDescs.CallGraph.CreateNode(ssafn) for _, edge := range node.Out { if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { @@ -2452,7 +2447,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { } for _, b := range mc.Bindings { if b == v { - pos := j.Program.DisplayPosition(mc.Fn.Pos()) + pos := lint.DisplayPosition(j.Pkg.Fset, mc.Fn.Pos()) j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) } } @@ -2461,7 +2456,7 @@ func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { } func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ia, ok := ins.(*ssa.IndexAddr) @@ -2485,7 +2480,7 @@ func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { } func (c *Checker) CheckDeferLock(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { instrs := FilterDebug(block.Instrs) if len(instrs) < 2 { @@ -2531,7 
+2526,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { } return IsCallTo(call.Common(), "math.NaN") } - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2547,7 +2542,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { } func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { node := c.funcDescs.CallGraph.CreateNode(ssafn) for _, edge := range node.Out { if edge.Callee != node { @@ -2600,15 +2595,15 @@ func isName(j *lint.Job, expr ast.Expr, name string) bool { var obj types.Object switch expr := expr.(type) { case *ast.Ident: - obj = ObjectOf(j, expr) + obj = j.Pkg.TypesInfo.ObjectOf(expr) case *ast.SelectorExpr: - obj = ObjectOf(j, expr.Sel) + obj = j.Pkg.TypesInfo.ObjectOf(expr.Sel) } return objectName(obj) == name } func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if IsInMain(j, ssafn) || IsInTest(j, ssafn) { continue } @@ -2639,7 +2634,7 @@ func (c *Checker) CheckDoubleNegation(j *lint.Job) { } j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") } - InspectPreorder(j, []ast.Node{(*ast.UnaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) } func hasSideEffects(node ast.Node) bool { @@ -2698,11 +2693,11 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.IfStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) } func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2742,7 +2737,7 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - sig, ok := TypeOf(j, call.Fun).(*types.Signature) + sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { return } @@ -2773,12 +2768,12 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckPureFunctions(j *lint.Job) { fnLoop: - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if IsInTest(j, ssafn) { params := ssafn.Signature.Params() for i := 0; i < params.Len(); i++ { @@ -2820,7 +2815,7 @@ fnLoop: } func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { - obj := ObjectOf(j, ident) + obj := j.Pkg.TypesInfo.ObjectOf(ident) if obj.Pkg() == nil { return false, "" } @@ -2844,18 +2839,18 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { ssafn = nil } if fn, ok := node.(*ast.FuncDecl); ok { - ssafn = j.Program.SSA.FuncValue(ObjectOf(j, fn.Name).(*types.Func)) + ssafn = j.Pkg.SSA.Prog.FuncValue(j.Pkg.TypesInfo.ObjectOf(fn.Name).(*types.Func)) } sel, ok := node.(*ast.SelectorExpr) if !ok { return true } - obj := ObjectOf(j, sel.Sel) + obj := j.Pkg.TypesInfo.ObjectOf(sel.Sel) if obj.Pkg() == nil { return true } - nodePkg := j.NodePackage(node).Types + nodePkg := j.Pkg.Types if nodePkg == obj.Pkg() || 
obj.Pkg().Path()+"_test" == nodePkg.Path() { // Don't flag stuff in our own package return true @@ -2884,22 +2879,20 @@ func (c *Checker) CheckDeprecated(j *lint.Job) { } return true } - for _, pkg := range j.Program.InitialPackages { - for _, f := range pkg.Syntax { - ast.Inspect(f, func(node ast.Node) bool { - if node, ok := node.(*ast.ImportSpec); ok { - p := node.Path.Value - path := p[1 : len(p)-1] - imp := pkg.Imports[path] - if alt := c.deprecatedPkgs[imp.Types]; alt != "" { - j.Errorf(node, "Package %s is deprecated: %s", path, alt) - } + for _, f := range j.Pkg.Syntax { + ast.Inspect(f, func(node ast.Node) bool { + if node, ok := node.(*ast.ImportSpec); ok { + p := node.Path.Value + path := p[1 : len(p)-1] + imp := j.Pkg.Imports[path] + if alt := c.deprecatedPkgs[imp.Types]; alt != "" { + j.Errorf(node, "Package %s is deprecated: %s", path, alt) } - return true - }) - } + } + return true + }) } - InspectNodes(j, nil, fn) + j.Pkg.Inspector.Nodes(nil, fn) } func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { @@ -2909,7 +2902,7 @@ func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { } func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { node := c.funcDescs.CallGraph.CreateNode(ssafn) for _, edge := range node.Out { callee := edge.Callee.Func @@ -2987,7 +2980,7 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { // Taint the argument as MUST_NOT_MODIFY, then propagate that // through functions like bytes.Split - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { sig := ssafn.Signature if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 { continue @@ -3045,7 +3038,7 @@ func loopedRegexp(name string) CallCheck { } func (c *Checker) CheckEmptyBranch(j *lint.Job) { - for _, ssafn := range j.Program.InitialFunctions { + for _, ssafn := range j.Pkg.InitialFunctions { if ssafn.Syntax() == nil { continue } @@ -3078,7 +3071,7 @@ func (c *Checker) CheckEmptyBranch(j *lint.Job) { } func (c *Checker) CheckMapBytesKey(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { for _, b := range fn.Blocks { insLoop: for _, ins := range b.Instrs { @@ -3146,7 +3139,7 @@ func (c *Checker) CheckSelfAssignment(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func buildTagsIdentical(s1, s2 []string) bool { @@ -3168,7 +3161,7 @@ func buildTagsIdentical(s1, s2 []string) bool { } func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { - for _, f := range job.Program.Files { + for _, f := range job.Pkg.Syntax { constraints := buildTags(f) for i, constraint1 := range constraints { for j, constraint2 := range constraints { @@ -3188,7 +3181,7 @@ func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { func (c *Checker) CheckSillyRegexp(j *lint.Job) { // We could use the rule checking engine for this, but the // arguments aren't really invalid. 
- for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { for _, b := range fn.Blocks { for _, ins := range b.Instrs { call, ok := ins.(*ssa.Call) @@ -3228,7 +3221,7 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { return } - groups := GroupSpecs(j, decl.Specs) + groups := GroupSpecs(j.Pkg.Fset, decl.Specs) groupLoop: for _, group := range groups { if len(group) < 2 { @@ -3264,11 +3257,11 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { j.Errorf(group[0], "only the first constant in this group has an explicit type") } } - InspectPreorder(j, []ast.Node{(*ast.GenDecl)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) } func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { for _, block := range fn.Blocks { for _, ins := range block.Instrs { call, ok := ins.(*ssa.Call) @@ -3365,7 +3358,7 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { @@ -3411,7 +3404,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { Ts := make([]types.Type, len(cc.List)) for i, expr := range cc.List { - Ts[i] = TypeOf(j, expr) + Ts[i] = j.Pkg.TypesInfo.TypeOf(expr) } ccs = append(ccs, ccAndTypes{cc: cc, types: Ts}) @@ -3432,7 +3425,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { } } - InspectPreorder(j, []ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) } func (c *Checker) CheckSingleArgAppend(j *lint.Job) { @@ -3446,7 +3439,7 @@ func (c *Checker) CheckSingleArgAppend(j *lint.Job) { } j.Errorf(call, "x = append(y) is equivalent to x = y") } - InspectPreorder(j, []ast.Node{(*ast.CallExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } func (c *Checker) CheckStructTags(j *lint.Job) { @@ -3475,7 +3468,7 @@ func (c *Checker) CheckStructTags(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.StructType)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.StructType)(nil)}, fn) } func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { @@ -3498,7 +3491,7 @@ func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { case "string": cs++ // only for string, floating point, integer and bool - T := Dereference(TypeOf(j, field.Type).Underlying()).Underlying() + T := Dereference(j.Pkg.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() basic, ok := T.(*types.Basic) if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { j.Errorf(field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 69bebaf7e..120d97f03 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -62,60 +62,56 @@ func (c *Checker) CheckPackageComment(j *lint.Job) { // which case they get appended. But that doesn't happen a lot in // the real world. 
- for _, pkg := range j.Program.InitialPackages { - if pkg.Name == "main" { + if j.Pkg.Name == "main" { + return + } + hasDocs := false + for _, f := range j.Pkg.Syntax { + if IsInTest(j, f) { continue } - hasDocs := false - for _, f := range pkg.Syntax { - if IsInTest(j, f) { - continue - } - if f.Doc != nil && len(f.Doc.List) > 0 { - hasDocs = true - prefix := "Package " + f.Name.Name + " " - if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { - j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) - } - f.Doc.Text() + if f.Doc != nil && len(f.Doc.List) > 0 { + hasDocs = true + prefix := "Package " + f.Name.Name + " " + if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { + j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) } + f.Doc.Text() } + } - if !hasDocs { - for _, f := range pkg.Syntax { - if IsInTest(j, f) { - continue - } - j.Errorf(f, "at least one file in a package should have a package comment") + if !hasDocs { + for _, f := range j.Pkg.Syntax { + if IsInTest(j, f) { + continue } + j.Errorf(f, "at least one file in a package should have a package comment") } } } func (c *Checker) CheckDotImports(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - for _, f := range pkg.Syntax { - imports: - for _, imp := range f.Imports { - path := imp.Path.Value - path = path[1 : len(path)-1] - for _, w := range pkg.Config.DotImportWhitelist { - if w == path { - continue imports - } + for _, f := range j.Pkg.Syntax { + imports: + for _, imp := range f.Imports { + path := imp.Path.Value + path = path[1 : len(path)-1] + for _, w := range j.Pkg.Config.DotImportWhitelist { + if w == path { + continue imports } + } - if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { - j.Errorf(imp, "should not use dot imports") - } + if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { + j.Errorf(imp, "should not use dot imports") } } } } func (c *Checker) CheckBlankImports(j *lint.Job) { - fset := j.Program.Fset() - for _, f := range j.Program.Files { + fset := j.Pkg.Fset + for _, f := range j.Pkg.Syntax { if IsInMain(j, f) || IsInTest(j, f) { continue } @@ -198,12 +194,12 @@ func (c *Checker) CheckIncDec(j *lint.Job) { j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) } - InspectPreorder(j, []ast.Node{(*ast.AssignStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) } func (c *Checker) CheckErrorReturn(j *lint.Job) { fnLoop: - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { sig := fn.Type().(*types.Signature) rets := sig.Results() if rets == nil || rets.Len() < 2 { @@ -227,7 +223,7 @@ fnLoop: // CheckUnexportedReturn checks that exported functions on exported // types do not return unexported types. 
func (c *Checker) CheckUnexportedReturn(j *lint.Job) { - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if fn.Synthetic != "" || fn.Parent() != nil { continue } @@ -250,23 +246,21 @@ func (c *Checker) CheckUnexportedReturn(j *lint.Job) { } func (c *Checker) CheckReceiverNames(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - for _, m := range pkg.SSA.Members { - if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { - ms := typeutil.IntuitiveMethodSet(T.Type(), nil) - for _, sel := range ms { - fn := sel.Obj().(*types.Func) - recv := fn.Type().(*types.Signature).Recv() - if Dereference(recv.Type()) != T.Type() { - // skip embedded methods - continue - } - if recv.Name() == "self" || recv.Name() == "this" { - j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) - } - if recv.Name() == "_" { - j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") - } + for _, m := range j.Pkg.SSA.Members { + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { + ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + for _, sel := range ms { + fn := sel.Obj().(*types.Func) + recv := fn.Type().(*types.Signature).Recv() + if Dereference(recv.Type()) != T.Type() { + // skip embedded methods + continue + } + if recv.Name() == "self" || recv.Name() == "this" { + j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + } + if recv.Name() == "_" { + j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") } } } @@ -274,37 +268,35 @@ func (c *Checker) CheckReceiverNames(j *lint.Job) { } func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - for _, m := range pkg.SSA.Members { - names := map[string]int{} - - var firstFn *types.Func - if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { - ms := typeutil.IntuitiveMethodSet(T.Type(), nil) - for _, sel := range ms { - fn := sel.Obj().(*types.Func) - recv := fn.Type().(*types.Signature).Recv() - if Dereference(recv.Type()) != T.Type() { - // skip embedded methods - continue - } - if firstFn == nil { - firstFn = fn - } - if recv.Name() != "" && recv.Name() != "_" { - names[recv.Name()]++ - } + for _, m := range j.Pkg.SSA.Members { + names := map[string]int{} + + var firstFn *types.Func + if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { + ms := typeutil.IntuitiveMethodSet(T.Type(), nil) + for _, sel := range ms { + fn := sel.Obj().(*types.Func) + recv := fn.Type().(*types.Signature).Recv() + if Dereference(recv.Type()) != T.Type() { + // skip embedded methods + continue } - } - - if len(names) > 1 { - var seen []string - for name, count := range names { - seen = append(seen, fmt.Sprintf("%dx %q", count, name)) + if firstFn == nil { + firstFn = fn + } + if recv.Name() != "" && recv.Name() != "_" { + names[recv.Name()]++ } + } + } - j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) + if len(names) > 1 { + var seen []string + for name, count := range names { + seen = append(seen, fmt.Sprintf("%dx %q", count, name)) } + + j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) } } } @@ -313,7 +305,7 @@ func (c *Checker) CheckContextFirstArg(j *lint.Job) { // TODO(dh): this check doesn't apply to 
test helpers. Example from the stdlib: // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) { fnLoop: - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if fn.Synthetic != "" || fn.Parent() != nil { continue } @@ -336,20 +328,18 @@ fnLoop: func (c *Checker) CheckErrorStrings(j *lint.Job) { objNames := map[*ssa.Package]map[string]bool{} - for _, pkg := range j.Program.InitialPackages { - ssapkg := pkg.SSA - objNames[ssapkg] = map[string]bool{} - for _, m := range ssapkg.Members { - if typ, ok := m.(*ssa.Type); ok { - objNames[ssapkg][typ.Name()] = true - } + ssapkg := j.Pkg.SSA + objNames[ssapkg] = map[string]bool{} + for _, m := range ssapkg.Members { + if typ, ok := m.(*ssa.Type); ok { + objNames[ssapkg][typ.Name()] = true } } - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { objNames[fn.Package()][fn.Name()] = true } - for _, fn := range j.Program.InitialFunctions { + for _, fn := range j.Pkg.InitialFunctions { if IsInTest(j, fn) { // We don't care about malformed error messages in tests; // they're usually for direct human consumption, not part @@ -439,15 +429,15 @@ func (c *Checker) CheckTimeNames(j *lint.Job) { } } } - for _, f := range j.Program.Files { + for _, f := range j.Pkg.Syntax { ast.Inspect(f, func(node ast.Node) bool { switch node := node.(type) { case *ast.ValueSpec: - T := TypeOf(j, node.Type) + T := j.Pkg.TypesInfo.TypeOf(node.Type) fn(T, node.Names) case *ast.FieldList: for _, field := range node.List { - T := TypeOf(j, field.Type) + T := j.Pkg.TypesInfo.TypeOf(field.Type) fn(T, field.Names) } } @@ -457,7 +447,7 @@ func (c *Checker) CheckTimeNames(j *lint.Job) { } func (c *Checker) CheckErrorVarNames(j *lint.Job) { - for _, f := range j.Program.Files { + for _, f := range j.Pkg.Syntax { for _, decl := range f.Decls { gen, ok := decl.(*ast.GenDecl) if !ok || gen.Tok != token.VAR { @@ -551,52 +541,50 @@ var httpStatusCodes = map[int]string{ } func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { - for _, pkg := range j.Program.InitialPackages { - whitelist := map[string]bool{} - for _, code := range pkg.Config.HTTPStatusCodeWhitelist { - whitelist[code] = true - } - fn := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { - return true - } + whitelist := map[string]bool{} + for _, code := range j.Pkg.Config.HTTPStatusCodeWhitelist { + whitelist[code] = true + } + fn := func(node ast.Node) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return true + } - var arg int - switch CallNameAST(j, call) { - case "net/http.Error": - arg = 2 - case "net/http.Redirect": - arg = 3 - case "net/http.StatusText": - arg = 0 - case "net/http.RedirectHandler": - arg = 1 - default: - return true - } - lit, ok := call.Args[arg].(*ast.BasicLit) - if !ok { - return true - } - if whitelist[lit.Value] { - return true - } + var arg int + switch CallNameAST(j, call) { + case "net/http.Error": + arg = 2 + case "net/http.Redirect": + arg = 3 + case "net/http.StatusText": + arg = 0 + case "net/http.RedirectHandler": + arg = 1 + default: + return true + } + lit, ok := call.Args[arg].(*ast.BasicLit) + if !ok { + return true + } + if whitelist[lit.Value] { + return true + } - n, err := strconv.Atoi(lit.Value) - if err != nil { - return true - } - s, ok := httpStatusCodes[n] - if !ok { - return true - } - j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + n, err := strconv.Atoi(lit.Value) + if err != nil { return 
true } - for _, f := range pkg.Syntax { - ast.Inspect(f, fn) + s, ok := httpStatusCodes[n] + if !ok { + return true } + j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + return true + } + for _, f := range j.Pkg.Syntax { + ast.Inspect(f, fn) } } @@ -611,7 +599,7 @@ func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.SwitchStmt)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) } func (c *Checker) CheckYodaConditions(j *lint.Job) { @@ -629,7 +617,7 @@ func (c *Checker) CheckYodaConditions(j *lint.Job) { } j.Errorf(cond, "don't use Yoda conditions") } - InspectPreorder(j, []ast.Node{(*ast.BinaryExpr)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) } func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { @@ -646,5 +634,5 @@ func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { } } } - InspectPreorder(j, []ast.Node{(*ast.BasicLit)(nil)}, fn) + j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) } diff --git a/stylecheck/names.go b/stylecheck/names.go index e855590f6..1c0718fdd 100644 --- a/stylecheck/names.go +++ b/stylecheck/names.go @@ -71,109 +71,107 @@ func (c *Checker) CheckNames(j *lint.Job) { } } - for _, pkg := range j.Program.InitialPackages { - initialisms := make(map[string]bool, len(pkg.Config.Initialisms)) - for _, word := range pkg.Config.Initialisms { - initialisms[word] = true + initialisms := make(map[string]bool, len(j.Pkg.Config.Initialisms)) + for _, word := range j.Pkg.Config.Initialisms { + initialisms[word] = true + } + for _, f := range j.Pkg.Syntax { + // Package names need slightly different handling than other names. + if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { + j.Errorf(f, "should not use underscores in package names") + } + if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { + j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) } - for _, f := range pkg.Syntax { - // Package names need slightly different handling than other names. - if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { - j.Errorf(f, "should not use underscores in package names") - } - if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { - j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) - } - ast.Inspect(f, func(node ast.Node) bool { - switch v := node.(type) { - case *ast.AssignStmt: - if v.Tok != token.DEFINE { - return true - } - for _, exp := range v.Lhs { - if id, ok := exp.(*ast.Ident); ok { - check(id, "var", initialisms) - } - } - case *ast.FuncDecl: - // Functions with no body are defined elsewhere (in - // assembly, or via go:linkname). These are likely to - // be something very low level (such as the runtime), - // where our rules don't apply. - if v.Body == nil { - return true + ast.Inspect(f, func(node ast.Node) bool { + switch v := node.(type) { + case *ast.AssignStmt: + if v.Tok != token.DEFINE { + return true + } + for _, exp := range v.Lhs { + if id, ok := exp.(*ast.Ident); ok { + check(id, "var", initialisms) } + } + case *ast.FuncDecl: + // Functions with no body are defined elsewhere (in + // assembly, or via go:linkname). These are likely to + // be something very low level (such as the runtime), + // where our rules don't apply. 
+ if v.Body == nil { + return true + } - if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { - return true - } + if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + return true + } - thing := "func" - if v.Recv != nil { - thing = "method" - } + thing := "func" + if v.Recv != nil { + thing = "method" + } - if !isTechnicallyExported(v) { - check(v.Name, thing, initialisms) - } + if !isTechnicallyExported(v) { + check(v.Name, thing, initialisms) + } - checkList(v.Type.Params, thing+" parameter", initialisms) - checkList(v.Type.Results, thing+" result", initialisms) - case *ast.GenDecl: - if v.Tok == token.IMPORT { - return true - } - var thing string - switch v.Tok { - case token.CONST: - thing = "const" - case token.TYPE: - thing = "type" - case token.VAR: - thing = "var" - } - for _, spec := range v.Specs { - switch s := spec.(type) { - case *ast.TypeSpec: - check(s.Name, thing, initialisms) - case *ast.ValueSpec: - for _, id := range s.Names { - check(id, thing, initialisms) - } - } - } - case *ast.InterfaceType: - // Do not check interface method names. - // They are often constrainted by the method names of concrete types. - for _, x := range v.Methods.List { - ft, ok := x.Type.(*ast.FuncType) - if !ok { // might be an embedded interface name - continue + checkList(v.Type.Params, thing+" parameter", initialisms) + checkList(v.Type.Results, thing+" result", initialisms) + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return true + } + var thing string + switch v.Tok { + case token.CONST: + thing = "const" + case token.TYPE: + thing = "type" + case token.VAR: + thing = "var" + } + for _, spec := range v.Specs { + switch s := spec.(type) { + case *ast.TypeSpec: + check(s.Name, thing, initialisms) + case *ast.ValueSpec: + for _, id := range s.Names { + check(id, thing, initialisms) } - checkList(ft.Params, "interface method parameter", initialisms) - checkList(ft.Results, "interface method result", initialisms) - } - case *ast.RangeStmt: - if v.Tok == token.ASSIGN { - return true } - if id, ok := v.Key.(*ast.Ident); ok { - check(id, "range var", initialisms) - } - if id, ok := v.Value.(*ast.Ident); ok { - check(id, "range var", initialisms) + } + case *ast.InterfaceType: + // Do not check interface method names. + // They are often constrainted by the method names of concrete types. 
+ for _, x := range v.Methods.List { + ft, ok := x.Type.(*ast.FuncType) + if !ok { // might be an embedded interface name + continue } - case *ast.StructType: - for _, f := range v.Fields.List { - for _, id := range f.Names { - check(id, "struct field", initialisms) - } + checkList(ft.Params, "interface method parameter", initialisms) + checkList(ft.Results, "interface method result", initialisms) + } + case *ast.RangeStmt: + if v.Tok == token.ASSIGN { + return true + } + if id, ok := v.Key.(*ast.Ident); ok { + check(id, "range var", initialisms) + } + if id, ok := v.Value.(*ast.Ident); ok { + check(id, "range var", initialisms) + } + case *ast.StructType: + for _, f := range v.Fields.List { + for _, id := range f.Names { + check(id, "struct field", initialisms) } } - return true - }) - } + } + return true + }) } } diff --git a/unused/unused.go b/unused/unused.go index e6676d8e7..f69bddae8 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -7,6 +7,7 @@ import ( "go/types" "io" "strings" + "sync" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/lint" @@ -133,12 +134,19 @@ func assert(b bool) { type Checker struct { WholeProgram bool Debug io.Writer + + interfaces []*types.Interface + initialPackages []*lint.Pkg + scopes map[*types.Scope]*ssa.Function + + seenMu sync.Mutex + seen map[token.Position]struct{} + out []types.Object } func (*Checker) Name() string { return "unused" } func (*Checker) Prefix() string { return "U" } -func (l *Checker) Init(*lint.Program) {} func (l *Checker) Checks() []lint.Check { return []lint.Check{ {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, @@ -405,9 +413,63 @@ var runtimeFuncs = map[string]bool{ "write": true, } +func (c *Checker) Init(prog *lint.Program) { + for _, pkg := range prog.AllPackages { + c.interfaces = append(c.interfaces, interfacesFromExportData(pkg.Types)...) + } + c.initialPackages = prog.InitialPackages + c.seen = map[token.Position]struct{}{} + + c.scopes = map[*types.Scope]*ssa.Function{} + for _, pkg := range prog.InitialPackages { + for _, fn := range pkg.InitialFunctions { + if fn.Object() != nil { + scope := fn.Object().(*types.Func).Scope() + c.scopes[scope] = fn + } + } + } + + // This is a hack to work in the confines of "one package per + // job". We do all the actual work in the Init function, and only + // report results in the actual checker function. + var out []types.Object + if c.WholeProgram { + // (e1) all packages share a single graph + out = c.processPkgs(prog.InitialPackages...) + } else { + var wg sync.WaitGroup + var mu sync.Mutex + for _, pkg := range prog.InitialPackages { + pkg := pkg + wg.Add(1) + go func() { + res := c.processPkgs(pkg) + mu.Lock() + out = append(out, res...) + mu.Unlock() + wg.Done() + }() + } + wg.Wait() + } + out2 := make([]types.Object, 0, len(out)) + for _, v := range out { + if _, ok := c.seen[prog.Fset().Position(v.Pos())]; !ok { + out2 = append(out2, v) + } + } + c.out = out2 +} + func (c *Checker) Lint(j *lint.Job) { - unused := c.Check(j.Program, j) + // The actual work is being done in Init. We only report existing + // results here. 
+ unused := c.out for _, u := range unused { + if u.Pkg() != j.Pkg.Types { + continue + } name := u.Name() if sig, ok := u.Type().(*types.Signature); ok && sig.Recv() != nil { switch sig.Recv().Type().(type) { @@ -430,250 +492,228 @@ func (c *Checker) debugf(f string, v ...interface{}) { } } -func (c *Checker) Check(prog *lint.Program, j *lint.Job) []types.Object { - scopes := map[*types.Scope]*ssa.Function{} - for _, fn := range j.Program.InitialFunctions { - if fn.Object() != nil { - scope := fn.Object().(*types.Func).Scope() - scopes[scope] = fn +func (graph *Graph) quieten(node *Node) { + if node.seen { + return + } + switch obj := node.obj.(type) { + case *ssa.Function: + sig := obj.Type().(*types.Signature) + if sig.Recv() != nil { + if node, ok := graph.nodeMaybe(sig.Recv()); ok { + node.quiet = true + } + } + for i := 0; i < sig.Params().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { + node.quiet = true + } + } + for i := 0; i < sig.Results().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { + node.quiet = true + } + } + case *types.Named: + for i := 0; i < obj.NumMethods(); i++ { + m := graph.pkg.Prog.FuncValue(obj.Method(i)) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } + } + case *types.Struct: + for i := 0; i < obj.NumFields(); i++ { + if node, ok := graph.nodeMaybe(obj.Field(i)); ok { + node.quiet = true + } + } + case *types.Interface: + for i := 0; i < obj.NumExplicitMethods(); i++ { + m := obj.ExplicitMethod(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } } } +} + +func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { + graph := NewGraph() + graph.wholeProgram = c.WholeProgram + graph.scopes = c.scopes + graph.initialPackages = c.initialPackages - seen := map[token.Position]struct{}{} var out []types.Object - processPkgs := func(pkgs ...*lint.Pkg) { - graph := NewGraph() - graph.wholeProgram = c.WholeProgram - graph.job = j - graph.scopes = scopes - - for _, pkg := range pkgs { - if pkg.PkgPath == "unsafe" { - continue - } - graph.entry(pkg) + + for _, pkg := range pkgs { + if pkg.PkgPath == "unsafe" { + continue } + graph.entry(pkg) + } - if c.WholeProgram { - var ifaces []*types.Interface - var notIfaces []types.Type - - // implement as many interfaces as possible - graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { - switch t := t.(type) { - case *types.Interface: - ifaces = append(ifaces, t) - default: - if _, ok := t.Underlying().(*types.Interface); !ok { - notIfaces = append(notIfaces, t) - } - } - }) + if c.WholeProgram { + var ifaces []*types.Interface + var notIfaces []types.Type - for _, pkg := range prog.AllPackages { - ifaces = append(ifaces, interfacesFromExportData(pkg.Types)...) + // implement as many interfaces as possible + graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + switch t := t.(type) { + case *types.Interface: + ifaces = append(ifaces, t) + default: + if _, ok := t.Underlying().(*types.Interface); !ok { + notIfaces = append(notIfaces, t) + } } + }) - // (8.0) handle interfaces - // (e2) types aim to implement all exported interfaces from all packages - for _, t := range notIfaces { - ms := graph.msCache.MethodSet(t) - for _, iface := range ifaces { - if sels, ok := graph.implements(t, iface, ms); ok { - for _, sel := range sels { - graph.useMethod(t, sel, t, "implements") - } + // OPT(dh): this is not terribly efficient + ifaces = append(ifaces, c.interfaces...) 
+ + // (8.0) handle interfaces + // (e2) types aim to implement all exported interfaces from all packages + for _, t := range notIfaces { + ms := graph.msCache.MethodSet(t) + for _, iface := range ifaces { + if sels, ok := graph.implements(t, iface, ms); ok { + for _, sel := range sels { + graph.useMethod(t, sel, t, "implements") } } } } + } - if c.Debug != nil { - debugNode := func(node *Node) { - if node.obj == nil { - c.debugf("n%d [label=\"Root\"];\n", node.id) - } else { - c.debugf("n%d [label=%q];\n", node.id, node.obj) - } - for used, reasons := range node.used { - for _, reason := range reasons { - c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) - } + if c.Debug != nil { + debugNode := func(node *Node) { + if node.obj == nil { + c.debugf("n%d [label=\"Root\"];\n", node.id) + } else { + c.debugf("n%d [label=%q];\n", node.id, node.obj) + } + for used, reasons := range node.used { + for _, reason := range reasons { + c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) } } + } - c.debugf("digraph{\n") - debugNode(graph.Root) - for _, node := range graph.Nodes { - debugNode(node) - } - graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { - debugNode(value.(*Node)) - }) - c.debugf("}\n") + c.debugf("digraph{\n") + debugNode(graph.Root) + for _, node := range graph.Nodes { + debugNode(node) } + graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + debugNode(value.(*Node)) + }) + c.debugf("}\n") + } - graph.color(graph.Root) - // if a node is unused, don't report any of the node's - // children as unused. for example, if a function is unused, - // don't flag its receiver. if a named type is unused, don't - // flag its methods. - quieten := func(node *Node) { - if node.seen { - return - } + graph.color(graph.Root) + // if a node is unused, don't report any of the node's + // children as unused. for example, if a function is unused, + // don't flag its receiver. if a named type is unused, don't + // flag its methods. 
+ + for _, node := range graph.Nodes { + graph.quieten(node) + } + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + graph.quieten(value.(*Node)) + }) + + report := func(node *Node) { + if node.seen { + var pos token.Pos switch obj := node.obj.(type) { + case types.Object: + pos = obj.Pos() case *ssa.Function: - sig := obj.Type().(*types.Signature) - if sig.Recv() != nil { - if node, ok := graph.nodeMaybe(sig.Recv()); ok { - node.quiet = true - } - } - for i := 0; i < sig.Params().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { - node.quiet = true - } - } - for i := 0; i < sig.Results().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { - node.quiet = true - } - } - case *types.Named: - for i := 0; i < obj.NumMethods(); i++ { - m := pkgs[0].SSA.Prog.FuncValue(obj.Method(i)) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true - } - } - case *types.Struct: - for i := 0; i < obj.NumFields(); i++ { - if node, ok := graph.nodeMaybe(obj.Field(i)); ok { - node.quiet = true - } - } - case *types.Interface: - for i := 0; i < obj.NumExplicitMethods(); i++ { - m := obj.ExplicitMethod(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true - } - } + pos = obj.Pos() } + + if pos != 0 { + c.seenMu.Lock() + c.seen[pkgs[0].Fset.Position(pos)] = struct{}{} + c.seenMu.Unlock() + } + return } - for _, node := range graph.Nodes { - quieten(node) + if node.quiet { + c.debugf("n%d [color=purple];\n", node.id) + return } - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - quieten(value.(*Node)) - }) - report := func(node *Node) { - if node.seen { - var pos token.Pos - switch obj := node.obj.(type) { - case types.Object: - pos = obj.Pos() - case *ssa.Function: - pos = obj.Pos() - } + type packager1 interface { + Pkg() *types.Package + } + type packager2 interface { + Package() *ssa.Package + } - if pos != 0 { - seen[prog.Fset().Position(pos)] = struct{}{} + // do not report objects from packages we aren't checking. + checkPkg: + switch obj := node.obj.(type) { + case packager1: + for _, pkg := range pkgs { + if pkg.Types == obj.Pkg() { + break checkPkg } - return } - if node.quiet { - c.debugf("n%d [color=purple];\n", node.id) - return + c.debugf("n%d [color=yellow];\n", node.id) + return + case packager2: + // This happens to filter $bound and $thunk, which + // should be fine, since we wouldn't want to report + // them, anyway. Remember that this filtering is only + // for the output, it doesn't affect the reachability + // of nodes in the graph. + for _, pkg := range pkgs { + if pkg.SSA == obj.Package() { + break checkPkg + } } + c.debugf("n%d [color=yellow];\n", node.id) + return + } - type packager1 interface { - Pkg() *types.Package + c.debugf("n%d [color=red];\n", node.id) + switch obj := node.obj.(type) { + case *types.Var: + // don't report unnamed variables (receivers, interface embedding) + if obj.Name() != "" || obj.IsField() { + out = append(out, obj) } - type packager2 interface { - Package() *ssa.Package + case types.Object: + if obj.Name() != "_" { + out = append(out, obj) } - - // do not report objects from packages we aren't checking. - checkPkg: - switch obj := node.obj.(type) { - case packager1: - for _, pkg := range pkgs { - if pkg.Types == obj.Pkg() { - break checkPkg - } - } - c.debugf("n%d [color=yellow];\n", node.id) - return - case packager2: - // This happens to filter $bound and $thunk, which - // should be fine, since we wouldn't want to report - // them, anyway. 
Remember that this filtering is only - // for the output, it doesn't affect the reachability - // of nodes in the graph. - for _, pkg := range pkgs { - if pkg.SSA == obj.Package() { - break checkPkg - } - } - c.debugf("n%d [color=yellow];\n", node.id) + case *ssa.Function: + if obj == nil { + // TODO(dh): how does this happen? return } - - c.debugf("n%d [color=red];\n", node.id) - switch obj := node.obj.(type) { - case *types.Var: - // don't report unnamed variables (receivers, interface embedding) - if obj.Name() != "" || obj.IsField() { - out = append(out, obj) - } - case types.Object: - if obj.Name() != "_" { - out = append(out, obj) - } - case *ssa.Function: - if obj == nil { - // TODO(dh): how does this happen? - return - } - if obj.Object() == nil { - // Closures - return - } - out = append(out, obj.Object()) - default: - c.debugf("n%d [color=gray];\n", node.id) + if obj.Object() == nil { + // Closures + return } + out = append(out, obj.Object()) + default: + c.debugf("n%d [color=gray];\n", node.id) } - for _, node := range graph.Nodes { - report(node) - } - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - report(value.(*Node)) - }) } - - if c.WholeProgram { - // (e1) all packages share a single graph - processPkgs(prog.InitialPackages...) - } else { - for _, pkg := range prog.InitialPackages { - processPkgs(pkg) - } + for _, node := range graph.Nodes { + report(node) } - out2 := make([]types.Object, 0, len(out)) - for _, v := range out { - if _, ok := seen[prog.Fset().Position(v.Pos())]; !ok { - out2 = append(out2, v) - } - } - return out2 + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + report(value.(*Node)) + }) + + return out } type Graph struct { - job *lint.Job pkg *ssa.Package msCache typeutil.MethodSetCache scopes map[*types.Scope]*ssa.Function @@ -688,6 +728,8 @@ type Graph struct { seenTypes typeutil.Map seenFns map[*ssa.Function]struct{} + + initialPackages []*lint.Pkg } func NewGraph() *Graph { @@ -835,7 +877,7 @@ func isIrrelevant(obj interface{}) bool { func (g *Graph) isInterestingPackage(pkg *types.Package) bool { if g.wholeProgram { - for _, opkg := range g.job.Program.InitialPackages { + for _, opkg := range g.initialPackages { if opkg.Types == pkg { return true } @@ -995,10 +1037,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { // Find constants being used inside functions, find sinks in tests handledConsts := map[*ast.Ident]struct{}{} - for _, fn := range g.job.Program.InitialFunctions { - if fn.Pkg != g.pkg { - continue - } + for _, fn := range pkg.InitialFunctions { g.see(fn) node := fn.Syntax() if node == nil { @@ -1058,14 +1097,14 @@ func (g *Graph) entry(pkg *lint.Pkg) { pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)}, func(n ast.Node) { switch n := n.(type) { case *ast.FuncDecl: - fn = pkg.SSA.Prog.FuncValue(lintdsl.ObjectOf(g.job, n.Name).(*types.Func)) + fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) if fn != nil { g.see(fn) } case *ast.GenDecl: switch n.Tok { case token.CONST: - groups := lintdsl.GroupSpecs(g.job, n.Specs) + groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs) for _, specs := range groups { if len(specs) > 1 { cg := &ConstGroup{} @@ -1084,7 +1123,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { for _, spec := range n.Specs { v := spec.(*ast.ValueSpec) for _, name := range v.Names { - T := lintdsl.TypeOf(g.job, name) + T := pkg.TypesInfo.TypeOf(name) if fn != nil { g.seeAndUse(T, fn, "var decl") } else { From b088cadd8b3dc8d72506530f25022b3f55d9dd5e Mon Sep 17 
00:00:00 2001 From: Dominik Honnef Date: Mon, 15 Apr 2019 06:02:36 +0200 Subject: [PATCH 124/254] lint: remove Problem.Checker field Nothing used it anymore. --- lint/lint.go | 6 ------ lint/lintutil/util.go | 1 - 2 files changed, 7 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 5838676bb..c4d9ff671 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -28,7 +28,6 @@ type Job struct { Pkg *Pkg GoVersion int - checker string check Check problems []Problem @@ -135,7 +134,6 @@ type Problem struct { Position token.Position // position in source file Text string // the prose that describes the problem Check string - Checker string Package *Pkg Severity Severity } @@ -350,7 +348,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { Position: DisplayPosition(prog.Fset(), c.Pos()), Text: "malformed linter directive; missing the required reason field?", Check: "", - Checker: "lint", Package: nil, } out = append(out, p) @@ -409,7 +406,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { for _, pkg := range pkgs { j := &Job{ Pkg: pkg, - checker: checker.Name(), check: check, GoVersion: l.GoVersion, } @@ -489,7 +485,6 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { Position: DisplayPosition(prog.Fset(), ig.pos), Text: "this linter directive didn't match anything; should it be removed?", Check: "", - Checker: "lint", Package: nil, } out = append(out, p) @@ -616,7 +611,6 @@ func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem Position: pos, Text: fmt.Sprintf(format, args...), Check: j.check.ID, - Checker: j.checker, Package: j.Pkg, } j.problems = append(j.problems, problem) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 636025ad5..701711831 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -379,7 +379,6 @@ func compileErrors(pkg *packages.Package) []lint.Problem { p := lint.Problem{ Position: parsePos(err.Pos), Text: err.Msg, - Checker: "compiler", Check: "compile", } ps = append(ps, p) From e561f6794a2a09dd97bd3cdefde08db0d564f55c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 18 Apr 2019 02:09:45 +0200 Subject: [PATCH 125/254] staticcheck: recognize inline option in JSON tags Some libraries reuse the json tags but add support for an "inline" option. 
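For illustration, this is the sort of struct tag the change is meant to accept; the type and field names below are invented for the example and are not part of the patch. encoding/json itself ignores the option, but libraries that reuse json tags interpret ",inline" as a request to flatten the embedded value into the parent object, so checkJSONTag now treats the option as known and only complains when it is repeated.

package pkg

// Metadata is a hypothetical embedded value that an "inline"-aware
// encoder would flatten into the enclosing JSON object.
type Metadata struct {
	ID      string `json:"id"`
	Version int    `json:"version"`
}

type Config struct {
	Metadata `json:",inline"`                 // accepted after this change
	Name     string   `json:"name,omitempty"` // unchanged behaviour
	Extra    Metadata `json:",inline,inline"` // still flagged: duplicate "inline" option
}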
--- staticcheck/lint.go | 7 ++++++- .../testdata/src/CheckStructTags/CheckStructTags.go | 1 + 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index bad1efb1e..1300eff8c 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -3481,7 +3481,7 @@ func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { j.Errorf(field.Tag, "invalid JSON field name %q", fields[0]) } } - var co, cs int + var co, cs, ci int for _, s := range fields[1:] { switch s { case "omitempty": @@ -3496,6 +3496,8 @@ func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { j.Errorf(field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") } + case "inline": + ci++ default: j.Errorf(field.Tag, "unknown JSON option %q", s) } @@ -3506,6 +3508,9 @@ func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { if cs > 1 { j.Errorf(field.Tag, `duplicate JSON option "string"`) } + if ci > 1 { + j.Errorf(field.Tag, `duplicate JSON option "inline"`) + } } func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { diff --git a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go index 4d75f9bc7..b8d4ddde7 100644 --- a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go +++ b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go @@ -14,6 +14,7 @@ type T1 struct { L **int `json:",string"` // MATCH "the JSON string option" M complex128 `json:",string"` // MATCH "the JSON string option" N int `json:"some-name"` + O int `json:"some-name,inline"` } type T2 struct { From bad1bd262ba843f3cdbff9812edff25f0cee54e5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 22 May 2019 04:25:31 +0200 Subject: [PATCH 126/254] Add FUNDING.yml --- .github/FUNDING.yml | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..89271fc37 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +patreon: dominikh From ea95bdfd59fc14c1c9afa08716e0cd013eae4e12 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 23 May 2019 10:30:50 +0200 Subject: [PATCH 127/254] FUNDING.yml: add GitHub provider --- .github/FUNDING.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 89271fc37..6f059f31e 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1 +1,2 @@ patreon: dominikh +github: dominikh From a1e48607f5793ffdd412cda1337fe21b2b6823fa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 16 Apr 2019 19:29:10 +0200 Subject: [PATCH 128/254] Port staticcheck to the go/analysis framework This change ports all static analysis checks to the go/analysis framework and turns cmd/staticcheck into a sophisticated runner for analyses. Since our previous framework was built around the idea of having all data in memory at once, some changes had to be made to accomodate the modular go/analysis framework. All information about dependencies have to be serialized as facts. This includes information such as which objects are deprecated and which functions are pure. We have thus converted the 'functions' package to act as analyses instead of generating a global set of information. SSA packages are built per package under analysis no single SSA program exists. 
This also means that nodes in the SSA graph aren't canonical; the same function in a dependency may be represented by many different objects. We no longer store anything SSA-related across analyses. go/analysis is designed around the idea of enabling caching of facts, and cmd/staticcheck was therefore designed to make use of caching. We rely on the Go build cache to avoid loading packages from source, and we implement our own cache of facts to avoid reanalyzing packages that haven't changed. This combination can greatly reduce both memory use and runtime. For smaller packages, it even allows real-time checking in something like a language server. We've replaced our own testing utilities with go/analysis/analysistest, primarily for the sake of consistency with other analyses. Reimplementing 'unused' in the new framework required extra work and special knowledge in the runner. Unused cannot analyze packages in isolation, since files may be shared between test variants and identifiers may only be used in some variations of a package. Unused still exposes an entry point matching the go/analysis framework, but it computes no facts and reports no diagnostics. Instead, it incrementally builds a view of all packages it sees. After all packages have been analyzed, the result can be queried and processed. While all other analyses can be reused by other runners directly, using unused requires special code in the runner. We've deleted the gosimple, unused and megacheck binaries. They had already been deprecated, and there was little point in porting them to the new framework. With the removal of the unused binary, there is currently no way to use its whole-program mode. This will be rectified in a follow-up commit that adds said mode as its own check in staticcheck.
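To make the fact-based model above concrete, here is a minimal sketch of what such an analyzer can look like; the analyzer and fact names are invented for this example, and the real checks in this change differ in detail. The key points are that cross-package information lives only in gob-encodable facts exported on objects, and that dependencies on other analyses (such as the shared AST inspector) are declared up front so the runner can schedule and cache them.

package example

import (
	"go/ast"
	"strings"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
)

// IsDeprecated is a fact attached to deprecated objects. Facts must be
// gob-encodable; the runner serializes them so analyses of dependent
// packages can import them without reloading the dependency from source.
type IsDeprecated struct{ Msg string }

func (*IsDeprecated) AFact() {}

var Analyzer = &analysis.Analyzer{
	Name:      "exampledeprecated",
	Doc:       "exports a fact for functions whose doc comment contains a Deprecated: notice (illustrative sketch)",
	Requires:  []*analysis.Analyzer{inspect.Analyzer},
	FactTypes: []analysis.Fact{(*IsDeprecated)(nil)},
	Run:       run,
}

func run(pass *analysis.Pass) (interface{}, error) {
	in := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	in.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node) {
		fn := n.(*ast.FuncDecl)
		if fn.Doc == nil || !strings.Contains(fn.Doc.Text(), "Deprecated:") {
			return
		}
		if obj := pass.TypesInfo.Defs[fn.Name]; obj != nil {
			// Persisted by the runner; visible to analyses of importing packages.
			pass.ExportObjectFact(obj, &IsDeprecated{Msg: strings.TrimSpace(fn.Doc.Text())})
		}
	})
	return nil, nil
}

An analysis running on an importing package can then call pass.ImportObjectFact on the referenced object to ask whether it was marked deprecated, instead of consulting a shared in-memory program as before.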
--- cmd/gosimple/README.md | 1 - cmd/gosimple/gosimple.go | 20 - cmd/megacheck/README.md | 1 - cmd/megacheck/megacheck.go | 93 -- cmd/staticcheck/staticcheck.go | 19 +- cmd/unused/README.md | 1 - cmd/unused/main.go | 57 - config/config.go | 28 +- functions/concrete.go | 56 - functions/functions.go | 150 -- functions/loops.go | 2 +- functions/pure.go | 88 +- functions/terminates.go | 4 +- internal/cache/cache.go | 473 ++++++ internal/cache/cache_test.go | 270 ++++ internal/cache/default.go | 85 ++ internal/cache/hash.go | 176 +++ internal/cache/hash_test.go | 52 + internal/passes/buildssa/buildssa.go | 116 ++ internal/passes/buildssa/buildssa_test.go | 29 + internal/passes/buildssa/testdata/src/a/a.go | 16 + internal/renameio/renameio.go | 83 ++ internal/sharedcheck/lint.go | 10 +- lint/analysis.go | 39 + lint/lint.go | 607 +++----- lint/lint_test.go | 34 - lint/lintdsl/lintdsl.go | 98 +- lint/lintutil/format/format.go | 23 +- lint/lintutil/util.go | 237 ++- lint/runner.go | 659 +++++++++ lint/testdata/src/Test/line-ignores.go | 11 +- lint/testutil/util.go | 261 ---- loader/loader.go | 197 +++ simple/analysis.go | 223 +++ simple/doc.go | 12 + simple/lint.go | 509 ++++--- simple/lint_test.go | 66 +- .../LimeTimeUntil_go18/LimeTimeUntil_go18.go | 2 +- .../src/LintAssertNotNil/LintAssertNotNil.go | 6 +- .../testdata/src/LintBlankOK/LintBlankOK.go | 4 +- .../LintBytesBufferConversions.go | 8 +- .../LintDeclareAssign/LintDeclareAssign.go | 6 +- .../LintErrorsNewSprintf.go | 2 +- .../LintGuardedDelete/LintGuardedDelete.go | 4 +- .../src/LintLoopSlide/LintLoopSlide.go | 2 +- .../src/LintMakeLenCap/LintMakeLenCap.go | 10 +- .../LintNilCheckAroundRange.go | 4 +- .../LintRangeStringRunes.go | 4 +- .../LintRedundantBreak/LintRedundantBreak.go | 2 +- .../LintRedundantReturn.go | 8 +- .../LintRedundantSprintf.go | 12 +- .../LintSimplifyTypeSwitch.go | 2 +- .../src/LintSortHelpers/LintSortHelpers.go | 14 +- simple/testdata/src/bool-cmp/bool-cmp.go | 34 +- simple/testdata/src/compare/compare.go | 4 +- simple/testdata/src/contains/contains.go | 36 +- simple/testdata/src/convert/convert.go | 8 +- simple/testdata/src/convert_go17/convert.go | 2 +- simple/testdata/src/convert_go18/convert.go | 4 +- simple/testdata/src/copy/copy.go | 6 +- simple/testdata/src/for-true/for-true.go | 2 +- simple/testdata/src/generated/input.go | 2 +- simple/testdata/src/if-return/if-return.go | 8 +- .../testdata/src/loop-append/loop-append.go | 2 +- simple/testdata/src/nil-len/nil-len.go | 18 +- simple/testdata/src/range_go14/range_go14.go | 8 +- .../src/receive-blank/receive-blank.go | 8 +- simple/testdata/src/regexp-raw/regexp-raw.go | 4 +- .../single-case-select/single-case-select.go | 8 +- simple/testdata/src/slicing/slicing.go | 2 +- simple/testdata/src/time-since/time-since.go | 2 +- simple/testdata/src/trim/trim.go | 26 +- ssa/func.go | 64 + staticcheck/analysis.go | 527 +++++++ staticcheck/doc.go | 14 + staticcheck/knowledge.go | 25 + staticcheck/lint.go | 1291 +++++++++-------- staticcheck/lint_test.go | 121 +- staticcheck/rules.go | 19 +- .../CheckArgOverwritten.go | 6 + .../src/CheckBenchmarkN/CheckBenchmarkN.go | 2 +- .../CheckBytesEqualIP/CheckBytesEqualIP.go | 2 +- .../CheckCanonicalHeaderKey.go | 4 +- .../CheckConcurrentTesting.go | 6 +- .../CheckCyclicFinalizer.go | 7 +- .../CheckDeferInInfiniteLoop.go | 4 +- .../src/CheckDeferLock/CheckDeferLock.go | 4 +- .../src/CheckDeprecated/CheckDeprecated.go | 2 +- .../CheckDeprecated_go14/CheckDeprecated.go | 10 +- .../CheckDeprecated_go18/CheckDeprecated.go | 18 +- 
.../CheckDiffSizeComparison.go | 30 +- .../CheckDoubleNegation.go | 4 +- .../CheckDubiousDeferInChannelRangeLoop.go | 2 +- .../CheckDuplicateBuildConstraints.go | 2 +- .../src/CheckEarlyDefer/CheckEarlyDefer.go | 4 +- .../src/CheckEmptyBranch/CheckEmptyBranch.go | 12 +- .../CheckEmptyBranch/CheckEmptyBranch_test.go | 2 +- .../CheckEmptyCriticalSection.go | 18 +- .../CheckEncodingBinary.go | 14 +- .../CheckEncodingBinary.go | 2 +- .../testdata/src/CheckExec/CheckExec.go | 2 +- .../CheckExtremeComparison.go | 26 +- .../CheckIneffectiveAppend.go | 13 +- .../CheckIneffectiveCopy.go | 4 +- .../CheckIneffectiveLoop.go | 13 +- .../CheckInfiniteEmptyLoop.go | 15 +- .../CheckInfiniteRecursion.go | 8 +- .../CheckLeakyTimeTick/CheckLeakyTimeTick.go | 4 +- .../CheckLhsRhsIdentical.go | 16 +- .../CheckListenAddress/CheckListenAddress.go | 4 +- .../CheckLoopCondition/CheckLoopCondition.go | 4 +- .../CheckLoopEmptyDefault.go | 2 +- .../testdata/src/CheckMathInt/CheckMathInt.go | 4 +- .../CheckMissingEnumTypesInDeclaration.go | 8 +- .../CheckNaNComparison/CheckNaNComparison.go | 6 +- .../testdata/src/CheckNilMaps/CheckNilMaps.go | 2 +- .../CheckNonOctalFileMode.go | 2 +- .../CheckNonUniqueCutset.go | 2 +- .../src/CheckNoopMarshal/CheckNoopMarshal.go | 34 +- .../testdata/src/CheckPrintf/CheckPrintf.go | 134 +- .../CheckPureFunctions/CheckPureFunctions.go | 8 +- .../CheckPureFunctions_test.go | 2 +- .../CheckRangeStringRunes.go | 4 +- .../CheckRegexpMatchLoop.go | 6 +- .../testdata/src/CheckRegexps/CheckRegexps.go | 24 +- .../CheckRepeatedIfElse.go | 4 +- .../src/CheckScopedBreak/CheckScopedBreak.go | 14 +- .../CheckSelfAssignment.go | 9 +- .../CheckSillyBitwiseOps.go | 8 +- .../CheckSingleArgAppend.go | 4 +- .../CheckStringsReplaceZero.go | 2 +- .../src/CheckStructTags/CheckStructTags.go | 22 +- .../CheckSyncPoolValue/CheckSyncPoolValue.go | 10 +- .../src/CheckTemplate/CheckTemplate.go | 4 +- .../CheckTestMainExit-1.go | 2 +- .../CheckTestMainExit-4.go | 2 +- .../src/CheckTimeParse/CheckTimeParse.go | 4 +- .../CheckTimeSleepConstant.go | 4 +- .../CheckTimerResetReturnValue.go | 6 +- .../CheckToLowerToUpperComparison.go | 10 +- .../testdata/src/CheckURLs/CheckURLs.go | 2 +- .../CheckUnbufferedSignalChan.go | 2 +- .../CheckUnmarshalPointer.go | 6 +- .../CheckUnreachableTypeCases.go | 18 +- .../CheckUnreadVariableValues.go | 31 +- .../CheckUnreadVariableValues_test.go | 4 +- .../CheckUnsafePrintf/CheckUnsafePrintf.go | 10 +- .../CheckUnsupportedMarshal.go | 18 +- .../CheckUntrappableSignal.go | 20 +- .../CheckWaitgroupAdd/CheckWaitgroupAdd.go | 2 +- .../CheckWriterBufferModified.go | 4 +- .../checkStdlibUsageNilContext.go | 2 +- .../checkStdlibUsageRegexpFindAll.go | 2 +- .../checkStdlibUsageSeeker.go | 2 +- .../checkStdlibUsageUTF8Cutset.go | 2 +- .../function-literals/function-literals.go | 23 - .../testdata/src/synthetic/synthetic.go | 15 - stylecheck/analysis.go | 111 ++ stylecheck/lint.go | 203 ++- stylecheck/lint_test.go | 40 +- stylecheck/names.go | 25 +- .../CheckContextFirstArg.go | 4 +- .../CheckDefaultCaseOrder.go | 2 +- .../src/CheckDotImports/CheckDotImports.go | 2 +- .../src/CheckErrorReturn/CheckErrorReturn.go | 4 +- .../CheckErrorStrings/CheckErrorStrings.go | 6 +- .../CheckErrorVarNames/CheckErrorVarNames.go | 11 +- .../CheckHTTPStatusCodes.go | 8 +- .../src/CheckIncDec.disabled/CheckIncDec.go | 4 +- .../CheckInvisibleCharacters.go | 9 +- .../testdata/src/CheckNames/CheckNames.go | 32 +- .../CheckPackageComment-1.go | 2 +- .../CheckPackageComment-2.go | 4 +- 
.../CheckReceiverNames/CheckReceiverNames.go | 6 +- .../CheckReceiverNames.go | 11 + .../src/CheckTimeNames/CheckTimeNames.go | 8 +- .../CheckYodaConditions.go | 6 +- .../CheckBlankImports-2.go | 22 +- .../CheckUnexportedReturn.go | 6 +- unused/testdata/src/alias/alias.go | 4 +- unused/testdata/src/blank/blank.go | 4 +- unused/testdata/src/cgo/cgo.go | 2 +- unused/testdata/src/consts/consts.go | 8 +- unused/testdata/src/conversion/conversion.go | 4 +- unused/testdata/src/cyclic/cyclic.go | 4 +- unused/testdata/src/embedding/embedding.go | 4 +- unused/testdata/src/fields/fields.go | 9 +- unused/testdata/src/functions/functions.go | 8 +- unused/testdata/src/generated1/generated1.go | 5 - unused/testdata/src/generated2/generated2.go | 5 - unused/testdata/src/interfaces/interfaces.go | 2 +- unused/testdata/src/linkname/linkname.go | 2 +- unused/testdata/src/main/main.go | 8 +- unused/testdata/src/methods/methods.go | 2 +- unused/testdata/src/nested/nested.go | 4 +- .../testdata/src/nocopy-main/nocopy-main.go | 10 +- unused/testdata/src/nocopy/nocopy.go | 10 +- .../pointer-type-embedding.go | 2 +- unused/testdata/src/quiet/quiet.go | 8 +- .../testdata/src/unused_type/unused_type.go | 4 +- unused/unused.go | 441 +++--- unused/unused_test.go | 198 ++- 202 files changed, 5828 insertions(+), 3357 deletions(-) delete mode 100644 cmd/gosimple/README.md delete mode 100644 cmd/gosimple/gosimple.go delete mode 100644 cmd/megacheck/README.md delete mode 100644 cmd/megacheck/megacheck.go delete mode 100644 cmd/unused/README.md delete mode 100644 cmd/unused/main.go delete mode 100644 functions/concrete.go delete mode 100644 functions/functions.go create mode 100644 internal/cache/cache.go create mode 100644 internal/cache/cache_test.go create mode 100644 internal/cache/default.go create mode 100644 internal/cache/hash.go create mode 100644 internal/cache/hash_test.go create mode 100644 internal/passes/buildssa/buildssa.go create mode 100644 internal/passes/buildssa/buildssa_test.go create mode 100644 internal/passes/buildssa/testdata/src/a/a.go create mode 100644 internal/renameio/renameio.go create mode 100644 lint/analysis.go delete mode 100644 lint/lint_test.go create mode 100644 lint/runner.go delete mode 100644 lint/testutil/util.go create mode 100644 loader/loader.go create mode 100644 simple/analysis.go create mode 100644 staticcheck/analysis.go create mode 100644 staticcheck/knowledge.go create mode 100644 staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go delete mode 100644 staticcheck/testdata/src/function-literals/function-literals.go delete mode 100644 staticcheck/testdata/src/synthetic/synthetic.go create mode 100644 stylecheck/analysis.go create mode 100644 stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go delete mode 100644 unused/testdata/src/generated1/generated1.go delete mode 100644 unused/testdata/src/generated2/generated2.go diff --git a/cmd/gosimple/README.md b/cmd/gosimple/README.md deleted file mode 100644 index efaaeeeec..000000000 --- a/cmd/gosimple/README.md +++ /dev/null @@ -1 +0,0 @@ -**Deprecated: gosimple has been merged into the staticcheck tool.** diff --git a/cmd/gosimple/gosimple.go b/cmd/gosimple/gosimple.go deleted file mode 100644 index c6fc7bd2d..000000000 --- a/cmd/gosimple/gosimple.go +++ /dev/null @@ -1,20 +0,0 @@ -// gosimple detects code that could be rewritten in a simpler way. 
-package main // import "honnef.co/go/tools/cmd/gosimple" -import ( - "fmt" - "os" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/simple" -) - -func main() { - fmt.Fprintln(os.Stderr, "Gosimple has been deprecated. Please use staticcheck instead.") - fs := lintutil.FlagSet("gosimple") - gen := fs.Bool("generated", false, "Check generated code") - fs.Parse(os.Args[1:]) - c := simple.NewChecker() - c.CheckGenerated = *gen - lintutil.ProcessFlagSet([]lint.Checker{c}, fs) -} diff --git a/cmd/megacheck/README.md b/cmd/megacheck/README.md deleted file mode 100644 index 509762886..000000000 --- a/cmd/megacheck/README.md +++ /dev/null @@ -1 +0,0 @@ -**Deprecated: megacheck has been merged into the staticcheck tool.** diff --git a/cmd/megacheck/megacheck.go b/cmd/megacheck/megacheck.go deleted file mode 100644 index 309e73c4d..000000000 --- a/cmd/megacheck/megacheck.go +++ /dev/null @@ -1,93 +0,0 @@ -// megacheck runs staticcheck, gosimple and unused. -package main // import "honnef.co/go/tools/cmd/megacheck" - -import ( - "fmt" - "os" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/simple" - "honnef.co/go/tools/staticcheck" - "honnef.co/go/tools/unused" -) - -func main() { - fmt.Fprintln(os.Stderr, "Megacheck has been deprecated. Please use staticcheck instead.") - - var flags struct { - staticcheck struct { - enabled bool - generated bool - } - gosimple struct { - enabled bool - generated bool - } - unused struct { - enabled bool - constants bool - fields bool - functions bool - types bool - variables bool - wholeProgram bool - reflection bool - } - } - fs := lintutil.FlagSet("megacheck") - fs.BoolVar(&flags.gosimple.enabled, - "simple.enabled", true, "Deprecated: use -checks instead") - fs.BoolVar(&flags.gosimple.generated, - "simple.generated", false, "Check generated code") - - fs.BoolVar(&flags.staticcheck.enabled, - "staticcheck.enabled", true, "Deprecated: use -checks instead") - fs.BoolVar(&flags.staticcheck.generated, - "staticcheck.generated", false, "Check generated code (only applies to a subset of checks)") - - fs.BoolVar(&flags.unused.enabled, - "unused.enabled", true, "Deprecated: use -checks instead") - fs.BoolVar(&flags.unused.constants, - "unused.consts", true, "Report unused constants") - fs.BoolVar(&flags.unused.fields, - "unused.fields", true, "Report unused fields") - fs.BoolVar(&flags.unused.functions, - "unused.funcs", true, "Report unused functions and methods") - fs.BoolVar(&flags.unused.types, - "unused.types", true, "Report unused types") - fs.BoolVar(&flags.unused.variables, - "unused.vars", true, "Report unused variables") - fs.BoolVar(&flags.unused.wholeProgram, - "unused.exported", false, "Treat arguments as a program and report unused exported identifiers") - fs.BoolVar(&flags.unused.reflection, - "unused.reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") - - fs.Bool("simple.exit-non-zero", true, "Deprecated: use -fail instead") - fs.Bool("staticcheck.exit-non-zero", true, "Deprecated: use -fail instead") - fs.Bool("unused.exit-non-zero", true, "Deprecated: use -fail instead") - - fs.Parse(os.Args[1:]) - - var checkers []lint.Checker - - if flags.staticcheck.enabled { - sac := staticcheck.NewChecker() - sac.CheckGenerated = flags.staticcheck.generated - checkers = append(checkers, sac) - } - - if flags.gosimple.enabled { - sc := simple.NewChecker() - sc.CheckGenerated = flags.gosimple.generated - checkers = append(checkers, sc) - } - - 
if flags.unused.enabled { - uc := &unused.Checker{} - uc.WholeProgram = flags.unused.wholeProgram - checkers = append(checkers, uc) - } - - lintutil.ProcessFlagSet(checkers, fs) -} diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 6f381850d..24a369e24 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -4,6 +4,7 @@ package main // import "honnef.co/go/tools/cmd/staticcheck" import ( "os" + "golang.org/x/tools/go/analysis" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" "honnef.co/go/tools/simple" @@ -16,12 +17,18 @@ func main() { fs := lintutil.FlagSet("staticcheck") fs.Parse(os.Args[1:]) - checkers := []lint.Checker{ - simple.NewChecker(), - staticcheck.NewChecker(), - stylecheck.NewChecker(), - &unused.Checker{}, + var cs []*analysis.Analyzer + for _, v := range simple.Analyzers { + cs = append(cs, v) } + for _, v := range staticcheck.Analyzers { + cs = append(cs, v) + } + for _, v := range stylecheck.Analyzers { + cs = append(cs, v) + } + + cums := []lint.CumulativeChecker{unused.NewChecker()} - lintutil.ProcessFlagSet(checkers, fs) + lintutil.ProcessFlagSet(cs, cums, fs) } diff --git a/cmd/unused/README.md b/cmd/unused/README.md deleted file mode 100644 index ddab38dc0..000000000 --- a/cmd/unused/README.md +++ /dev/null @@ -1 +0,0 @@ -**Deprecated: unused has been merged into the staticcheck tool.** diff --git a/cmd/unused/main.go b/cmd/unused/main.go deleted file mode 100644 index 77b953bbb..000000000 --- a/cmd/unused/main.go +++ /dev/null @@ -1,57 +0,0 @@ -// unused reports unused identifiers (types, functions, ...) in your -// code. -package main // import "honnef.co/go/tools/cmd/unused" - -import ( - "fmt" - "log" - "os" - - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/unused" -) - -var ( - fConstants bool - fFields bool - fFunctions bool - fTypes bool - fVariables bool - fDebug string - fWholeProgram bool - fReflection bool -) - -func newChecker() *unused.Checker { - checker := &unused.Checker{} - if fDebug != "" { - debug, err := os.Create(fDebug) - if err != nil { - log.Fatal("couldn't open debug file:", err) - } - checker.Debug = debug - } - - checker.WholeProgram = fWholeProgram - return checker -} - -func main() { - fmt.Fprintln(os.Stderr, "Unused has been deprecated. Please use staticcheck instead.") - log.SetFlags(0) - - fs := lintutil.FlagSet("unused") - fs.BoolVar(&fConstants, "consts", true, "Report unused constants") - fs.BoolVar(&fFields, "fields", true, "Report unused fields") - fs.BoolVar(&fFunctions, "funcs", true, "Report unused functions and methods") - fs.BoolVar(&fTypes, "types", true, "Report unused types") - fs.BoolVar(&fVariables, "vars", true, "Report unused variables") - fs.StringVar(&fDebug, "debug", "", "Write a debug graph to `file`. 
Existing files will be overwritten.") - fs.BoolVar(&fWholeProgram, "exported", false, "Treat arguments as a program and report unused exported identifiers") - fs.BoolVar(&fReflection, "reflect", true, "Consider identifiers as used when it's likely they'll be accessed via reflection") - fs.Parse(os.Args[1:]) - - c := newChecker() - lintutil.ProcessFlagSet([]lint.Checker{c}, fs) -} diff --git a/config/config.go b/config/config.go index cfde5d51a..4ac006b9a 100644 --- a/config/config.go +++ b/config/config.go @@ -3,10 +3,34 @@ package config import ( "os" "path/filepath" + "reflect" "github.com/BurntSushi/toml" + "golang.org/x/tools/go/analysis" ) +var Analyzer = &analysis.Analyzer{ + Name: "config", + Doc: "loads configuration for the current package tree", + Run: func(pass *analysis.Pass) (interface{}, error) { + if len(pass.Files) == 0 { + cfg := DefaultConfig + return &cfg, nil + } + // FIXME(dh): this may yield the wrong path for generated files in the build cache + path := pass.Fset.PositionFor(pass.Files[0].Pos(), true).Filename + dir := filepath.Dir(path) + cfg, err := Load(dir) + return &cfg, err + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf((*Config)(nil)), +} + +func For(pass *analysis.Pass) *Config { + return pass.ResultOf[Analyzer].(*Config) +} + func mergeLists(a, b []string) []string { out := make([]string, 0, len(a)+len(b)) for _, el := range b { @@ -73,7 +97,7 @@ type Config struct { HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"` } -var defaultConfig = Config{ +var DefaultConfig = Config{ Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016"}, Initialisms: []string{ "ACL", "API", "ASCII", "CPU", "CSS", "DNS", @@ -120,7 +144,7 @@ func parseConfigs(dir string) ([]Config, error) { } dir = ndir } - out = append(out, defaultConfig) + out = append(out, DefaultConfig) if len(out) < 2 { return out, nil } diff --git a/functions/concrete.go b/functions/concrete.go deleted file mode 100644 index 932acd03e..000000000 --- a/functions/concrete.go +++ /dev/null @@ -1,56 +0,0 @@ -package functions - -import ( - "go/token" - "go/types" - - "honnef.co/go/tools/ssa" -) - -func concreteReturnTypes(fn *ssa.Function) []*types.Tuple { - res := fn.Signature.Results() - if res == nil { - return nil - } - ifaces := make([]bool, res.Len()) - any := false - for i := 0; i < res.Len(); i++ { - _, ifaces[i] = res.At(i).Type().Underlying().(*types.Interface) - any = any || ifaces[i] - } - if !any { - return []*types.Tuple{res} - } - var out []*types.Tuple - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - ret, ok := block.Instrs[len(block.Instrs)-1].(*ssa.Return) - if !ok { - continue - } - vars := make([]*types.Var, res.Len()) - for i, v := range ret.Results { - var typ types.Type - if !ifaces[i] { - typ = res.At(i).Type() - } else if mi, ok := v.(*ssa.MakeInterface); ok { - // TODO(dh): if mi.X is a function call that returns - // an interface, call concreteReturnTypes on that - // function (or, really, go through Descriptions, - // avoid infinite recursion etc, just like nil error - // detection) - - // TODO(dh): support Phi nodes - typ = mi.X.Type() - } else { - typ = res.At(i).Type() - } - vars[i] = types.NewParam(token.NoPos, nil, "", typ) - } - out = append(out, types.NewTuple(vars...)) - } - // TODO(dh): deduplicate out - return out -} diff --git a/functions/functions.go b/functions/functions.go deleted file mode 100644 index 839404129..000000000 --- a/functions/functions.go +++ /dev/null @@ -1,150 +0,0 @@ -package functions - 
-import ( - "go/types" - "sync" - - "honnef.co/go/tools/callgraph" - "honnef.co/go/tools/callgraph/static" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/staticcheck/vrp" -) - -var stdlibDescs = map[string]Description{ - "errors.New": {Pure: true}, - - "fmt.Errorf": {Pure: true}, - "fmt.Sprintf": {Pure: true}, - "fmt.Sprint": {Pure: true}, - - "sort.Reverse": {Pure: true}, - - "strings.Map": {Pure: true}, - "strings.Repeat": {Pure: true}, - "strings.Replace": {Pure: true}, - "strings.Title": {Pure: true}, - "strings.ToLower": {Pure: true}, - "strings.ToLowerSpecial": {Pure: true}, - "strings.ToTitle": {Pure: true}, - "strings.ToTitleSpecial": {Pure: true}, - "strings.ToUpper": {Pure: true}, - "strings.ToUpperSpecial": {Pure: true}, - "strings.Trim": {Pure: true}, - "strings.TrimFunc": {Pure: true}, - "strings.TrimLeft": {Pure: true}, - "strings.TrimLeftFunc": {Pure: true}, - "strings.TrimPrefix": {Pure: true}, - "strings.TrimRight": {Pure: true}, - "strings.TrimRightFunc": {Pure: true}, - "strings.TrimSpace": {Pure: true}, - "strings.TrimSuffix": {Pure: true}, - - "(*net/http.Request).WithContext": {Pure: true}, - - "math/rand.Read": {NilError: true}, - "(*math/rand.Rand).Read": {NilError: true}, -} - -type Description struct { - // The function is known to be pure - Pure bool - // The function is known to be a stub - Stub bool - // The function is known to never return (panics notwithstanding) - Infinite bool - // Variable ranges - Ranges vrp.Ranges - Loops []Loop - // Function returns an error as its last argument, but it is - // always nil - NilError bool - ConcreteReturnTypes []*types.Tuple -} - -type descriptionEntry struct { - ready chan struct{} - result Description -} - -type Descriptions struct { - CallGraph *callgraph.Graph - mu sync.Mutex - cache map[*ssa.Function]*descriptionEntry -} - -func NewDescriptions(prog *ssa.Program) *Descriptions { - return &Descriptions{ - CallGraph: static.CallGraph(prog), - cache: map[*ssa.Function]*descriptionEntry{}, - } -} - -func (d *Descriptions) Get(fn *ssa.Function) Description { - d.mu.Lock() - fd := d.cache[fn] - if fd == nil { - fd = &descriptionEntry{ - ready: make(chan struct{}), - } - d.cache[fn] = fd - d.mu.Unlock() - - { - fd.result = stdlibDescs[fn.RelString(nil)] - fd.result.Pure = fd.result.Pure || d.IsPure(fn) - fd.result.Stub = fd.result.Stub || d.IsStub(fn) - fd.result.Infinite = fd.result.Infinite || !terminates(fn) - fd.result.Ranges = vrp.BuildGraph(fn).Solve() - fd.result.Loops = findLoops(fn) - fd.result.NilError = fd.result.NilError || IsNilError(fn) - fd.result.ConcreteReturnTypes = concreteReturnTypes(fn) - } - - close(fd.ready) - } else { - d.mu.Unlock() - <-fd.ready - } - return fd.result -} - -func IsNilError(fn *ssa.Function) bool { - // TODO(dh): This is very simplistic, as we only look for constant - // nil returns. A more advanced approach would work transitively. - // An even more advanced approach would be context-aware and - // determine nil errors based on inputs (e.g. io.WriteString to a - // bytes.Buffer will always return nil, but an io.WriteString to - // an os.File might not). Similarly, an os.File opened for reading - // won't error on Close, but other files will. 
- res := fn.Signature.Results() - if res.Len() == 0 { - return false - } - last := res.At(res.Len() - 1) - if types.TypeString(last.Type(), nil) != "error" { - return false - } - - if fn.Blocks == nil { - return false - } - for _, block := range fn.Blocks { - if len(block.Instrs) == 0 { - continue - } - ins := block.Instrs[len(block.Instrs)-1] - ret, ok := ins.(*ssa.Return) - if !ok { - continue - } - v := ret.Results[len(ret.Results)-1] - c, ok := v.(*ssa.Const) - if !ok { - return false - } - if !c.IsNil() { - return false - } - } - return true -} diff --git a/functions/loops.go b/functions/loops.go index 63011cf3e..92514af9a 100644 --- a/functions/loops.go +++ b/functions/loops.go @@ -4,7 +4,7 @@ import "honnef.co/go/tools/ssa" type Loop map[*ssa.BasicBlock]bool -func findLoops(fn *ssa.Function) []Loop { +func FindLoops(fn *ssa.Function) []Loop { if fn.Blocks == nil { return nil } diff --git a/functions/pure.go b/functions/pure.go index 7028eb8c6..cf914e3bb 100644 --- a/functions/pure.go +++ b/functions/pure.go @@ -1,10 +1,6 @@ package functions import ( - "go/token" - "go/types" - - "honnef.co/go/tools/callgraph" "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" ) @@ -13,7 +9,7 @@ import ( // considered a stub if it has no instructions or exactly one // instruction, which must be either returning only constant values or // a panic. -func (d *Descriptions) IsStub(fn *ssa.Function) bool { +func IsStub(fn *ssa.Function) bool { if len(fn.Blocks) == 0 { return true } @@ -39,85 +35,3 @@ func (d *Descriptions) IsStub(fn *ssa.Function) bool { return false } } - -func (d *Descriptions) IsPure(fn *ssa.Function) bool { - if fn.Signature.Results().Len() == 0 { - // A function with no return values is empty or is doing some - // work we cannot see (for example because of build tags); - // don't consider it pure. - return false - } - - for _, param := range fn.Params { - if _, ok := param.Type().Underlying().(*types.Basic); !ok { - return false - } - } - - if fn.Blocks == nil { - return false - } - checkCall := func(common *ssa.CallCommon) bool { - if common.IsInvoke() { - return false - } - builtin, ok := common.Value.(*ssa.Builtin) - if !ok { - if common.StaticCallee() != fn { - if common.StaticCallee() == nil { - return false - } - // TODO(dh): ideally, IsPure wouldn't be responsible - // for avoiding infinite recursion, but - // FunctionDescriptions would be. 
- node := d.CallGraph.CreateNode(common.StaticCallee()) - if callgraph.PathSearch(node, func(other *callgraph.Node) bool { - return other.Func == fn - }) != nil { - return false - } - if !d.Get(common.StaticCallee()).Pure { - return false - } - } - } else { - switch builtin.Name() { - case "len", "cap", "make", "new": - default: - return false - } - } - return true - } - for _, b := range fn.Blocks { - for _, ins := range b.Instrs { - switch ins := ins.(type) { - case *ssa.Call: - if !checkCall(ins.Common()) { - return false - } - case *ssa.Defer: - if !checkCall(&ins.Call) { - return false - } - case *ssa.Select: - return false - case *ssa.Send: - return false - case *ssa.Go: - return false - case *ssa.Panic: - return false - case *ssa.Store: - return false - case *ssa.FieldAddr: - return false - case *ssa.UnOp: - if ins.Op == token.MUL || ins.Op == token.AND { - return false - } - } - } - } - return true -} diff --git a/functions/terminates.go b/functions/terminates.go index 65f9e16dc..3e9c3a23f 100644 --- a/functions/terminates.go +++ b/functions/terminates.go @@ -2,10 +2,10 @@ package functions import "honnef.co/go/tools/ssa" -// terminates reports whether fn is supposed to return, that is if it +// Terminates reports whether fn is supposed to return, that is if it // has at least one theoretic path that returns from the function. // Explicit panics do not count as terminating. -func terminates(fn *ssa.Function) bool { +func Terminates(fn *ssa.Function) bool { if fn.Blocks == nil { // assuming that a function terminates is the conservative // choice diff --git a/internal/cache/cache.go b/internal/cache/cache.go new file mode 100644 index 000000000..508877ce7 --- /dev/null +++ b/internal/cache/cache.go @@ -0,0 +1,473 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cache implements a build artifact cache. +// +// This package is a slightly modified fork of Go's +// cmd/go/internal/cache package. +package cache + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + "honnef.co/go/tools/internal/renameio" +) + +// An ActionID is a cache action key, the hash of a complete description of a +// repeatable computation (command line, environment variables, +// input file contents, executable contents). +type ActionID [HashSize]byte + +// An OutputID is a cache output key, the hash of an output of a computation. +type OutputID [HashSize]byte + +// A Cache is a package cache, backed by a file system directory tree. +type Cache struct { + dir string + now func() time.Time +} + +// Open opens and returns the cache in the given directory. +// +// It is safe for multiple processes on a single machine to use the +// same cache directory in a local file system simultaneously. +// They will coordinate using operating system file locks and may +// duplicate effort but will not corrupt the cache. +// +// However, it is NOT safe for multiple processes on different machines +// to share a cache directory (for example, if the directory were stored +// in a network file system). File locking is notoriously unreliable in +// network file systems and may not suffice to protect the cache. 
+// +func Open(dir string) (*Cache, error) { + info, err := os.Stat(dir) + if err != nil { + return nil, err + } + if !info.IsDir() { + return nil, &os.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")} + } + for i := 0; i < 256; i++ { + name := filepath.Join(dir, fmt.Sprintf("%02x", i)) + if err := os.MkdirAll(name, 0777); err != nil { + return nil, err + } + } + c := &Cache{ + dir: dir, + now: time.Now, + } + return c, nil +} + +// fileName returns the name of the file corresponding to the given id. +func (c *Cache) fileName(id [HashSize]byte, key string) string { + return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) +} + +var errMissing = errors.New("cache entry not found") + +const ( + // action entry file is "v1 \n" + hexSize = HashSize * 2 + entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1 +) + +// verify controls whether to run the cache in verify mode. +// In verify mode, the cache always returns errMissing from Get +// but then double-checks in Put that the data being written +// exactly matches any existing entry. This provides an easy +// way to detect program behavior that would have been different +// had the cache entry been returned from Get. +// +// verify is enabled by setting the environment variable +// GODEBUG=gocacheverify=1. +var verify = false + +// DebugTest is set when GODEBUG=gocachetest=1 is in the environment. +var DebugTest = false + +func init() { initEnv() } + +func initEnv() { + verify = false + debugHash = false + debug := strings.Split(os.Getenv("GODEBUG"), ",") + for _, f := range debug { + if f == "gocacheverify=1" { + verify = true + } + if f == "gocachehash=1" { + debugHash = true + } + if f == "gocachetest=1" { + DebugTest = true + } + } +} + +// Get looks up the action ID in the cache, +// returning the corresponding output ID and file size, if any. +// Note that finding an output ID does not guarantee that the +// saved file for that output ID is still available. +func (c *Cache) Get(id ActionID) (Entry, error) { + if verify { + return Entry{}, errMissing + } + return c.get(id) +} + +type Entry struct { + OutputID OutputID + Size int64 + Time time.Time +} + +// get is Get but does not respect verify mode, so that Put can use it. 
+func (c *Cache) get(id ActionID) (Entry, error) { + missing := func() (Entry, error) { + return Entry{}, errMissing + } + f, err := os.Open(c.fileName(id, "a")) + if err != nil { + return missing() + } + defer f.Close() + entry := make([]byte, entrySize+1) // +1 to detect whether f is too long + if n, err := io.ReadFull(f, entry); n != entrySize || err != io.ErrUnexpectedEOF { + return missing() + } + if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { + return missing() + } + eid, entry := entry[3:3+hexSize], entry[3+hexSize:] + eout, entry := entry[1:1+hexSize], entry[1+hexSize:] + esize, entry := entry[1:1+20], entry[1+20:] + etime, entry := entry[1:1+20], entry[1+20:] + var buf [HashSize]byte + if _, err := hex.Decode(buf[:], eid); err != nil || buf != id { + return missing() + } + if _, err := hex.Decode(buf[:], eout); err != nil { + return missing() + } + i := 0 + for i < len(esize) && esize[i] == ' ' { + i++ + } + size, err := strconv.ParseInt(string(esize[i:]), 10, 64) + if err != nil || size < 0 { + return missing() + } + i = 0 + for i < len(etime) && etime[i] == ' ' { + i++ + } + tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) + if err != nil || size < 0 { + return missing() + } + + c.used(c.fileName(id, "a")) + + return Entry{buf, size, time.Unix(0, tm)}, nil +} + +// GetFile looks up the action ID in the cache and returns +// the name of the corresponding data file. +func (c *Cache) GetFile(id ActionID) (file string, entry Entry, err error) { + entry, err = c.Get(id) + if err != nil { + return "", Entry{}, err + } + file = c.OutputFile(entry.OutputID) + info, err := os.Stat(file) + if err != nil || info.Size() != entry.Size { + return "", Entry{}, errMissing + } + return file, entry, nil +} + +// GetBytes looks up the action ID in the cache and returns +// the corresponding output bytes. +// GetBytes should only be used for data that can be expected to fit in memory. +func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { + entry, err := c.Get(id) + if err != nil { + return nil, entry, err + } + data, _ := ioutil.ReadFile(c.OutputFile(entry.OutputID)) + if sha256.Sum256(data) != entry.OutputID { + return nil, entry, errMissing + } + return data, entry, nil +} + +// OutputFile returns the name of the cache file storing output with the given OutputID. +func (c *Cache) OutputFile(out OutputID) string { + file := c.fileName(out, "d") + c.used(file) + return file +} + +// Time constants for cache expiration. +// +// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour), +// to avoid causing many unnecessary inode updates. The mtimes therefore +// roughly reflect "time of last use" but may in fact be older by at most an hour. +// +// We scan the cache for entries to delete at most once per trimInterval (1 day). +// +// When we do scan the cache, we delete entries that have not been used for +// at least trimLimit (5 days). Statistics gathered from a month of usage by +// Go developers found that essentially all reuse of cached entries happened +// within 5 days of the previous reuse. See golang.org/issue/22990. +const ( + mtimeInterval = 1 * time.Hour + trimInterval = 24 * time.Hour + trimLimit = 5 * 24 * time.Hour +) + +// used makes a best-effort attempt to update mtime on file, +// so that mtime reflects cache access time. 
+// +// Because the reflection only needs to be approximate, +// and to reduce the amount of disk activity caused by using +// cache entries, used only updates the mtime if the current +// mtime is more than an hour old. This heuristic eliminates +// nearly all of the mtime updates that would otherwise happen, +// while still keeping the mtimes useful for cache trimming. +func (c *Cache) used(file string) { + info, err := os.Stat(file) + if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval { + return + } + os.Chtimes(file, c.now(), c.now()) +} + +// Trim removes old cache entries that are likely not to be reused. +func (c *Cache) Trim() { + now := c.now() + + // We maintain in dir/trim.txt the time of the last completed cache trim. + // If the cache has been trimmed recently enough, do nothing. + // This is the common case. + data, _ := ioutil.ReadFile(filepath.Join(c.dir, "trim.txt")) + t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) + if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval { + return + } + + // Trim each of the 256 subdirectories. + // We subtract an additional mtimeInterval + // to account for the imprecision of our "last used" mtimes. + cutoff := now.Add(-trimLimit - mtimeInterval) + for i := 0; i < 256; i++ { + subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i)) + c.trimSubdir(subdir, cutoff) + } + + // Ignore errors from here: if we don't write the complete timestamp, the + // cache will appear older than it is, and we'll trim it again next time. + renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix()))) +} + +// trimSubdir trims a single cache subdirectory. +func (c *Cache) trimSubdir(subdir string, cutoff time.Time) { + // Read all directory entries from subdir before removing + // any files, in case removing files invalidates the file offset + // in the directory scan. Also, ignore error from f.Readdirnames, + // because we don't care about reporting the error and we still + // want to process any entries found before the error. + f, err := os.Open(subdir) + if err != nil { + return + } + names, _ := f.Readdirnames(-1) + f.Close() + + for _, name := range names { + // Remove only cache entries (xxxx-a and xxxx-d). + if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") { + continue + } + entry := filepath.Join(subdir, name) + info, err := os.Stat(entry) + if err == nil && info.ModTime().Before(cutoff) { + os.Remove(entry) + } + } +} + +// putIndexEntry adds an entry to the cache recording that executing the action +// with the given id produces an output with the given output id (hash) and size. +func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error { + // Note: We expect that for one reason or another it may happen + // that repeating an action produces a different output hash + // (for example, if the output contains a time stamp or temp dir name). + // While not ideal, this is also not a correctness problem, so we + // don't make a big deal about it. In particular, we leave the action + // cache entries writable specifically so that they can be overwritten. + // + // Setting GODEBUG=gocacheverify=1 does make a big deal: + // in verify mode we are double-checking that the cache entries + // are entirely reproducible. As just noted, this may be unrealistic + // in some cases but the check is also useful for shaking out real bugs. 
+ entry := []byte(fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())) + if verify && allowVerify { + old, err := c.get(id) + if err == nil && (old.OutputID != out || old.Size != size) { + // panic to show stack trace, so we can see what code is generating this cache entry. + msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size) + panic(msg) + } + } + file := c.fileName(id, "a") + if err := ioutil.WriteFile(file, entry, 0666); err != nil { + // TODO(bcmills): This Remove potentially races with another go command writing to file. + // Can we eliminate it? + os.Remove(file) + return err + } + os.Chtimes(file, c.now(), c.now()) // mainly for tests + + return nil +} + +// Put stores the given output in the cache as the output for the action ID. +// It may read file twice. The content of file must not change between the two passes. +func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.put(id, file, true) +} + +// PutNoVerify is like Put but disables the verify check +// when GODEBUG=goverifycache=1 is set. +// It is meant for data that is OK to cache but that we expect to vary slightly from run to run, +// like test output containing times and the like. +func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.put(id, file, false) +} + +func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) { + // Compute output ID. + h := sha256.New() + if _, err := file.Seek(0, 0); err != nil { + return OutputID{}, 0, err + } + size, err := io.Copy(h, file) + if err != nil { + return OutputID{}, 0, err + } + var out OutputID + h.Sum(out[:0]) + + // Copy to cached output file (if not already present). + if err := c.copyFile(file, out, size); err != nil { + return out, size, err + } + + // Add to cache index. + return out, size, c.putIndexEntry(id, out, size, allowVerify) +} + +// PutBytes stores the given bytes in the cache as the output for the action ID. +func (c *Cache) PutBytes(id ActionID, data []byte) error { + _, _, err := c.Put(id, bytes.NewReader(data)) + return err +} + +// copyFile copies file into the cache, expecting it to have the given +// output ID and size, if that file is not present already. +func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { + name := c.fileName(out, "d") + info, err := os.Stat(name) + if err == nil && info.Size() == size { + // Check hash. + if f, err := os.Open(name); err == nil { + h := sha256.New() + io.Copy(h, f) + f.Close() + var out2 OutputID + h.Sum(out2[:0]) + if out == out2 { + return nil + } + } + // Hash did not match. Fall through and rewrite file. + } + + // Copy file to cache directory. + mode := os.O_RDWR | os.O_CREATE + if err == nil && info.Size() > size { // shouldn't happen but fix in case + mode |= os.O_TRUNC + } + f, err := os.OpenFile(name, mode, 0666) + if err != nil { + return err + } + defer f.Close() + if size == 0 { + // File now exists with correct size. + // Only one possible zero-length file, so contents are OK too. + // Early return here makes sure there's a "last byte" for code below. + return nil + } + + // From here on, if any of the I/O writing the file fails, + // we make a best-effort attempt to truncate the file f + // before returning, to avoid leaving bad bytes in the file. + + // Copy file to f, but also into h to double-check hash. 
+ if _, err := file.Seek(0, 0); err != nil { + f.Truncate(0) + return err + } + h := sha256.New() + w := io.MultiWriter(f, h) + if _, err := io.CopyN(w, file, size-1); err != nil { + f.Truncate(0) + return err + } + // Check last byte before writing it; writing it will make the size match + // what other processes expect to find and might cause them to start + // using the file. + buf := make([]byte, 1) + if _, err := file.Read(buf); err != nil { + f.Truncate(0) + return err + } + h.Write(buf) + sum := h.Sum(nil) + if !bytes.Equal(sum, out[:]) { + f.Truncate(0) + return fmt.Errorf("file content changed underfoot") + } + + // Commit cache file entry. + if _, err := f.Write(buf); err != nil { + f.Truncate(0) + return err + } + if err := f.Close(); err != nil { + // Data might not have been written, + // but file may look like it is the right size. + // To be extra careful, remove cached file. + os.Remove(name) + return err + } + os.Chtimes(name, c.now(), c.now()) // mainly for tests + + return nil +} diff --git a/internal/cache/cache_test.go b/internal/cache/cache_test.go new file mode 100644 index 000000000..7229bc4ce --- /dev/null +++ b/internal/cache/cache_test.go @@ -0,0 +1,270 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bytes" + "encoding/binary" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "testing" + "time" +) + +func init() { + verify = false // even if GODEBUG is set +} + +func TestBasic(t *testing.T) { + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + _, err = Open(filepath.Join(dir, "notexist")) + if err == nil { + t.Fatal(`Open("tmp/notexist") succeeded, want failure`) + } + + cdir := filepath.Join(dir, "c1") + if err := os.Mkdir(cdir, 0777); err != nil { + t.Fatal(err) + } + + c1, err := Open(cdir) + if err != nil { + t.Fatalf("Open(c1) (create): %v", err) + } + if err := c1.putIndexEntry(dummyID(1), dummyID(12), 13, true); err != nil { + t.Fatalf("addIndexEntry: %v", err) + } + if err := c1.putIndexEntry(dummyID(1), dummyID(2), 3, true); err != nil { // overwrite entry + t.Fatalf("addIndexEntry: %v", err) + } + if entry, err := c1.Get(dummyID(1)); err != nil || entry.OutputID != dummyID(2) || entry.Size != 3 { + t.Fatalf("c1.Get(1) = %x, %v, %v, want %x, %v, nil", entry.OutputID, entry.Size, err, dummyID(2), 3) + } + + c2, err := Open(cdir) + if err != nil { + t.Fatalf("Open(c2) (reuse): %v", err) + } + if entry, err := c2.Get(dummyID(1)); err != nil || entry.OutputID != dummyID(2) || entry.Size != 3 { + t.Fatalf("c2.Get(1) = %x, %v, %v, want %x, %v, nil", entry.OutputID, entry.Size, err, dummyID(2), 3) + } + if err := c2.putIndexEntry(dummyID(2), dummyID(3), 4, true); err != nil { + t.Fatalf("addIndexEntry: %v", err) + } + if entry, err := c1.Get(dummyID(2)); err != nil || entry.OutputID != dummyID(3) || entry.Size != 4 { + t.Fatalf("c1.Get(2) = %x, %v, %v, want %x, %v, nil", entry.OutputID, entry.Size, err, dummyID(3), 4) + } +} + +func TestGrowth(t *testing.T) { + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + c, err := Open(dir) + if err != nil { + t.Fatalf("Open: %v", err) + } + + n := 10000 + if testing.Short() { + n = 1000 + } + + for i := 0; i < n; i++ { + if err := c.putIndexEntry(dummyID(i), dummyID(i*99), int64(i)*101, true); err != nil { + t.Fatalf("addIndexEntry: %v", err) + } + id := 
ActionID(dummyID(i)) + entry, err := c.Get(id) + if err != nil { + t.Fatalf("Get(%x): %v", id, err) + } + if entry.OutputID != dummyID(i*99) || entry.Size != int64(i)*101 { + t.Errorf("Get(%x) = %x, %d, want %x, %d", id, entry.OutputID, entry.Size, dummyID(i*99), int64(i)*101) + } + } + for i := 0; i < n; i++ { + id := ActionID(dummyID(i)) + entry, err := c.Get(id) + if err != nil { + t.Fatalf("Get2(%x): %v", id, err) + } + if entry.OutputID != dummyID(i*99) || entry.Size != int64(i)*101 { + t.Errorf("Get2(%x) = %x, %d, want %x, %d", id, entry.OutputID, entry.Size, dummyID(i*99), int64(i)*101) + } + } +} + +func TestVerifyPanic(t *testing.T) { + os.Setenv("GODEBUG", "gocacheverify=1") + initEnv() + defer func() { + os.Unsetenv("GODEBUG") + verify = false + }() + + if !verify { + t.Fatal("initEnv did not set verify") + } + + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + c, err := Open(dir) + if err != nil { + t.Fatalf("Open: %v", err) + } + + id := ActionID(dummyID(1)) + if err := c.PutBytes(id, []byte("abc")); err != nil { + t.Fatal(err) + } + + defer func() { + if err := recover(); err != nil { + t.Log(err) + return + } + }() + c.PutBytes(id, []byte("def")) + t.Fatal("mismatched Put did not panic in verify mode") +} + +func dummyID(x int) [HashSize]byte { + var out [HashSize]byte + binary.LittleEndian.PutUint64(out[:], uint64(x)) + return out +} + +func TestCacheTrim(t *testing.T) { + dir, err := ioutil.TempDir("", "cachetest-") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + c, err := Open(dir) + if err != nil { + t.Fatalf("Open: %v", err) + } + const start = 1000000000 + now := int64(start) + c.now = func() time.Time { return time.Unix(now, 0) } + + checkTime := func(name string, mtime int64) { + t.Helper() + file := filepath.Join(c.dir, name[:2], name) + info, err := os.Stat(file) + if err != nil { + t.Fatal(err) + } + if info.ModTime().Unix() != mtime { + t.Fatalf("%s mtime = %d, want %d", name, info.ModTime().Unix(), mtime) + } + } + + id := ActionID(dummyID(1)) + c.PutBytes(id, []byte("abc")) + entry, _ := c.Get(id) + c.PutBytes(ActionID(dummyID(2)), []byte("def")) + mtime := now + checkTime(fmt.Sprintf("%x-a", id), mtime) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime) + + // Get should not change recent mtimes. + now = start + 10 + c.Get(id) + checkTime(fmt.Sprintf("%x-a", id), mtime) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime) + + // Get should change distant mtimes. + now = start + 5000 + mtime2 := now + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + checkTime(fmt.Sprintf("%x-a", id), mtime2) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime2) + + // Trim should leave everything alone: it's all too new. + c.Trim() + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + data, err := ioutil.ReadFile(filepath.Join(dir, "trim.txt")) + if err != nil { + t.Fatal(err) + } + checkTime(fmt.Sprintf("%x-a", dummyID(2)), start) + + // Trim less than a day later should not do any work at all. + now = start + 80000 + c.Trim() + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + data2, err := ioutil.ReadFile(filepath.Join(dir, "trim.txt")) + if err != nil { + t.Fatal(err) + } + if !bytes.Equal(data, data2) { + t.Fatalf("second trim did work: %q -> %q", data, data2) + } + + // Fast forward and do another trim just before the 5 day cutoff. 
+ // Note that because of usedQuantum the cutoff is actually 5 days + 1 hour. + // We used c.Get(id) just now, so 5 days later it should still be kept. + // On the other hand almost a full day has gone by since we wrote dummyID(2) + // and we haven't looked at it since, so 5 days later it should be gone. + now += 5 * 86400 + checkTime(fmt.Sprintf("%x-a", dummyID(2)), start) + c.Trim() + if _, err := c.Get(id); err != nil { + t.Fatal(err) + } + c.OutputFile(entry.OutputID) + mtime3 := now + if _, err := c.Get(dummyID(2)); err == nil { // haven't done a Get for this since original write above + t.Fatalf("Trim did not remove dummyID(2)") + } + + // The c.Get(id) refreshed id's mtime again. + // Check that another 5 days later it is still not gone, + // but check by using checkTime, which doesn't bring mtime forward. + now += 5 * 86400 + c.Trim() + checkTime(fmt.Sprintf("%x-a", id), mtime3) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime3) + + // Half a day later Trim should still be a no-op, because there was a Trim recently. + // Even though the entry for id is now old enough to be trimmed, + // it gets a reprieve until the time comes for a new Trim scan. + now += 86400 / 2 + c.Trim() + checkTime(fmt.Sprintf("%x-a", id), mtime3) + checkTime(fmt.Sprintf("%x-d", entry.OutputID), mtime3) + + // Another half a day later, Trim should actually run, and it should remove id. + now += 86400/2 + 1 + c.Trim() + if _, err := c.Get(dummyID(1)); err == nil { + t.Fatal("Trim did not remove dummyID(1)") + } +} diff --git a/internal/cache/default.go b/internal/cache/default.go new file mode 100644 index 000000000..3034f76a5 --- /dev/null +++ b/internal/cache/default.go @@ -0,0 +1,85 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "sync" +) + +// Default returns the default cache to use. +func Default() (*Cache, error) { + defaultOnce.Do(initDefaultCache) + return defaultCache, defaultDirErr +} + +var ( + defaultOnce sync.Once + defaultCache *Cache +) + +// cacheREADME is a message stored in a README in the cache directory. +// Because the cache lives outside the normal Go trees, we leave the +// README as a courtesy to explain where it came from. +const cacheREADME = `This directory holds cached build artifacts from staticcheck. +` + +// initDefaultCache does the work of finding the default cache +// the first time Default is called. +func initDefaultCache() { + dir := DefaultDir() + if err := os.MkdirAll(dir, 0777); err != nil { + log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + if _, err := os.Stat(filepath.Join(dir, "README")); err != nil { + // Best effort. + ioutil.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666) + } + + c, err := Open(dir) + if err != nil { + log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + defaultCache = c +} + +var ( + defaultDirOnce sync.Once + defaultDir string + defaultDirErr error +) + +// DefaultDir returns the effective STATICCHECK_CACHE setting. +func DefaultDir() string { + // Save the result of the first call to DefaultDir for later use in + // initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that + // subprocesses will inherit it, but that means initDefaultCache can't + // otherwise distinguish between an explicit "off" and a UserCacheDir error. 
+ + defaultDirOnce.Do(func() { + defaultDir = os.Getenv("STATICCHECK_CACHE") + if filepath.IsAbs(defaultDir) { + return + } + if defaultDir != "" { + defaultDirErr = fmt.Errorf("STATICCHECK_CACHE is not an absolute path") + return + } + + // Compute default location. + dir, err := os.UserCacheDir() + if err != nil { + defaultDirErr = fmt.Errorf("STATICCHECK_CACHE is not defined and %v", err) + return + } + defaultDir = filepath.Join(dir, "staticcheck") + }) + + return defaultDir +} diff --git a/internal/cache/hash.go b/internal/cache/hash.go new file mode 100644 index 000000000..a53543ec5 --- /dev/null +++ b/internal/cache/hash.go @@ -0,0 +1,176 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bytes" + "crypto/sha256" + "fmt" + "hash" + "io" + "os" + "sync" +) + +var debugHash = false // set when GODEBUG=gocachehash=1 + +// HashSize is the number of bytes in a hash. +const HashSize = 32 + +// A Hash provides access to the canonical hash function used to index the cache. +// The current implementation uses salted SHA256, but clients must not assume this. +type Hash struct { + h hash.Hash + name string // for debugging + buf *bytes.Buffer // for verify +} + +// hashSalt is a salt string added to the beginning of every hash +// created by NewHash. Using the Staticcheck version makes sure that different +// versions of the command do not address the same cache +// entries, so that a bug in one version does not affect the execution +// of other versions. This salt will result in additional ActionID files +// in the cache, but not additional copies of the large output files, +// which are still addressed by unsalted SHA256. +var hashSalt []byte + +func SetSalt(b []byte) { + hashSalt = b +} + +// Subkey returns an action ID corresponding to mixing a parent +// action ID with a string description of the subkey. +func Subkey(parent ActionID, desc string) ActionID { + h := sha256.New() + h.Write([]byte("subkey:")) + h.Write(parent[:]) + h.Write([]byte(desc)) + var out ActionID + h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH subkey %x %q = %x\n", parent, desc, out) + } + if verify { + hashDebug.Lock() + hashDebug.m[out] = fmt.Sprintf("subkey %x %q", parent, desc) + hashDebug.Unlock() + } + return out +} + +// NewHash returns a new Hash. +// The caller is expected to Write data to it and then call Sum. +func NewHash(name string) *Hash { + h := &Hash{h: sha256.New(), name: name} + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name) + } + h.Write(hashSalt) + if verify { + h.buf = new(bytes.Buffer) + } + return h +} + +// Write writes data to the running hash. +func (h *Hash) Write(b []byte) (int, error) { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]: %q\n", h.name, b) + } + if h.buf != nil { + h.buf.Write(b) + } + return h.h.Write(b) +} + +// Sum returns the hash of the data written previously. 
+func (h *Hash) Sum() [HashSize]byte { + var out [HashSize]byte + h.h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]: %x\n", h.name, out) + } + if h.buf != nil { + hashDebug.Lock() + if hashDebug.m == nil { + hashDebug.m = make(map[[HashSize]byte]string) + } + hashDebug.m[out] = h.buf.String() + hashDebug.Unlock() + } + return out +} + +// In GODEBUG=gocacheverify=1 mode, +// hashDebug holds the input to every computed hash ID, +// so that we can work backward from the ID involved in a +// cache entry mismatch to a description of what should be there. +var hashDebug struct { + sync.Mutex + m map[[HashSize]byte]string +} + +// reverseHash returns the input used to compute the hash id. +func reverseHash(id [HashSize]byte) string { + hashDebug.Lock() + s := hashDebug.m[id] + hashDebug.Unlock() + return s +} + +var hashFileCache struct { + sync.Mutex + m map[string][HashSize]byte +} + +// FileHash returns the hash of the named file. +// It caches repeated lookups for a given file, +// and the cache entry for a file can be initialized +// using SetFileHash. +// The hash used by FileHash is not the same as +// the hash used by NewHash. +func FileHash(file string) ([HashSize]byte, error) { + hashFileCache.Lock() + out, ok := hashFileCache.m[file] + hashFileCache.Unlock() + + if ok { + return out, nil + } + + h := sha256.New() + f, err := os.Open(file) + if err != nil { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) + } + return [HashSize]byte{}, err + } + _, err = io.Copy(h, f) + f.Close() + if err != nil { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) + } + return [HashSize]byte{}, err + } + h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %x\n", file, out) + } + + SetFileHash(file, out) + return out, nil +} + +// SetFileHash sets the hash returned by FileHash for file. +func SetFileHash(file string, sum [HashSize]byte) { + hashFileCache.Lock() + if hashFileCache.m == nil { + hashFileCache.m = make(map[string][HashSize]byte) + } + hashFileCache.m[file] = sum + hashFileCache.Unlock() +} diff --git a/internal/cache/hash_test.go b/internal/cache/hash_test.go new file mode 100644 index 000000000..3bf714303 --- /dev/null +++ b/internal/cache/hash_test.go @@ -0,0 +1,52 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cache + +import ( + "fmt" + "io/ioutil" + "os" + "testing" +) + +func TestHash(t *testing.T) { + oldSalt := hashSalt + hashSalt = nil + defer func() { + hashSalt = oldSalt + }() + + h := NewHash("alice") + h.Write([]byte("hello world")) + sum := fmt.Sprintf("%x", h.Sum()) + want := "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" + if sum != want { + t.Errorf("hash(hello world) = %v, want %v", sum, want) + } +} + +func TestHashFile(t *testing.T) { + f, err := ioutil.TempFile("", "cmd-go-test-") + if err != nil { + t.Fatal(err) + } + name := f.Name() + fmt.Fprintf(f, "hello world") + defer os.Remove(name) + if err := f.Close(); err != nil { + t.Fatal(err) + } + + var h ActionID // make sure hash result is assignable to ActionID + h, err = FileHash(name) + if err != nil { + t.Fatal(err) + } + sum := fmt.Sprintf("%x", h) + want := "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9" + if sum != want { + t.Errorf("hash(hello world) = %v, want %v", sum, want) + } +} diff --git a/internal/passes/buildssa/buildssa.go b/internal/passes/buildssa/buildssa.go new file mode 100644 index 000000000..fde918d12 --- /dev/null +++ b/internal/passes/buildssa/buildssa.go @@ -0,0 +1,116 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildssa defines an Analyzer that constructs the SSA +// representation of an error-free package and returns the set of all +// functions within it. It does not report any diagnostics itself but +// may be used as an input to other analyzers. +// +// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE. +package buildssa + +import ( + "go/ast" + "go/types" + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/ssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "buildssa", + Doc: "build SSA-form IR for later passes", + Run: run, + ResultType: reflect.TypeOf(new(SSA)), +} + +// SSA provides SSA-form intermediate representation for all the +// non-blank source functions in the current package. +type SSA struct { + Pkg *ssa.Package + SrcFuncs []*ssa.Function +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Plundered from ssautil.BuildPackage. + + // We must create a new Program for each Package because the + // analysis API provides no place to hang a Program shared by + // all Packages. Consequently, SSA Packages and Functions do not + // have a canonical representation across an analysis session of + // multiple packages. This is unlikely to be a problem in + // practice because the analysis API essentially forces all + // packages to be analysed independently, so any given call to + // Analysis.Run on a package will see only SSA objects belonging + // to a single Program. + + mode := ssa.GlobalDebug + + prog := ssa.NewProgram(pass.Fset, mode) + + // Create SSA packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pass.Pkg.Imports()) + + // Create and build the primary package. + ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) + ssapkg.Build() + + // Compute list of source functions, including literals, + // in source order. 
+ var funcs []*ssa.Function + var addAnons func(f *ssa.Function) + addAnons = func(f *ssa.Function) { + funcs = append(funcs, f) + for _, anon := range f.AnonFuncs { + addAnons(anon) + } + } + addAnons(ssapkg.Members["init"].(*ssa.Function)) + for _, f := range pass.Files { + for _, decl := range f.Decls { + if fdecl, ok := decl.(*ast.FuncDecl); ok { + + // SSA will not build a Function + // for a FuncDecl named blank. + // That's arguably too strict but + // relaxing it would break uniqueness of + // names of package members. + if fdecl.Name.Name == "_" { + continue + } + + // (init functions have distinct Func + // objects named "init" and distinct + // ssa.Functions named "init#1", ...) + + fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func) + if fn == nil { + panic(fn) + } + + f := ssapkg.Prog.FuncValue(fn) + if f == nil { + panic(fn) + } + + addAnons(f) + } + } + } + + return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil +} diff --git a/internal/passes/buildssa/buildssa_test.go b/internal/passes/buildssa/buildssa_test.go new file mode 100644 index 000000000..0e0435d26 --- /dev/null +++ b/internal/passes/buildssa/buildssa_test.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildssa_test + +import ( + "fmt" + "os" + "testing" + + "golang.org/x/tools/go/analysis/analysistest" + "honnef.co/go/tools/internal/passes/buildssa" +) + +func Test(t *testing.T) { + testdata := analysistest.TestData() + result := analysistest.Run(t, testdata, buildssa.Analyzer, "a")[0].Result + + ssainfo := result.(*buildssa.SSA) + got := fmt.Sprint(ssainfo.SrcFuncs) + want := `[a.init a.Fib (a.T).fib]` + if got != want { + t.Errorf("SSA.SrcFuncs = %s, want %s", got, want) + for _, f := range ssainfo.SrcFuncs { + f.WriteTo(os.Stderr) + } + } +} diff --git a/internal/passes/buildssa/testdata/src/a/a.go b/internal/passes/buildssa/testdata/src/a/a.go new file mode 100644 index 000000000..ddb13dacb --- /dev/null +++ b/internal/passes/buildssa/testdata/src/a/a.go @@ -0,0 +1,16 @@ +package a + +func Fib(x int) int { + if x < 2 { + return x + } + return Fib(x-1) + Fib(x-2) +} + +type T int + +func (T) fib(x int) int { return Fib(x) } + +func _() { + print("hi") +} diff --git a/internal/renameio/renameio.go b/internal/renameio/renameio.go new file mode 100644 index 000000000..3f3f1708f --- /dev/null +++ b/internal/renameio/renameio.go @@ -0,0 +1,83 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package renameio writes files atomically by renaming temporary files. +package renameio + +import ( + "bytes" + "io" + "io/ioutil" + "os" + "path/filepath" + "runtime" + "strings" + "time" +) + +const patternSuffix = "*.tmp" + +// Pattern returns a glob pattern that matches the unrenamed temporary files +// created when writing to filename. +func Pattern(filename string) string { + return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) +} + +// WriteFile is like ioutil.WriteFile, but first writes data to an arbitrary +// file in the same directory as filename, then renames it atomically to the +// final name. +// +// That ensures that the final location, if it exists, is always a complete file. 
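+//
+// For example, to atomically replace a hypothetical stats.json with
+// caller-supplied bytes:
+//
+//	if err := renameio.WriteFile("stats.json", data); err != nil {
+//		// the destination still holds its previous, complete contents
+//	}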
+func WriteFile(filename string, data []byte) (err error) { + return WriteToFile(filename, bytes.NewReader(data)) +} + +// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader +// instead of a slice. +func WriteToFile(filename string, data io.Reader) (err error) { + f, err := ioutil.TempFile(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) + if err != nil { + return err + } + defer func() { + // Only call os.Remove on f.Name() if we failed to rename it: otherwise, + // some other process may have created a new file with the same name after + // that. + if err != nil { + f.Close() + os.Remove(f.Name()) + } + }() + + if _, err := io.Copy(f, data); err != nil { + return err + } + // Sync the file before renaming it: otherwise, after a crash the reader may + // observe a 0-length file instead of the actual contents. + // See https://golang.org/issue/22397#issuecomment-380831736. + if err := f.Sync(); err != nil { + return err + } + if err := f.Close(); err != nil { + return err + } + + var start time.Time + for { + err := os.Rename(f.Name(), filename) + if err == nil || runtime.GOOS != "windows" || !strings.HasSuffix(err.Error(), "Access is denied.") { + return err + } + + // Windows seems to occasionally trigger spurious "Access is denied" errors + // here (see golang.org/issue/31247). We're not sure why. It's probably + // worth a little extra latency to avoid propagating the spurious errors. + if start.IsZero() { + start = time.Now() + } else if time.Since(start) >= 500*time.Millisecond { + return err + } + time.Sleep(5 * time.Millisecond) + } +} diff --git a/internal/sharedcheck/lint.go b/internal/sharedcheck/lint.go index cf797fb1b..affee6607 100644 --- a/internal/sharedcheck/lint.go +++ b/internal/sharedcheck/lint.go @@ -4,13 +4,14 @@ import ( "go/ast" "go/types" - "honnef.co/go/tools/lint" + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/internal/passes/buildssa" . 
"honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" ) -func CheckRangeStringRunes(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { fn := func(node ast.Node) bool { rng, ok := node.(*ast.RangeStmt) if !ok || !IsBlank(rng.Key) { @@ -59,10 +60,11 @@ func CheckRangeStringRunes(j *lint.Job) { return true } - j.Errorf(rng, "should range over string, not []rune(string)") + pass.Reportf(rng.Pos(), "should range over string, not []rune(string)") return true } Inspect(ssafn.Syntax(), fn) } + return nil, nil } diff --git a/lint/analysis.go b/lint/analysis.go new file mode 100644 index 000000000..6e914e02a --- /dev/null +++ b/lint/analysis.go @@ -0,0 +1,39 @@ +package lint + +import ( + "go/ast" + "go/token" + "reflect" + + "golang.org/x/tools/go/analysis" +) + +var IsGeneratedAnalyzer = &analysis.Analyzer{ + Name: "isgenerated", + Doc: "annotate file names that have been code generated", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[string]bool{} + for _, f := range pass.Files { + path := pass.Fset.PositionFor(f.Pos(), false).Filename + m[path] = isGenerated(path) + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[string]bool{}), +} + +var TokenFileAnalyzer = &analysis.Analyzer{ + Name: "tokenfileanalyzer", + Doc: "creates a mapping of *token.File to *ast.File", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[*token.File]*ast.File{} + for _, af := range pass.Files { + tf := pass.Fset.File(af.Pos()) + m[tf] = af + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[*token.File]*ast.File{}), +} diff --git a/lint/lint.go b/lint/lint.go index c4d9ff671..72f69ba83 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -4,36 +4,20 @@ package lint // import "honnef.co/go/tools/lint" import ( "bytes" "fmt" - "go/ast" + "go/scanner" "go/token" "go/types" - "io" - "os" "path/filepath" - "runtime" "sort" "strings" "sync" - "time" "unicode" - "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" "honnef.co/go/tools/config" - "honnef.co/go/tools/ssa" - "honnef.co/go/tools/ssa/ssautil" ) -type Job struct { - Pkg *Pkg - GoVersion int - - check Check - problems []Problem - - duration time.Duration -} - type Ignore interface { Match(p Problem) bool } @@ -42,17 +26,18 @@ type LineIgnore struct { File string Line int Checks []string - matched bool - pos token.Pos + Matched bool + Pos token.Pos } func (li *LineIgnore) Match(p Problem) bool { - if p.Position.Filename != li.File || p.Position.Line != li.Line { + pos := p.Pos + if pos.Filename != li.File || pos.Line != li.Line { return false } for _, c := range li.Checks { if m, _ := filepath.Match(c, p.Check); m { - li.matched = true + li.Matched = true return true } } @@ -61,7 +46,7 @@ func (li *LineIgnore) Match(p Problem) bool { func (li *LineIgnore) String() string { matched := "not matched" - if li.matched { + if li.Matched { matched = "matched" } return fmt.Sprintf("%s:%d %s (%s)", li.File, li.Line, strings.Join(li.Checks, ", "), matched) @@ -73,7 +58,7 @@ type FileIgnore struct { } func (fi *FileIgnore) Match(p Problem) bool { - if p.Position.Filename != fi.File { + if p.Pos.Filename != fi.File { return false } for _, c := range fi.Checks { @@ -84,43 +69,6 @@ func (fi *FileIgnore) Match(p Problem) bool { return false } -type GlobIgnore struct { 
- Pattern string - Checks []string -} - -func (gi *GlobIgnore) Match(p Problem) bool { - if gi.Pattern != "*" { - pkgpath := p.Package.Types.Path() - if strings.HasSuffix(pkgpath, "_test") { - pkgpath = pkgpath[:len(pkgpath)-len("_test")] - } - name := filepath.Join(pkgpath, filepath.Base(p.Position.Filename)) - if m, _ := filepath.Match(gi.Pattern, name); !m { - return false - } - } - for _, c := range gi.Checks { - if m, _ := filepath.Match(c, p.Check); m { - return true - } - } - return false -} - -type Program struct { - SSA *ssa.Program - InitialPackages []*Pkg - AllPackages []*packages.Package - AllFunctions []*ssa.Function -} - -func (prog *Program) Fset() *token.FileSet { - return prog.InitialPackages[0].Fset -} - -type Func func(*Job) - type Severity uint8 const ( @@ -131,367 +79,177 @@ const ( // Problem represents a problem in some source code. type Problem struct { - Position token.Position // position in source file - Text string // the prose that describes the problem + Pos token.Position + Message string Check string - Package *Pkg Severity Severity } func (p *Problem) String() string { - if p.Check == "" { - return p.Text - } - return fmt.Sprintf("%s (%s)", p.Text, p.Check) -} - -type Checker interface { - Name() string - Prefix() string - Init(*Program) - Checks() []Check -} - -type Check struct { - Fn Func - ID string - FilterGenerated bool - Doc string + return fmt.Sprintf("%s (%s)", p.Message, p.Check) } // A Linter lints Go source code. type Linter struct { - Checkers []Checker - Ignores []Ignore - GoVersion int - ReturnIgnored bool - Config config.Config - - MaxConcurrentJobs int - PrintStats bool - - automaticIgnores []Ignore -} - -func (l *Linter) ignore(p Problem) bool { - ignored := false - for _, ig := range l.automaticIgnores { - // We cannot short-circuit these, as we want to record, for - // each ignore, whether it matched or not. - if ig.Match(p) { - ignored = true - } - } - if ignored { - // no need to execute other ignores if we've already had a - // match. - return true - } - for _, ig := range l.Ignores { - // We can short-circuit here, as we aren't tracking any - // information. 
- if ig.Match(p) { - return true - } - } - - return false -} - -func (j *Job) File(node Positioner) *ast.File { - return j.Pkg.tokenFileMap[j.Pkg.Fset.File(node.Pos())] -} - -func parseDirective(s string) (cmd string, args []string) { - if !strings.HasPrefix(s, "//lint:") { - return "", nil - } - s = strings.TrimPrefix(s, "//lint:") - fields := strings.Split(s, " ") - return fields[0], fields[1:] -} - -type PerfStats struct { - PackageLoading time.Duration - SSABuild time.Duration - OtherInitWork time.Duration - CheckerInits map[string]time.Duration - Jobs []JobStat + Checkers []*analysis.Analyzer + CumulativeCheckers []CumulativeChecker + GoVersion int + Config config.Config } -type JobStat struct { - Job string - Duration time.Duration +type CumulativeChecker interface { + Analyzer() *analysis.Analyzer + Result() []types.Object + ProblemObject(*token.FileSet, types.Object) Problem } -func (stats *PerfStats) Print(w io.Writer) { - fmt.Fprintln(w, "Package loading:", stats.PackageLoading) - fmt.Fprintln(w, "SSA build:", stats.SSABuild) - fmt.Fprintln(w, "Other init work:", stats.OtherInitWork) - - fmt.Fprintln(w, "Checker inits:") - for checker, d := range stats.CheckerInits { - fmt.Fprintf(w, "\t%s: %s\n", checker, d) +func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) { + var analyzers []*analysis.Analyzer + analyzers = append(analyzers, l.Checkers...) + for _, cum := range l.CumulativeCheckers { + analyzers = append(analyzers, cum.Analyzer()) } - fmt.Fprintln(w) - fmt.Fprintln(w, "Jobs:") - sort.Slice(stats.Jobs, func(i, j int) bool { - return stats.Jobs[i].Duration < stats.Jobs[j].Duration - }) - var total time.Duration - for _, job := range stats.Jobs { - fmt.Fprintf(w, "\t%s: %s\n", job.Job, job.Duration) - total += job.Duration + r, err := NewRunner() + if err != nil { + return nil, err } - fmt.Fprintf(w, "\tTotal: %s\n", total) -} -func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { - allPkgs := allPackages(initial) - t := time.Now() - ssaprog, _ := ssautil.Packages(allPkgs, ssa.GlobalDebug) - ssaprog.Build() - if stats != nil { - stats.SSABuild = time.Since(t) + pkgs, err := r.Run(cfg, patterns, analyzers) + if err != nil { + return nil, err } - runtime.GC() - - t = time.Now() - pkgMap := map[*ssa.Package]*Pkg{} - var pkgs []*Pkg - for _, pkg := range initial { - ssapkg := ssaprog.Package(pkg.Types) - var cfg config.Config - if len(pkg.GoFiles) != 0 { - path := pkg.GoFiles[0] - dir := filepath.Dir(path) - var err error - // OPT(dh): we're rebuilding the entire config tree for - // each package. for example, if we check a/b/c and - // a/b/c/d, we'll process a, a/b, a/b/c, a, a/b, a/b/c, - // a/b/c/d – we should cache configs per package and only - // load the new levels. - cfg, err = config.Load(dir) - if err != nil { - // FIXME(dh): we couldn't load the config, what are we - // supposed to do? 
probably tell the user somehow - } - cfg = cfg.Merge(l.Config) - } - - pkg := &Pkg{ - SSA: ssapkg, - Package: pkg, - Config: cfg, - Generated: map[string]bool{}, - tokenFileMap: map[*token.File]*ast.File{}, - } - pkg.Inspector = inspector.New(pkg.Syntax) - for _, f := range pkg.Syntax { - tf := pkg.Fset.File(f.Pos()) - pkg.tokenFileMap[tf] = f - path := DisplayPosition(pkg.Fset, f.Pos()).Filename - pkg.Generated[path] = isGenerated(path) - } - pkgMap[ssapkg] = pkg - pkgs = append(pkgs, pkg) - } - - prog := &Program{ - SSA: ssaprog, - InitialPackages: pkgs, - AllPackages: allPkgs, - } - - for fn := range ssautil.AllFunctions(ssaprog) { - prog.AllFunctions = append(prog.AllFunctions, fn) - if fn.Pkg == nil { - continue - } - if pkg, ok := pkgMap[fn.Pkg]; ok { - pkg.InitialFunctions = append(pkg.InitialFunctions, fn) - } - } + tpkgToPkg := map[*types.Package]*Package{} + for _, pkg := range pkgs { + tpkgToPkg[pkg.Types] = pkg - var out []Problem - l.automaticIgnores = nil - for _, pkg := range initial { - for _, f := range pkg.Syntax { - found := false - commentLoop: - for _, cg := range f.Comments { - for _, c := range cg.List { - if strings.Contains(c.Text, "//lint:") { - found = true - break commentLoop - } + for _, err := range pkg.errs { + switch err := err.(type) { + case types.Error: + p := Problem{ + Pos: err.Fset.PositionFor(err.Pos, false), + Message: err.Msg, + Severity: Error, + Check: "compile", } - } - if !found { - continue - } - cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) - for node, cgs := range cm { - for _, cg := range cgs { - for _, c := range cg.List { - if !strings.HasPrefix(c.Text, "//lint:") { - continue - } - cmd, args := parseDirective(c.Text) - switch cmd { - case "ignore", "file-ignore": - if len(args) < 2 { - // FIXME(dh): this causes duplicated warnings when using megacheck - p := Problem{ - Position: DisplayPosition(prog.Fset(), c.Pos()), - Text: "malformed linter directive; missing the required reason field?", - Check: "", - Package: nil, - } - out = append(out, p) - continue - } - default: - // unknown directive, ignore - continue - } - checks := strings.Split(args[0], ",") - pos := DisplayPosition(prog.Fset(), node.Pos()) - var ig Ignore - switch cmd { - case "ignore": - ig = &LineIgnore{ - File: pos.Filename, - Line: pos.Line, - Checks: checks, - pos: c.Pos(), - } - case "file-ignore": - ig = &FileIgnore{ - File: pos.Filename, - Checks: checks, - } - } - l.automaticIgnores = append(l.automaticIgnores, ig) + pkg.problems = append(pkg.problems, p) + case packages.Error: + p := Problem{ + Pos: parsePos(err.Pos), + Message: err.Msg, + Severity: Error, + Check: "compile", + } + pkg.problems = append(pkg.problems, p) + case scanner.ErrorList: + for _, err := range err { + p := Problem{ + Pos: err.Pos, + Message: err.Msg, + Severity: Error, + Check: "compile", } + pkg.problems = append(pkg.problems, p) + } + case error: + p := Problem{ + Pos: token.Position{}, + Message: err.Error(), + Severity: Error, + Check: "compile", } + pkg.problems = append(pkg.problems, p) } } } - if stats != nil { - stats.OtherInitWork = time.Since(t) - } - - for _, checker := range l.Checkers { - t := time.Now() - checker.Init(prog) - if stats != nil { - stats.CheckerInits[checker.Name()] = time.Since(t) + var problems []Problem + for _, cum := range l.CumulativeCheckers { + for _, res := range cum.Result() { + pkg := tpkgToPkg[res.Pkg()] + allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks) + if 
allowedChecks[cum.Analyzer().Name] { + pos := DisplayPosition(pkg.Fset, res.Pos()) + if pkg.gen[pos.Filename] { + continue + } + p := cum.ProblemObject(pkg.Fset, res) + problems = append(problems, p) + } } } - var jobs []*Job - var allChecks []string - - var wg sync.WaitGroup - for _, checker := range l.Checkers { - for _, check := range checker.Checks() { - allChecks = append(allChecks, check.ID) - if check.Fn == nil { - continue + for _, pkg := range pkgs { + for _, ig := range pkg.ignores { + for i := range pkg.problems { + p := &pkg.problems[i] + if ig.Match(*p) { + p.Severity = Ignored + } } - for _, pkg := range pkgs { - j := &Job{ - Pkg: pkg, - check: check, - GoVersion: l.GoVersion, + for i := range problems { + p := &problems[i] + if ig.Match(*p) { + p.Severity = Ignored } - jobs = append(jobs, j) - wg.Add(1) - go func(check Check, j *Job) { - t := time.Now() - check.Fn(j) - j.duration = time.Since(t) - wg.Done() - }(check, j) } } - } - wg.Wait() - - for _, j := range jobs { - if stats != nil { - stats.Jobs = append(stats.Jobs, JobStat{j.check.ID, j.duration}) - } - for _, p := range j.problems { - if p.Package == nil { - panic(fmt.Sprintf("internal error: problem at position %s has nil package", p.Position)) + if pkg.cfg == nil { + // The package failed to load, otherwise we would have a + // valid config. Pass through all errors. + problems = append(problems, pkg.problems...) + } else { + for _, p := range pkg.problems { + allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks) + allowedChecks["compile"] = true + if allowedChecks[p.Check] { + problems = append(problems, p) + } } - allowedChecks := FilterChecks(allChecks, p.Package.Config.Checks) + } - if l.ignore(p) { - p.Severity = Ignored + for _, ig := range pkg.ignores { + ig, ok := ig.(*LineIgnore) + if !ok { + continue } - // TODO(dh): support globs in check white/blacklist - // OPT(dh): this approach doesn't actually disable checks, - // it just discards their results. For the moment, that's - // fine. None of our checks are super expensive. In the - // future, we may want to provide opt-in expensive - // analysis, which shouldn't run at all. It may be easiest - // to implement this in the individual checks. - if (l.ReturnIgnored || p.Severity != Ignored) && allowedChecks[p.Check] { - out = append(out, p) + if ig.Matched { + continue } - } - } - for _, ig := range l.automaticIgnores { - ig, ok := ig.(*LineIgnore) - if !ok { - continue - } - if ig.matched { - continue - } - - couldveMatched := false - for _, pkg := range pkgs { - for _, f := range pkg.tokenFileMap { - if prog.Fset().Position(f.Pos()).Filename != ig.File { + couldveMatched := false + allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks) + for _, c := range ig.Checks { + if !allowedChecks[c] { continue } - allowedChecks := FilterChecks(allChecks, pkg.Config.Checks) - for _, c := range ig.Checks { - if !allowedChecks[c] { - continue - } - couldveMatched = true - break - } + couldveMatched = true break } - } - if !couldveMatched { - // The ignored checks were disabled for the containing package. - // Don't flag the ignore for not having matched. - continue - } - p := Problem{ - Position: DisplayPosition(prog.Fset(), ig.pos), - Text: "this linter directive didn't match anything; should it be removed?", - Check: "", - Package: nil, + if !couldveMatched { + // The ignored checks were disabled for the containing package. + // Don't flag the ignore for not having matched. 
+ continue + } + p := Problem{ + Pos: DisplayPosition(pkg.Fset, ig.Pos), + Message: "this linter directive didn't match anything; should it be removed?", + Check: "", + } + problems = append(problems, p) } - out = append(out, p) } - sort.Slice(out, func(i int, j int) bool { - pi, pj := out[i].Position, out[j].Position + if len(problems) == 0 { + return nil, nil + } + + sort.Slice(problems, func(i, j int) bool { + pi := problems[i].Pos + pj := problems[j].Pos if pi.Filename != pj.Filename { return pi.Filename < pj.Filename @@ -503,32 +261,22 @@ func (l *Linter) Lint(initial []*packages.Package, stats *PerfStats) []Problem { return pi.Column < pj.Column } - return out[i].Text < out[j].Text + return problems[i].Message < problems[j].Message }) - if l.PrintStats && stats != nil { - stats.Print(os.Stderr) - } - - if len(out) < 2 { - return out - } - - uniq := make([]Problem, 0, len(out)) - uniq = append(uniq, out[0]) - prev := out[0] - for _, p := range out[1:] { - if prev.Position == p.Position && prev.Text == p.Text { - continue + var out []Problem + out = append(out, problems[0]) + for i, p := range problems[1:] { + // We may encounter duplicate problems because one file + // can be part of many packages. + if problems[i] != p { + out = append(out, p) } - prev = p - uniq = append(uniq, p) } - - return uniq + return out, nil } -func FilterChecks(allChecks []string, checks []string) map[string]bool { +func FilterChecks(allChecks []*analysis.Analyzer, checks []string) map[string]bool { // OPT(dh): this entire computation could be cached per package allowedChecks := map[string]bool{} @@ -541,7 +289,7 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { if check == "*" || check == "all" { // Match all for _, c := range allChecks { - allowedChecks[c] = b + allowedChecks[c.Name] = b } } else if strings.HasSuffix(check, "*") { // Glob @@ -549,17 +297,17 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 for _, c := range allChecks { - idx := strings.IndexFunc(c, func(r rune) bool { return unicode.IsNumber(r) }) + idx := strings.IndexFunc(c.Name, func(r rune) bool { return unicode.IsNumber(r) }) if isCat { // Glob is S*, which should match S1000 but not SA1000 - cat := c[:idx] + cat := c.Name[:idx] if prefix == cat { - allowedChecks[c] = b + allowedChecks[c.Name] = b } } else { // Glob is S1* - if strings.HasPrefix(c, prefix) { - allowedChecks[c] = b + if strings.HasPrefix(c.Name, prefix) { + allowedChecks[c.Name] = b } } } @@ -571,19 +319,6 @@ func FilterChecks(allChecks []string, checks []string) map[string]bool { return allowedChecks } -// Pkg represents a package being linted. 
-type Pkg struct { - SSA *ssa.Package - InitialFunctions []*ssa.Function - *packages.Package - Config config.Config - Inspector *inspector.Inspector - // TODO(dh): this map should probably map from *ast.File, not string - Generated map[string]bool - - tokenFileMap map[*token.File]*ast.File -} - type Positioner interface { Pos() token.Pos } @@ -602,34 +337,6 @@ func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { return pos } -func (j *Job) Errorf(n Positioner, format string, args ...interface{}) *Problem { - pos := DisplayPosition(j.Pkg.Fset, n.Pos()) - if j.Pkg.Generated[pos.Filename] && j.check.FilterGenerated { - return nil - } - problem := Problem{ - Position: pos, - Text: fmt.Sprintf(format, args...), - Check: j.check.ID, - Package: j.Pkg, - } - j.problems = append(j.problems, problem) - return &j.problems[len(j.problems)-1] -} - -func allPackages(pkgs []*packages.Package) []*packages.Package { - var out []*packages.Package - packages.Visit( - pkgs, - func(pkg *packages.Package) bool { - out = append(out, pkg) - return true - }, - nil, - ) - return out -} - var bufferPool = &sync.Pool{ New: func() interface{} { buf := bytes.NewBuffer(nil) @@ -670,10 +377,24 @@ func writePackage(buf *bytes.Buffer, pkg *types.Package) { if pkg == nil { return } - var s string - s = pkg.Path() + s := pkg.Path() if s != "" { buf.WriteString(s) buf.WriteByte('.') } } + +type StringSliceVar []string + +func (v StringSliceVar) String() string { + return strings.Join(v, ",") +} + +func (v *StringSliceVar) Set(s string) error { + *v = StringSliceVar(strings.Split(s, ",")) + return nil +} + +func (v *StringSliceVar) Get() interface{} { + return []string(*v) +} diff --git a/lint/lint_test.go b/lint/lint_test.go deleted file mode 100644 index 437e75015..000000000 --- a/lint/lint_test.go +++ /dev/null @@ -1,34 +0,0 @@ -package lint_test - -import ( - "testing" - - . 
"honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/testutil" -) - -type testChecker struct{} - -func (testChecker) Name() string { return "stylecheck" } -func (testChecker) Prefix() string { return "TEST" } -func (testChecker) Init(prog *Program) {} - -func (testChecker) Checks() []Check { - return []Check{ - {ID: "TEST1000", FilterGenerated: false, Fn: testLint}, - } -} - -func testLint(j *Job) { - // Flag all functions - for _, fn := range j.Pkg.InitialFunctions { - if fn.Synthetic == "" { - j.Errorf(fn, "This is a test problem") - } - } -} - -func TestAll(t *testing.T) { - c := testChecker{} - testutil.TestAll(t, c, "") -} diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index ab6800ab9..31cf2eeea 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -4,6 +4,7 @@ package lintdsl import ( "bytes" + "flag" "fmt" "go/ast" "go/constant" @@ -12,6 +13,7 @@ import ( "go/types" "strings" + "golang.org/x/tools/go/analysis" "honnef.co/go/tools/lint" "honnef.co/go/tools/ssa" ) @@ -71,16 +73,6 @@ func IsPointerLike(T types.Type) bool { return false } -func IsGenerated(f *ast.File) bool { - comments := f.Comments - if len(comments) > 0 { - comment := comments[0].Text() - return strings.Contains(comment, "Code generated by") || - strings.Contains(comment, "DO NOT EDIT") - } - return false -} - func IsIdent(expr ast.Expr, ident string) bool { id, ok := expr.(*ast.Ident) return ok && id.Name == ident @@ -103,26 +95,26 @@ func IsZero(expr ast.Expr) bool { return IsIntLiteral(expr, "0") } -func IsOfType(j *lint.Job, expr ast.Expr, name string) bool { - return IsType(j.Pkg.TypesInfo.TypeOf(expr), name) +func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool { + return IsType(pass.TypesInfo.TypeOf(expr), name) } -func IsInTest(j *lint.Job, node lint.Positioner) bool { +func IsInTest(pass *analysis.Pass, node lint.Positioner) bool { // FIXME(dh): this doesn't work for global variables with // initializers - f := j.Pkg.Fset.File(node.Pos()) + f := pass.Fset.File(node.Pos()) return f != nil && strings.HasSuffix(f.Name(), "_test.go") } -func IsInMain(j *lint.Job, node lint.Positioner) bool { +func IsInMain(pass *analysis.Pass, node lint.Positioner) bool { if node, ok := node.(packager); ok { return node.Package().Pkg.Name() == "main" } - return j.Pkg.Types.Name() == "main" + return pass.Pkg.Name() == "main" } -func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { - info := j.Pkg.TypesInfo +func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string { + info := pass.TypesInfo sel := info.Selections[expr] if sel == nil { if x, ok := expr.X.(*ast.Ident); ok { @@ -138,16 +130,16 @@ func SelectorName(j *lint.Job, expr *ast.SelectorExpr) string { return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) } -func IsNil(j *lint.Job, expr ast.Expr) bool { - return j.Pkg.TypesInfo.Types[expr].IsNil() +func IsNil(pass *analysis.Pass, expr ast.Expr) bool { + return pass.TypesInfo.Types[expr].IsNil() } -func BoolConst(j *lint.Job, expr ast.Expr) bool { - val := j.Pkg.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() +func BoolConst(pass *analysis.Pass, expr ast.Expr) bool { + val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() return constant.BoolVal(val) } -func IsBoolConst(j *lint.Job, expr ast.Expr) bool { +func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool { // We explicitly don't support typed bools because more often than // not, custom bool types are used as binary enums and the // explicit comparison is desired. 
@@ -156,7 +148,7 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { if !ok { return false } - obj := j.Pkg.TypesInfo.ObjectOf(ident) + obj := pass.TypesInfo.ObjectOf(ident) c, ok := obj.(*types.Const) if !ok { return false @@ -171,8 +163,8 @@ func IsBoolConst(j *lint.Job, expr ast.Expr) bool { return true } -func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { - tv := j.Pkg.TypesInfo.Types[expr] +func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) { + tv := pass.TypesInfo.Types[expr] if tv.Value == nil { return 0, false } @@ -182,8 +174,8 @@ func ExprToInt(j *lint.Job, expr ast.Expr) (int64, bool) { return constant.Int64Val(tv.Value) } -func ExprToString(j *lint.Job, expr ast.Expr) (string, bool) { - val := j.Pkg.TypesInfo.Types[expr].Value +func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { + val := pass.TypesInfo.Types[expr].Value if val == nil { return "", false } @@ -212,20 +204,21 @@ func DereferenceR(T types.Type) types.Type { return T } -func IsGoVersion(j *lint.Job, minor int) bool { - return j.GoVersion >= minor +func IsGoVersion(pass *analysis.Pass, minor int) bool { + version := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter).Get().(int) + return version >= minor } -func CallNameAST(j *lint.Job, call *ast.CallExpr) string { +func CallNameAST(pass *analysis.Pass, call *ast.CallExpr) string { switch fun := call.Fun.(type) { case *ast.SelectorExpr: - fn, ok := j.Pkg.TypesInfo.ObjectOf(fun.Sel).(*types.Func) + fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func) if !ok { return "" } return lint.FuncName(fn) case *ast.Ident: - obj := j.Pkg.TypesInfo.ObjectOf(fun) + obj := pass.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: return lint.FuncName(obj) @@ -239,35 +232,35 @@ func CallNameAST(j *lint.Job, call *ast.CallExpr) string { } } -func IsCallToAST(j *lint.Job, node ast.Node, name string) bool { +func IsCallToAST(pass *analysis.Pass, node ast.Node, name string) bool { call, ok := node.(*ast.CallExpr) if !ok { return false } - return CallNameAST(j, call) == name + return CallNameAST(pass, call) == name } -func IsCallToAnyAST(j *lint.Job, node ast.Node, names ...string) bool { +func IsCallToAnyAST(pass *analysis.Pass, node ast.Node, names ...string) bool { for _, name := range names { - if IsCallToAST(j, node, name) { + if IsCallToAST(pass, node, name) { return true } } return false } -func Render(j *lint.Job, x interface{}) string { +func Render(pass *analysis.Pass, x interface{}) string { var buf bytes.Buffer - if err := printer.Fprint(&buf, j.Pkg.Fset, x); err != nil { + if err := printer.Fprint(&buf, pass.Fset, x); err != nil { panic(err) } return buf.String() } -func RenderArgs(j *lint.Job, args []ast.Expr) string { +func RenderArgs(pass *analysis.Pass, args []ast.Expr) string { var ss []string for _, arg := range args { - ss = append(ss, Render(j, arg)) + ss = append(ss, Render(pass, arg)) } return strings.Join(ss, ", ") } @@ -359,3 +352,26 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel } return out } + +func File(pass *analysis.Pass, node lint.Positioner) *ast.File { + pass.Fset.PositionFor(node.Pos(), true) + m := pass.ResultOf[lint.TokenFileAnalyzer].(map[*token.File]*ast.File) + return m[pass.Fset.File(node.Pos())] +} + +// IsGenerated reports whether pos is in a generated file, It ignores +// //line directives. 
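+//
+// A check that should stay quiet in generated code can guard its report
+// with it (node being whatever AST node the check flagged):
+//
+//	if IsGenerated(pass, node.Pos()) {
+//		return
+//	}
+//
+// ReportfFG below bundles this guard with the call to pass.Reportf.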
+func IsGenerated(pass *analysis.Pass, pos token.Pos) bool { + file := pass.Fset.PositionFor(pos, false).Filename + m := pass.ResultOf[lint.IsGeneratedAnalyzer].(map[string]bool) + return m[file] +} + +func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) { + file := lint.DisplayPosition(pass.Fset, pos).Filename + m := pass.ResultOf[lint.IsGeneratedAnalyzer].(map[string]bool) + if m[file] { + return + } + pass.Reportf(pos, f, args...) +} diff --git a/lint/lintutil/format/format.go b/lint/lintutil/format/format.go index 23aa132de..e4c3fd315 100644 --- a/lint/lintutil/format/format.go +++ b/lint/lintutil/format/format.go @@ -51,7 +51,7 @@ type Text struct { } func (o Text) Format(p lint.Problem) { - fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Position), p.String()) + fmt.Fprintf(o.W, "%v: %s\n", relativePositionString(p.Pos), p.String()) } type JSON struct { @@ -85,11 +85,11 @@ func (o JSON) Format(p lint.Problem) { Code: p.Check, Severity: severity(p.Severity), Location: location{ - File: p.Position.Filename, - Line: p.Position.Line, - Column: p.Position.Column, + File: p.Pos.Filename, + Line: p.Pos.Line, + Column: p.Pos.Column, }, - Message: p.Text, + Message: p.Message, } _ = json.NewEncoder(o.W).Encode(jp) } @@ -102,20 +102,21 @@ type Stylish struct { } func (o *Stylish) Format(p lint.Problem) { - if p.Position.Filename == "" { - p.Position.Filename = "-" + pos := p.Pos + if pos.Filename == "" { + pos.Filename = "-" } - if p.Position.Filename != o.prevFile { + if pos.Filename != o.prevFile { if o.prevFile != "" { o.tw.Flush() fmt.Fprintln(o.W) } - fmt.Fprintln(o.W, p.Position.Filename) - o.prevFile = p.Position.Filename + fmt.Fprintln(o.W, pos.Filename) + o.prevFile = pos.Filename o.tw = tabwriter.NewWriter(o.W, 0, 4, 2, ' ', 0) } - fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", p.Position.Line, p.Position.Column, p.Check, p.Text) + fmt.Fprintf(o.tw, " (%d, %d)\t%s\t%s\n", pos.Line, pos.Column, p.Check, p.Message) } func (o *Stylish) Stats(total, errors, warnings int) { diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 701711831..ab8942589 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -8,65 +8,49 @@ package lintutil // import "honnef.co/go/tools/lint/lintutil" import ( + "crypto/sha256" "errors" "flag" "fmt" "go/build" "go/token" + "io" "log" "os" "regexp" "runtime" - "runtime/debug" "runtime/pprof" "strconv" "strings" - "time" "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/cache" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil/format" "honnef.co/go/tools/version" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" ) -func usage(name string, flags *flag.FlagSet) func() { - return func() { - fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) - fmt.Fprintf(os.Stderr, "\t%s [flags] files... 
# must be a single package\n", name) - fmt.Fprintf(os.Stderr, "Flags:\n") - flags.PrintDefaults() - } -} - -func parseIgnore(s string) ([]lint.Ignore, error) { - var out []lint.Ignore - if len(s) == 0 { - return nil, nil - } - for _, part := range strings.Fields(s) { - p := strings.Split(part, ":") - if len(p) != 2 { - return nil, errors.New("malformed ignore string") - } - path := p[0] - checks := strings.Split(p[1], ",") - out = append(out, &lint.GlobIgnore{Pattern: path, Checks: checks}) +func NewVersionFlag() flag.Getter { + tags := build.Default.ReleaseTags + v := tags[len(tags)-1][2:] + version := new(VersionFlag) + if err := version.Set(v); err != nil { + panic(fmt.Sprintf("internal error: %s", err)) } - return out, nil + return version } -type versionFlag int +type VersionFlag int -func (v *versionFlag) String() string { +func (v *VersionFlag) String() string { return fmt.Sprintf("1.%d", *v) + } -func (v *versionFlag) Set(s string) error { +func (v *VersionFlag) Set(s string) error { if len(s) < 3 { return errors.New("invalid Go version") } @@ -77,14 +61,26 @@ func (v *versionFlag) Set(s string) error { return errors.New("invalid Go version") } i, err := strconv.Atoi(s[2:]) - *v = versionFlag(i) + *v = VersionFlag(i) return err } -func (v *versionFlag) Get() interface{} { +func (v *VersionFlag) Get() interface{} { return int(*v) } +func usage(name string, flags *flag.FlagSet) func() { + return func() { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] # runs on package in current directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] packages\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] directory\n", name) + fmt.Fprintf(os.Stderr, "\t%s [flags] files... # must be a single package\n", name) + fmt.Fprintf(os.Stderr, "Flags:\n") + flags.PrintDefaults() + } +} + type list []string func (list *list) String() string { @@ -112,8 +108,6 @@ func FlagSet(name string) *flag.FlagSet { flags.String("f", "text", "Output `format` (valid choices are 'stylish', 'text' and 'json')") flags.String("explain", "", "Print description of `check`") - flags.Int("debug.max-concurrent-jobs", 0, "Number of jobs to run concurrently") - flags.Bool("debug.print-stats", false, "Print debug statistics") flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") flags.String("debug.memprofile", "", "Write memory profile to `file`") @@ -124,7 +118,7 @@ func FlagSet(name string) *flag.FlagSet { tags := build.Default.ReleaseTags v := tags[len(tags)-1][2:] - version := new(versionFlag) + version := new(VersionFlag) if err := version.Set(v); err != nil { panic(fmt.Sprintf("internal error: %s", err)) } @@ -133,22 +127,16 @@ func FlagSet(name string) *flag.FlagSet { return flags } -func findCheck(cs []lint.Checker, check string) (lint.Check, bool) { +func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) { for _, c := range cs { - for _, cc := range c.Checks() { - if cc.ID == check { - return cc, true - } + if c.Name == check { + return c, true } } - return lint.Check{}, false + return nil, false } -func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { - if _, ok := os.LookupEnv("GOGC"); !ok { - debug.SetGCPercent(50) - } - +func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *flag.FlagSet) { tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) @@ -158,8 +146,6 @@ func 
ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { showIgnored := fs.Lookup("show-ignored").Value.(flag.Getter).Get().(bool) explain := fs.Lookup("explain").Value.(flag.Getter).Get().(string) - maxConcurrentJobs := fs.Lookup("debug.max-concurrent-jobs").Value.(flag.Getter).Get().(int) - printStats := fs.Lookup("debug.print-stats").Value.(flag.Getter).Get().(bool) cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) @@ -194,7 +180,12 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { } if explain != "" { - check, ok := findCheck(cs, explain) + var haystack []*analysis.Analyzer + haystack = append(haystack, cs...) + for _, cum := range cums { + haystack = append(haystack, cum.Analyzer()) + } + check, ok := findCheck(haystack, explain) if !ok { fmt.Fprintln(os.Stderr, "Couldn't find check", explain) exit(1) @@ -207,16 +198,12 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { exit(0) } - ps, err := Lint(cs, fs.Args(), &Options{ - Tags: strings.Fields(tags), - LintTests: tests, - Ignores: ignore, - GoVersion: goVersion, - ReturnIgnored: showIgnored, - Config: cfg, - - MaxConcurrentJobs: maxConcurrentJobs, - PrintStats: printStats, + ps, err := Lint(cs, cums, fs.Args(), &Options{ + Tags: strings.Fields(tags), + LintTests: tests, + Ignores: ignore, + GoVersion: goVersion, + Config: cfg, }) if err != nil { fmt.Fprintln(os.Stderr, err) @@ -243,15 +230,19 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { ) fail := *fs.Lookup("fail").Value.(*list) - var allChecks []string - for _, p := range ps { - allChecks = append(allChecks, p.Check) + analyzers := make([]*analysis.Analyzer, len(cs), len(cs)+len(cums)) + copy(analyzers, cs) + for _, cum := range cums { + analyzers = append(analyzers, cum.Analyzer()) } - - shouldExit := lint.FilterChecks(allChecks, fail) + shouldExit := lint.FilterChecks(analyzers, fail) + shouldExit["compile"] = true total = len(ps) for _, p := range ps { + if p.Severity == lint.Ignored && !showIgnored { + continue + } if shouldExit[p.Check] { errors++ } else { @@ -271,75 +262,54 @@ func ProcessFlagSet(cs []lint.Checker, fs *flag.FlagSet) { type Options struct { Config config.Config - Tags []string - LintTests bool - Ignores string - GoVersion int - ReturnIgnored bool - - MaxConcurrentJobs int - PrintStats bool + Tags []string + LintTests bool + Ignores string + GoVersion int } -func Lint(cs []lint.Checker, paths []string, opt *Options) ([]lint.Problem, error) { - stats := lint.PerfStats{ - CheckerInits: map[string]time.Duration{}, +func computeSalt() ([]byte, error) { + if version.Version != "devel" { + return []byte(version.Version), nil } - - if opt == nil { - opt = &Options{} + p, err := os.Executable() + if err != nil { + return nil, err } - ignores, err := parseIgnore(opt.Ignores) + f, err := os.Open(p) if err != nil { return nil, err } - - conf := &packages.Config{ - Mode: packages.LoadAllSyntax, - Tests: opt.LintTests, - BuildFlags: []string{ - "-tags=" + strings.Join(opt.Tags, " "), - }, + defer f.Close() + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return nil, err } + return h.Sum(nil), nil +} - t := time.Now() - if len(paths) == 0 { - paths = []string{"."} - } - pkgs, err := packages.Load(conf, paths...) 
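+// Lint loads the packages matched by paths, runs the given analyzers and
+// cumulative checkers over them, and returns the problems found. Before
+// loading anything it derives a cache salt from the release version (or,
+// for devel builds, from a hash of the executable), presumably so cached
+// results are keyed to the exact tool build.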
+func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string, opt *Options) ([]lint.Problem, error) { + salt, err := computeSalt() if err != nil { - return nil, err - } - stats.PackageLoading = time.Since(t) - runtime.GC() - - var problems []lint.Problem - workingPkgs := make([]*packages.Package, 0, len(pkgs)) - for _, pkg := range pkgs { - if pkg.IllTyped { - problems = append(problems, compileErrors(pkg)...) - } else { - workingPkgs = append(workingPkgs, pkg) - } + return nil, fmt.Errorf("could not compute salt for cache: %s", err) } + cache.SetSalt(salt) - if len(workingPkgs) == 0 { - return problems, nil + if opt == nil { + opt = &Options{} } l := &lint.Linter{ - Checkers: cs, - Ignores: ignores, - GoVersion: opt.GoVersion, - ReturnIgnored: opt.ReturnIgnored, - Config: opt.Config, - - MaxConcurrentJobs: opt.MaxConcurrentJobs, - PrintStats: opt.PrintStats, + Checkers: cs, + CumulativeCheckers: cums, + GoVersion: opt.GoVersion, + Config: opt.Config, } - problems = append(problems, l.Lint(workingPkgs, &stats)...) - - return problems, nil + cfg := &packages.Config{} + if opt.LintTests { + cfg.Tests = true + } + return l.Lint(cfg, paths) } var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) @@ -361,34 +331,3 @@ func parsePos(pos string) token.Position { Column: col, } } - -func compileErrors(pkg *packages.Package) []lint.Problem { - if !pkg.IllTyped { - return nil - } - if len(pkg.Errors) == 0 { - // transitively ill-typed - var ps []lint.Problem - for _, imp := range pkg.Imports { - ps = append(ps, compileErrors(imp)...) - } - return ps - } - var ps []lint.Problem - for _, err := range pkg.Errors { - p := lint.Problem{ - Position: parsePos(err.Pos), - Text: err.Msg, - Check: "compile", - } - ps = append(ps, p) - } - return ps -} - -func ProcessArgs(name string, cs []lint.Checker, args []string) { - flags := FlagSet(name) - flags.Parse(args) - - ProcessFlagSet(cs, flags) -} diff --git a/lint/runner.go b/lint/runner.go new file mode 100644 index 000000000..332c805e9 --- /dev/null +++ b/lint/runner.go @@ -0,0 +1,659 @@ +package lint + +import ( + "bytes" + "encoding/gob" + "encoding/hex" + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + "regexp" + "runtime" + "sort" + "strconv" + "strings" + "sync" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/objectpath" + "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/cache" + "honnef.co/go/tools/loader" +) + +type Package struct { + *packages.Package + Imports map[string]*Package + initial bool + fromSource bool + hash string + + resultsMu sync.Mutex + results map[*analysis.Analyzer]*result + + cfg *config.Config + gen map[string]bool + problems []Problem + ignores []Ignore + errs []error +} + +type result struct { + v interface{} + err error + ready chan struct{} +} + +type buildResult struct { + done chan struct{} +} + +type Runner struct { + ld loader.Loader + cache *cache.Cache + + factsMu sync.RWMutex + facts map[types.Object][]analysis.Fact + pkgFacts map[*types.Package][]analysis.Fact + + builtMu sync.Mutex + built map[*Package]*buildResult +} + +func (r *Runner) importObjectFact(obj types.Object, fact analysis.Fact) bool { + r.factsMu.RLock() + defer r.factsMu.RUnlock() + // OPT(dh): consider looking for the fact in the analysisAction + // first, to avoid lock contention + for _, f := range r.facts[obj] { + if reflect.TypeOf(f) == reflect.TypeOf(fact) { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } + } + 
return false +} + +func (r *Runner) importPackageFact(pkg *types.Package, fact analysis.Fact) bool { + r.factsMu.RLock() + defer r.factsMu.RUnlock() + for _, f := range r.pkgFacts[pkg] { + if reflect.TypeOf(f) == reflect.TypeOf(fact) { + reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) + return true + } + } + return false +} + +func (r *Runner) exportObjectFact(ac *analysisAction, obj types.Object, fact analysis.Fact) { + r.factsMu.Lock() + r.facts[obj] = append(r.facts[obj], fact) + r.factsMu.Unlock() + path, err := objectpath.For(obj) + if err == nil { + ac.newFacts = append(ac.newFacts, Fact{string(path), fact}) + } +} + +func (r *Runner) exportPackageFact(ac *analysisAction, fact analysis.Fact) { + r.factsMu.Lock() + r.pkgFacts[ac.pkg.Types] = append(r.pkgFacts[ac.pkg.Types], fact) + r.factsMu.Unlock() + ac.newFacts = append(ac.newFacts, Fact{"", fact}) +} + +type Fact struct { + Path string + Fact analysis.Fact +} + +type analysisAction struct { + analyzer *analysis.Analyzer + pkg *Package + newFacts []Fact + problems []Problem +} + +func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { + p := Problem{ + Pos: DisplayPosition(pass.Fset, d.Pos), + Message: d.Message, + Check: pass.Analyzer.Name, + } + ac.problems = append(ac.problems, p) +} + +func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { + ac.pkg.resultsMu.Lock() + res := ac.pkg.results[ac.analyzer] + if res != nil { + ac.pkg.resultsMu.Unlock() + <-res.ready + return res.v, res.err + } else { + res = &result{ + ready: make(chan struct{}), + } + ac.pkg.results[ac.analyzer] = res + ac.pkg.resultsMu.Unlock() + + defer func() { + res.v = ret + res.err = err + close(res.ready) + }() + + // Package may be a dependency or a package the user requested + // Facts for a dependency may be cached or not + // Diagnostics for a user package may be cached or not (not yet) + // When we have to analyze a package, we have to analyze it with all dependencies. + + pass := new(analysis.Pass) + *pass = analysis.Pass{ + Analyzer: ac.analyzer, + Fset: ac.pkg.Fset, + Files: ac.pkg.Syntax, + // type information may be nil or may be populated. if it is + // nil, it will get populated later. 
+ Pkg: ac.pkg.Types, + TypesInfo: ac.pkg.TypesInfo, + TypesSizes: ac.pkg.TypesSizes, + ResultOf: map[*analysis.Analyzer]interface{}{}, + ImportObjectFact: r.importObjectFact, + ImportPackageFact: r.importPackageFact, + ExportObjectFact: func(obj types.Object, fact analysis.Fact) { + r.exportObjectFact(ac, obj, fact) + }, + ExportPackageFact: func(fact analysis.Fact) { + r.exportPackageFact(ac, fact) + }, + Report: func(d analysis.Diagnostic) { + ac.report(pass, d) + }, + } + + if !ac.pkg.initial { + // Don't report problems in dependencies + pass.Report = func(analysis.Diagnostic) {} + } + return r.runAnalysisUser(pass, ac) + } +} + +func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bool) { + if len(a.FactTypes) == 0 { + return nil, true + } + + var facts []Fact + // Look in the cache for facts + aID, err := passActionID(pkg, a) + if err != nil { + return nil, false + } + aID = cache.Subkey(aID, "facts") + b, _, err := r.cache.GetBytes(aID) + if err != nil { + // No cached facts, analyse this package like a user-provided one, but ignore diagnostics + return nil, false + } + + if err := gob.NewDecoder(bytes.NewReader(b)).Decode(&facts); err != nil { + // Cached facts are broken, analyse this package like a user-provided one, but ignore diagnostics + return nil, false + } + return facts, true +} + +func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { + if !ac.pkg.fromSource { + panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg)) + } + + // User-provided package, analyse it + // First analyze it with dependencies + var req []*analysis.Analyzer + req = append(req, ac.analyzer.Requires...) + if pass.Analyzer != IsGeneratedAnalyzer && pass.Analyzer != config.Analyzer { + // Ensure all packages have the generated map and config. This is + // required by interna of the runner. Analyses that themselves + // make use of either have an explicit dependency so that other + // runners work correctly, too. + req = append(req, IsGeneratedAnalyzer, config.Analyzer) + } + for _, req := range req { + acReq := &analysisAction{analyzer: req, pkg: ac.pkg} + ret, err := r.runAnalysis(acReq) + if err != nil { + // We couldn't run a dependency, no point in going on + return nil, err + } + + pass.ResultOf[req] = ret + } + + // Then with this analyzer + ret, err := ac.analyzer.Run(pass) + if err != nil { + return nil, err + } + + // Persist facts to cache + if len(ac.analyzer.FactTypes) > 0 { + buf := &bytes.Buffer{} + if err := gob.NewEncoder(buf).Encode(ac.newFacts); err != nil { + return nil, err + } + aID, err := passActionID(ac.pkg, ac.analyzer) + if err != nil { + return nil, err + } + aID = cache.Subkey(aID, "facts") + if err := r.cache.PutBytes(aID, buf.Bytes()); err != nil { + return nil, err + } + } + + return ret, nil +} + +func NewRunner() (*Runner, error) { + cache, err := cache.Default() + if err != nil { + return nil, err + } + + return &Runner{ + cache: cache, + facts: map[types.Object][]analysis.Fact{}, + pkgFacts: map[*types.Package][]analysis.Fact{}, + built: map[*Package]*buildResult{}, + }, nil +} + +func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer) ([]*Package, error) { + for _, a := range analyzers { + for _, f := range a.FactTypes { + gob.Register(f) + } + } + + var dcfg packages.Config + if cfg != nil { + dcfg = *cfg + } + loaded, err := r.ld.Graph(dcfg, patterns...) 
+ if err != nil { + return nil, err + } + + m := map[*packages.Package]*Package{} + packages.Visit(loaded, nil, func(l *packages.Package) { + m[l] = &Package{ + Package: l, + Imports: map[string]*Package{}, + results: map[*analysis.Analyzer]*result{}, + } + for _, err := range l.Errors { + m[l].errs = append(m[l].errs, err) + } + for k, v := range l.Imports { + m[l].Imports[k] = m[v] + } + + m[l].hash, err = packageHash(m[l]) + if err != nil { + m[l].errs = append(m[l].errs, err) + } + }) + pkgs := make([]*Package, len(loaded)) + for i, l := range loaded { + pkgs[i] = m[l] + pkgs[i].initial = true + } + + var wg sync.WaitGroup + wg.Add(len(pkgs)) + // OPT(dh): The ideal number of parallel jobs depends on the shape + // of the graph. We may risk having one goroutine doing work and + // all other goroutines being blocked on its completion. At the + // same time, Go dependency graphs aren't always very amiable + // towards parallelism. For example, on the standard library, we + // only achieve about 400% CPU usage (out of a possible 800% on + // this machine), and only 2x scaling. + sem := make(chan struct{}, runtime.GOMAXPROCS(-1)) + for _, pkg := range pkgs { + pkg := pkg + sem <- struct{}{} + go func() { + r.processPkg(pkg, analyzers) + <-sem + wg.Done() + }() + } + wg.Wait() + + return pkgs, nil +} + +var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) + +func parsePos(pos string) token.Position { + if pos == "-" || pos == "" { + return token.Position{} + } + parts := posRe.FindStringSubmatch(pos) + if parts == nil { + panic(fmt.Sprintf("internal error: malformed position %q", pos)) + } + file := parts[1] + line, _ := strconv.Atoi(parts[2]) + col, _ := strconv.Atoi(parts[3]) + return token.Position{ + Filename: file, + Line: line, + Column: col, + } +} + +func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { + if pkg.Types != nil { + panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package)) + } + // Load type information + if pkg.initial { + // Load package from source + pkg.fromSource = true + return r.ld.LoadFromSource(pkg.Package) + } + + var allFacts []Fact + failed := false + for _, a := range analyzers { + if len(a.FactTypes) > 0 { + facts, ok := r.loadCachedFacts(a, pkg) + if !ok { + failed = true + break + } + allFacts = append(allFacts, facts...) + } + } + + if failed { + pkg.fromSource = true + return r.ld.LoadFromSource(pkg.Package) + } + + // Load package from export data + if err := r.ld.LoadFromExport(pkg.Package); err != nil { + // We asked Go to give us up to date export data, yet + // we can't load it. There must be something wrong. + // + // Attempt loading from source. This should fail (because + // otherwise there would be export data); we just want to + // get the compile errors. If loading from source succeeds + // we discard the result, anyway. Otherwise we'll fail + // when trying to reload from export data later. 
+ pkg.fromSource = true + if err := r.ld.LoadFromSource(pkg.Package); err != nil { + return err + } + // Make sure this package can't be imported successfully + pkg.Package.Errors = append(pkg.Package.Errors, packages.Error{ + Pos: "-", + Msg: fmt.Sprintf("could not load export data: %s", err), + Kind: packages.ParseError, + }) + return fmt.Errorf("could not load export data: %s", err) + } + + for _, f := range allFacts { + if f.Path == "" { + // This is a package fact + r.factsMu.Lock() + r.pkgFacts[pkg.Types] = append(r.pkgFacts[pkg.Types], f.Fact) + r.factsMu.Unlock() + continue + } + obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) + if err != nil { + // Be lenient about these errors. For example, when + // analysing io/ioutil from source, we may get a fact + // for methods on the devNull type, and objectpath + // will happily create a path for them. However, when + // we later load io/ioutil from export data, the path + // no longer resolves. + // + // If an exported type embeds the unexported type, + // then (part of) the unexported type will become part + // of the type information and our path will resolve + // again. + continue + } + r.factsMu.Lock() + r.facts[obj] = append(r.facts[obj], f.Fact) + r.factsMu.Unlock() + } + return nil +} + +func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { + r.builtMu.Lock() + res := r.built[pkg] + if res != nil { + r.builtMu.Unlock() + <-res.done + return + } + + res = &buildResult{done: make(chan struct{})} + r.built[pkg] = res + r.builtMu.Unlock() + + defer func() { + // Clear information we no longer need. Make sure to do this + // when returning from processPkg so that we clear + // dependencies, not just initial packages. + pkg.TypesInfo = nil + pkg.Syntax = nil + pkg.results = nil + close(res.done) + }() + + if len(pkg.errs) != 0 { + return + } + + for _, imp := range pkg.Imports { + r.processPkg(imp, analyzers) + if len(imp.errs) > 0 { + var s string + for _, err := range imp.errs { + s += "\n\t" + err.Error() + } + pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s: %s", imp, pkg, s)) + return + } + } + if pkg.PkgPath == "unsafe" { + pkg.Types = types.Unsafe + return + } + + if err := r.loadPkg(pkg, analyzers); err != nil { + pkg.errs = append(pkg.errs, err) + return + } + + if !pkg.fromSource { + // Nothing left to do for the package. + return + } + + // Run analyses on initial packages and those missing facts + var wg sync.WaitGroup + wg.Add(len(analyzers)) + errs := make([]error, len(analyzers)) + var acs []*analysisAction + for i, a := range analyzers { + i := i + a := a + ac := &analysisAction{analyzer: a, pkg: pkg} + acs = append(acs, ac) + go func() { + defer wg.Done() + // Only initial packages and packages with missing + // facts will have been loaded from source. + if pkg.initial || len(a.FactTypes) > 0 { + if _, err := r.runAnalysis(ac); err != nil { + errs[i] = fmt.Errorf("error running analyzer %s on %s: %s", a, pkg, err) + return + } + } + }() + } + wg.Wait() + for _, err := range errs { + if err != nil { + pkg.errs = append(pkg.errs, err) + } + } + + // We can't process ignores at this point because `unused` needs + // to see more than one package to make its decision. + ignores, problems := parseDirectives(pkg.Package) + pkg.ignores = append(pkg.ignores, ignores...) + pkg.problems = append(pkg.problems, problems...) + for _, ac := range acs { + pkg.problems = append(pkg.problems, ac.problems...) 
+ } + pkg.cfg = pkg.results[config.Analyzer].v.(*config.Config) + pkg.gen = pkg.results[IsGeneratedAnalyzer].v.(map[string]bool) + + // In a previous version of the code, we would throw away all type + // information and reload it from export data. That was + // nonsensical. The *types.Package doesn't keep any information + // live that export data wouldn't also. We only need to discard + // the AST and the TypesInfo maps; that happens after we return + // from processPkg. +} + +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { + var ignores []Ignore + var problems []Problem + + for _, f := range pkg.Syntax { + found := false + commentLoop: + for _, cg := range f.Comments { + for _, c := range cg.List { + if strings.Contains(c.Text, "//lint:") { + found = true + break commentLoop + } + } + } + if !found { + continue + } + cm := ast.NewCommentMap(pkg.Fset, f, f.Comments) + for node, cgs := range cm { + for _, cg := range cgs { + for _, c := range cg.List { + if !strings.HasPrefix(c.Text, "//lint:") { + continue + } + cmd, args := parseDirective(c.Text) + switch cmd { + case "ignore", "file-ignore": + if len(args) < 2 { + // FIXME(dh): this causes duplicated warnings when using megacheck + p := Problem{ + Pos: DisplayPosition(pkg.Fset, c.Pos()), + Message: "malformed linter directive; missing the required reason field?", + Severity: Error, + Check: "", + } + problems = append(problems, p) + continue + } + default: + // unknown directive, ignore + continue + } + checks := strings.Split(args[0], ",") + pos := DisplayPosition(pkg.Fset, node.Pos()) + var ig Ignore + switch cmd { + case "ignore": + ig = &LineIgnore{ + File: pos.Filename, + Line: pos.Line, + Checks: checks, + Pos: c.Pos(), + } + case "file-ignore": + ig = &FileIgnore{ + File: pos.Filename, + Checks: checks, + } + } + ignores = append(ignores, ig) + } + } + } + } + + return ignores, problems +} + +func packageHash(pkg *Package) (string, error) { + key := cache.NewHash("package hash") + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + for _, f := range pkg.CompiledGoFiles { + h, err := cache.FileHash(f) + if err != nil { + return "", err + } + fmt.Fprintf(key, "file %s %x\n", f, h) + } + imps := make([]*Package, 0, len(pkg.Imports)) + for _, v := range pkg.Imports { + imps = append(imps, v) + } + sort.Slice(imps, func(i, j int) bool { + return imps[i].PkgPath < imps[j].PkgPath + }) + for _, dep := range imps { + if dep.PkgPath == "unsafe" { + continue + } + + fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, dep.hash) + } + h := key.Sum() + return hex.EncodeToString(h[:]), nil +} + +func passActionID(pkg *Package, analyzer *analysis.Analyzer) (cache.ActionID, error) { + key := cache.NewHash("action ID") + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + fmt.Fprintf(key, "pkghash %s\n", pkg.hash) + fmt.Fprintf(key, "analyzer %s\n", analyzer.Name) + + return key.Sum(), nil +} diff --git a/lint/testdata/src/Test/line-ignores.go b/lint/testdata/src/Test/line-ignores.go index 77660e69c..6479f4ae4 100644 --- a/lint/testdata/src/Test/line-ignores.go +++ b/lint/testdata/src/Test/line-ignores.go @@ -3,20 +3,17 @@ package pkg // the line directive should not affect the line 
ignores //line random-file:1 -func fn1() {} // MATCH "test problem" +func fn1() {} // want `test problem` //lint:ignore TEST1000 This should be ignored, because ... //lint:ignore XXX1000 Testing that multiple linter directives work correctly func fn2() {} -//lint:ignore TEST1000 -func fn3() {} // MATCH "test problem" +//lint:ignore TEST1000 // want `malformed linter directive` +func fn3() {} // want `test problem` //lint:ignore TEST1000 ignore func fn4() { - //lint:ignore TEST1000 ignore + //lint:ignore TEST1000 ignore // want `this linter directive didn't match anything` var _ int } - -// MATCH:12 "malformed linter directive" -// MATCH:17 "this linter directive didn't match anything" diff --git a/lint/testutil/util.go b/lint/testutil/util.go deleted file mode 100644 index f3b046041..000000000 --- a/lint/testutil/util.go +++ /dev/null @@ -1,261 +0,0 @@ -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file or at -// https://developers.google.com/open-source/licenses/bsd. - -// Package testutil provides helpers for testing staticcheck. -package testutil // import "honnef.co/go/tools/lint/testutil" - -import ( - "fmt" - "go/parser" - "go/token" - "io/ioutil" - "os" - "path/filepath" - "regexp" - "sort" - "strconv" - "strings" - "testing" - - "golang.org/x/tools/go/packages" - "honnef.co/go/tools/config" - "honnef.co/go/tools/lint" -) - -func TestAll(t *testing.T, c lint.Checker, dir string) { - testPackages(t, c, dir) -} - -func testPackages(t *testing.T, c lint.Checker, dir string) { - gopath := filepath.Join("testdata", dir) - gopath, err := filepath.Abs(gopath) - if err != nil { - t.Fatal(err) - } - fis, err := ioutil.ReadDir(filepath.Join(gopath, "src")) - if err != nil { - if os.IsNotExist(err) { - // no packages to test - return - } - t.Fatal("couldn't get test packages:", err) - } - - var paths []string - for _, fi := range fis { - if strings.HasSuffix(fi.Name(), ".disabled") { - continue - } - paths = append(paths, fi.Name()) - } - - conf := &packages.Config{ - Mode: packages.LoadAllSyntax, - Tests: true, - Env: append(os.Environ(), "GOPATH="+gopath), - } - - pkgs, err := packages.Load(conf, paths...) - if err != nil { - t.Error("Error loading packages:", err) - return - } - - versions := map[int][]*packages.Package{} - for _, pkg := range pkgs { - path := strings.TrimSuffix(pkg.Types.Path(), ".test") - parts := strings.Split(path, "_") - - version := 0 - if len(parts) > 1 { - part := parts[len(parts)-1] - if len(part) >= 4 && strings.HasPrefix(part, "go1") { - v, err := strconv.Atoi(part[len("go1"):]) - if err != nil { - continue - } - version = v - } - } - versions[version] = append(versions[version], pkg) - } - - for version, pkgs := range versions { - sources := map[string][]byte{} - var files []string - - for _, pkg := range pkgs { - files = append(files, pkg.GoFiles...) 
- for _, fi := range pkg.GoFiles { - src, err := ioutil.ReadFile(fi) - if err != nil { - t.Fatal(err) - } - sources[fi] = src - } - } - - sort.Strings(files) - filesUniq := make([]string, 0, len(files)) - if len(files) < 2 { - filesUniq = files - } else { - filesUniq = append(filesUniq, files[0]) - prev := files[0] - for _, f := range files[1:] { - if f == prev { - continue - } - prev = f - filesUniq = append(filesUniq, f) - } - } - - lintGoVersion(t, c, version, pkgs, filesUniq, sources) - } -} - -func lintGoVersion( - t *testing.T, - c lint.Checker, - version int, - pkgs []*packages.Package, - files []string, - sources map[string][]byte, -) { - l := &lint.Linter{Checkers: []lint.Checker{c}, GoVersion: version, Config: config.Config{Checks: []string{"all"}}} - problems := l.Lint(pkgs, nil) - - for _, fi := range files { - src := sources[fi] - - ins := parseInstructions(t, fi, src) - - for _, in := range ins { - ok := false - for i, p := range problems { - if p.Position.Line != in.Line || p.Position.Filename != fi { - continue - } - if in.Match.MatchString(p.Text) { - // remove this problem from ps - copy(problems[i:], problems[i+1:]) - problems = problems[:len(problems)-1] - - ok = true - break - } - } - if !ok { - t.Errorf("Lint failed at %s:%d; /%v/ did not match", fi, in.Line, in.Match) - } - } - } - for _, p := range problems { - t.Errorf("Unexpected problem at %s: %v", p.Position, p.Text) - } -} - -type instruction struct { - Line int // the line number this applies to - Match *regexp.Regexp // what pattern to match - Replacement string // what the suggested replacement line should be -} - -// parseInstructions parses instructions from the comments in a Go source file. -// It returns nil if none were parsed. -func parseInstructions(t *testing.T, filename string, src []byte) []instruction { - fset := token.NewFileSet() - f, err := parser.ParseFile(fset, filename, src, parser.ParseComments) - if err != nil { - t.Fatalf("Test file %v does not parse: %v", filename, err) - } - var ins []instruction - for _, cg := range f.Comments { - ln := fset.PositionFor(cg.Pos(), false).Line - raw := cg.Text() - for _, line := range strings.Split(raw, "\n") { - if line == "" || strings.HasPrefix(line, "#") { - continue - } - if line == "OK" && ins == nil { - // so our return value will be non-nil - ins = make([]instruction, 0) - continue - } - if !strings.Contains(line, "MATCH") { - continue - } - rx, err := extractPattern(line) - if err != nil { - t.Fatalf("At %v:%d: %v", filename, ln, err) - } - matchLine := ln - if i := strings.Index(line, "MATCH:"); i >= 0 { - // This is a match for a different line. 
- lns := strings.TrimPrefix(line[i:], "MATCH:") - lns = lns[:strings.Index(lns, " ")] - matchLine, err = strconv.Atoi(lns) - if err != nil { - t.Fatalf("Bad match line number %q at %v:%d: %v", lns, filename, ln, err) - } - } - var repl string - if r, ok := extractReplacement(line); ok { - repl = r - } - ins = append(ins, instruction{ - Line: matchLine, - Match: rx, - Replacement: repl, - }) - } - } - return ins -} - -func extractPattern(line string) (*regexp.Regexp, error) { - n := strings.Index(line, " ") - if n == 01 { - return nil, fmt.Errorf("malformed match instruction %q", line) - } - line = line[n+1:] - var pat string - switch line[0] { - case '/': - a, b := strings.Index(line, "/"), strings.LastIndex(line, "/") - if a == -1 || a == b { - return nil, fmt.Errorf("malformed match instruction %q", line) - } - pat = line[a+1 : b] - case '"': - a, b := strings.Index(line, `"`), strings.LastIndex(line, `"`) - if a == -1 || a == b { - return nil, fmt.Errorf("malformed match instruction %q", line) - } - pat = regexp.QuoteMeta(line[a+1 : b]) - default: - return nil, fmt.Errorf("malformed match instruction %q", line) - } - - rx, err := regexp.Compile(pat) - if err != nil { - return nil, fmt.Errorf("bad match pattern %q: %v", pat, err) - } - return rx, nil -} - -func extractReplacement(line string) (string, bool) { - // Look for this: / -> ` - // (the end of a match and start of a backtick string), - // and then the closing backtick. - const start = "/ -> `" - a, b := strings.Index(line, start), strings.LastIndex(line, "`") - if a < 0 || a > b { - return "", false - } - return line[a+len(start) : b], true -} diff --git a/loader/loader.go b/loader/loader.go new file mode 100644 index 000000000..9c6885d48 --- /dev/null +++ b/loader/loader.go @@ -0,0 +1,197 @@ +package loader + +import ( + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "go/types" + "log" + "os" + "sync" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/packages" +) + +type Loader struct { + exportMu sync.RWMutex +} + +// Graph resolves patterns and returns packages with all the +// information required to later load type information, and optionally +// syntax trees. +// +// The provided config can set any setting with the exception of Mode. +func (ld *Loader) Graph(cfg packages.Config, patterns ...string) ([]*packages.Package, error) { + cfg.Mode = packages.NeedName | packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedTypesSizes + pkgs, err := packages.Load(&cfg, patterns...) + if err != nil { + return nil, err + } + fset := token.NewFileSet() + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + pkg.Fset = fset + }) + return pkgs, nil +} + +// LoadFromExport loads a package from export data. All of its +// dependencies must have been loaded already. 
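+//
+// For example, if package a imports b and b imports c, the expected
+// order is c, then b, then a; calling LoadFromExport on a while
+// b.Types is still nil fails with a "dependency hasn't been loaded
+// yet" error.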
+func (ld *Loader) LoadFromExport(pkg *packages.Package) error { + ld.exportMu.Lock() + defer ld.exportMu.Unlock() + + pkg.IllTyped = true + for path, pkg := range pkg.Imports { + if pkg.Types == nil { + return fmt.Errorf("dependency %q hasn't been loaded yet", path) + } + } + if pkg.ExportFile == "" { + return fmt.Errorf("no export data for %q", pkg.ID) + } + f, err := os.Open(pkg.ExportFile) + if err != nil { + return err + } + defer f.Close() + + r, err := gcexportdata.NewReader(f) + if err != nil { + return err + } + + view := make(map[string]*types.Package) // view seen by gcexportdata + seen := make(map[*packages.Package]bool) // all visited packages + var visit func(pkgs map[string]*packages.Package) + visit = func(pkgs map[string]*packages.Package) { + for _, pkg := range pkgs { + if !seen[pkg] { + seen[pkg] = true + view[pkg.PkgPath] = pkg.Types + visit(pkg.Imports) + } + } + } + visit(pkg.Imports) + tpkg, err := gcexportdata.Read(r, pkg.Fset, view, pkg.PkgPath) + if err != nil { + return err + } + pkg.Types = tpkg + pkg.IllTyped = false + return nil +} + +// LoadFromSource loads a package from source. All of its dependencies +// must have been loaded already. +func (ld *Loader) LoadFromSource(pkg *packages.Package) error { + ld.exportMu.RLock() + defer ld.exportMu.RUnlock() + + pkg.IllTyped = true + pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) + + // OPT(dh): many packages have few files, much fewer than there + // are CPU cores. Additionally, parsing each individual file is + // very fast. A naive parallel implementation of this loop won't + // be faster, and tends to be slower due to extra scheduling, + // bookkeeping and potentially false sharing of cache lines. + pkg.Syntax = make([]*ast.File, len(pkg.CompiledGoFiles)) + for i, file := range pkg.CompiledGoFiles { + f, err := parser.ParseFile(pkg.Fset, file, nil, parser.ParseComments) + if err != nil { + pkg.Errors = append(pkg.Errors, convertError(err)...) + return err + } + pkg.Syntax[i] = f + } + pkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + + importer := func(path string) (*types.Package, error) { + if path == "unsafe" { + return types.Unsafe, nil + } + imp := pkg.Imports[path] + if imp == nil { + return nil, nil + } + if len(imp.Errors) > 0 { + return nil, imp.Errors[0] + } + return imp.Types, nil + } + tc := &types.Config{ + Importer: importerFunc(importer), + Error: func(err error) { + pkg.Errors = append(pkg.Errors, convertError(err)...) 
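+			// (convertError, below, maps parser, scanner and
+			// type-checker errors onto packages.Error values with
+			// positions, so every stage reports errors in the same
+			// shape.)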
+ }, + } + err := types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) + if err != nil { + return err + } + pkg.IllTyped = false + return nil +} + +func convertError(err error) []packages.Error { + var errs []packages.Error + // taken from go/packages + switch err := err.(type) { + case packages.Error: + // from driver + errs = append(errs, err) + + case *os.PathError: + // from parser + errs = append(errs, packages.Error{ + Pos: err.Path + ":1", + Msg: err.Err.Error(), + Kind: packages.ParseError, + }) + + case scanner.ErrorList: + // from parser + for _, err := range err { + errs = append(errs, packages.Error{ + Pos: err.Pos.String(), + Msg: err.Msg, + Kind: packages.ParseError, + }) + } + + case types.Error: + // from type checker + errs = append(errs, packages.Error{ + Pos: err.Fset.Position(err.Pos).String(), + Msg: err.Msg, + Kind: packages.TypeError, + }) + + default: + // unexpected impoverished error from parser? + errs = append(errs, packages.Error{ + Pos: "-", + Msg: err.Error(), + Kind: packages.UnknownError, + }) + + // If you see this error message, please file a bug. + log.Printf("internal error: error %q (%T) without position", err, err) + } + return errs +} + +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } diff --git a/simple/analysis.go b/simple/analysis.go new file mode 100644 index 000000000..9449e2e74 --- /dev/null +++ b/simple/analysis.go @@ -0,0 +1,223 @@ +package simple + +import ( + "flag" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintutil" +) + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version") + return *fs +} + +var Analyzers = map[string]*analysis.Analyzer{ + "S1000": { + Name: "S1000", + Run: LintSingleCaseSelect, + Doc: docS1000, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1001": { + Name: "S1001", + Run: LintLoopCopy, + Doc: docS1001, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1002": { + Name: "S1002", + Run: LintIfBoolCmp, + Doc: docS1002, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1003": { + Name: "S1003", + Run: LintStringsContains, + Doc: docS1003, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1004": { + Name: "S1004", + Run: LintBytesCompare, + Doc: docS1004, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1005": { + Name: "S1005", + Run: LintUnnecessaryBlank, + Doc: docS1005, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1006": { + Name: "S1006", + Run: LintForTrue, + Doc: docS1006, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1007": { + Name: "S1007", + Run: LintRegexpRaw, + Doc: docS1007, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1008": { + Name: "S1008", + Run: LintIfReturn, + Doc: docS1008, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: 
newFlagSet(), + }, + "S1009": { + Name: "S1009", + Run: LintRedundantNilCheckWithLen, + Doc: docS1009, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1010": { + Name: "S1010", + Run: LintSlicing, + Doc: docS1010, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1011": { + Name: "S1011", + Run: LintLoopAppend, + Doc: docS1011, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1012": { + Name: "S1012", + Run: LintTimeSince, + Doc: docS1012, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1016": { + Name: "S1016", + Run: LintSimplerStructConversion, + Doc: docS1016, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1017": { + Name: "S1017", + Run: LintTrim, + Doc: docS1017, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1018": { + Name: "S1018", + Run: LintLoopSlide, + Doc: docS1018, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1019": { + Name: "S1019", + Run: LintMakeLenCap, + Doc: docS1019, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1020": { + Name: "S1020", + Run: LintAssertNotNil, + Doc: docS1020, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1021": { + Name: "S1021", + Run: LintDeclareAssign, + Doc: docS1021, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1023": { + Name: "S1023", + Run: LintRedundantBreak, + Doc: docS1023, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1024": { + Name: "S1024", + Run: LintTimeUntil, + Doc: docS1024, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1025": { + Name: "S1025", + Run: LintRedundantSprintf, + Doc: docS1025, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1028": { + Name: "S1028", + Run: LintErrorsNewSprintf, + Doc: docS1028, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1029": { + Name: "S1029", + Run: LintRangeStringRunes, + Doc: docS1029, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "S1030": { + Name: "S1030", + Run: LintBytesBufferConversions, + Doc: docS1030, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1031": { + Name: "S1031", + Run: LintNilCheckAroundRange, + Doc: docS1031, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1032": { + Name: "S1032", + Run: LintSortHelpers, + Doc: docS1032, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1033": { + Name: "S1033", + Run: LintGuardedDelete, + Doc: docS1033, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "S1034": { + Name: "S1034", + Run: LintSimplifyTypeSwitch, + Doc: docS1034, + Requires: []*analysis.Analyzer{inspect.Analyzer, 
lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, +} diff --git a/simple/doc.go b/simple/doc.go index ea437da56..b84b59d8b 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -424,3 +424,15 @@ sort.Strings(x) Available since 2019.1 ` + +var docS1033 = `Unnecessary guard around call to delete + +Available since: + Unreleased +` + +var docS1034 = `Use result of type assertion to simplify cases + +Available since: + Unreleased +` diff --git a/simple/lint.go b/simple/lint.go index db805770c..183adfc9f 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -8,67 +8,19 @@ import ( "go/token" "go/types" "reflect" + "sort" "strings" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" . "honnef.co/go/tools/arg" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . "honnef.co/go/tools/lint/lintdsl" - - "golang.org/x/tools/go/types/typeutil" ) -type Checker struct { - CheckGenerated bool - MS *typeutil.MethodSetCache -} - -func NewChecker() *Checker { - return &Checker{ - MS: &typeutil.MethodSetCache{}, - } -} - -func (*Checker) Name() string { return "gosimple" } -func (*Checker) Prefix() string { return "S" } - -func (c *Checker) Init(prog *lint.Program) {} - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "S1000", FilterGenerated: true, Fn: c.LintSingleCaseSelect, Doc: docS1000}, - {ID: "S1001", FilterGenerated: true, Fn: c.LintLoopCopy, Doc: docS1001}, - {ID: "S1002", FilterGenerated: true, Fn: c.LintIfBoolCmp, Doc: docS1002}, - {ID: "S1003", FilterGenerated: true, Fn: c.LintStringsContains, Doc: docS1003}, - {ID: "S1004", FilterGenerated: true, Fn: c.LintBytesCompare, Doc: docS1004}, - {ID: "S1005", FilterGenerated: true, Fn: c.LintUnnecessaryBlank, Doc: docS1005}, - {ID: "S1006", FilterGenerated: true, Fn: c.LintForTrue, Doc: docS1006}, - {ID: "S1007", FilterGenerated: true, Fn: c.LintRegexpRaw, Doc: docS1007}, - {ID: "S1008", FilterGenerated: true, Fn: c.LintIfReturn, Doc: docS1008}, - {ID: "S1009", FilterGenerated: true, Fn: c.LintRedundantNilCheckWithLen, Doc: docS1009}, - {ID: "S1010", FilterGenerated: true, Fn: c.LintSlicing, Doc: docS1010}, - {ID: "S1011", FilterGenerated: true, Fn: c.LintLoopAppend, Doc: docS1011}, - {ID: "S1012", FilterGenerated: true, Fn: c.LintTimeSince, Doc: docS1012}, - {ID: "S1016", FilterGenerated: true, Fn: c.LintSimplerStructConversion, Doc: docS1016}, - {ID: "S1017", FilterGenerated: true, Fn: c.LintTrim, Doc: docS1017}, - {ID: "S1018", FilterGenerated: true, Fn: c.LintLoopSlide, Doc: docS1018}, - {ID: "S1019", FilterGenerated: true, Fn: c.LintMakeLenCap, Doc: docS1019}, - {ID: "S1020", FilterGenerated: true, Fn: c.LintAssertNotNil, Doc: docS1020}, - {ID: "S1021", FilterGenerated: true, Fn: c.LintDeclareAssign, Doc: docS1021}, - {ID: "S1023", FilterGenerated: true, Fn: c.LintRedundantBreak, Doc: docS1023}, - {ID: "S1024", FilterGenerated: true, Fn: c.LintTimeUntil, Doc: docS1024}, - {ID: "S1025", FilterGenerated: true, Fn: c.LintRedundantSprintf, Doc: docS1025}, - {ID: "S1028", FilterGenerated: true, Fn: c.LintErrorsNewSprintf, Doc: docS1028}, - {ID: "S1029", FilterGenerated: false, Fn: c.LintRangeStringRunes, Doc: docS1029}, - {ID: "S1030", FilterGenerated: true, Fn: c.LintBytesBufferConversions, Doc: docS1030}, - {ID: "S1031", FilterGenerated: true, Fn: c.LintNilCheckAroundRange, Doc: docS1031}, - {ID: "S1032", FilterGenerated: true, Fn: c.LintSortHelpers, Doc: docS1032}, - {ID: "S1033", FilterGenerated: true, Fn: c.LintGuardedDelete, Doc: ``}, - 
{ID: "S1034", FilterGenerated: true, Fn: c.LintSimplifyTypeSwitch, Doc: ``}, - } -} - -func (c *Checker) LintSingleCaseSelect(j *lint.Job) { +func LintSingleCaseSelect(pass *analysis.Pass) (interface{}, error) { isSingleSelect := func(node ast.Node) bool { v, ok := node.(*ast.SelectStmt) if !ok { @@ -92,7 +44,7 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { return } seen[v.Body.List[0]] = struct{}{} - j.Errorf(node, "should use for range instead of for { select {} }") + ReportfFG(pass, node.Pos(), "should use for range instead of for { select {} }") case *ast.SelectStmt: if _, ok := seen[v]; ok { return @@ -100,13 +52,14 @@ func (c *Checker) LintSingleCaseSelect(j *lint.Job) { if !isSingleSelect(v) { return } - j.Errorf(node, "should use a simple channel send/receive instead of select with a single case") + ReportfFG(pass, node.Pos(), "should use a simple channel send/receive instead of select with a single case") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintLoopCopy(j *lint.Job) { +func LintLoopCopy(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) @@ -128,7 +81,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { return } - if _, ok := j.Pkg.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok { + if _, ok := pass.TypesInfo.TypeOf(lhs.X).(*types.Slice); !ok { return } lidx, ok := lhs.Index.(*ast.Ident) @@ -139,16 +92,16 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.TypeOf(lhs) == nil || j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0]) == nil { + if pass.TypesInfo.TypeOf(lhs) == nil || pass.TypesInfo.TypeOf(stmt.Rhs[0]) == nil { return } - if j.Pkg.TypesInfo.ObjectOf(lidx) != j.Pkg.TypesInfo.ObjectOf(key) { + if pass.TypesInfo.ObjectOf(lidx) != pass.TypesInfo.ObjectOf(key) { return } - if !types.Identical(j.Pkg.TypesInfo.TypeOf(lhs), j.Pkg.TypesInfo.TypeOf(stmt.Rhs[0])) { + if !types.Identical(pass.TypesInfo.TypeOf(lhs), pass.TypesInfo.TypeOf(stmt.Rhs[0])) { return } - if _, ok := j.Pkg.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok { + if _, ok := pass.TypesInfo.TypeOf(loop.X).(*types.Slice); !ok { return } @@ -162,7 +115,7 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.ObjectOf(ridx) != j.Pkg.TypesInfo.ObjectOf(key) { + if pass.TypesInfo.ObjectOf(ridx) != pass.TypesInfo.ObjectOf(key) { return } } else if rhs, ok := stmt.Rhs[0].(*ast.Ident); ok { @@ -170,38 +123,39 @@ func (c *Checker) LintLoopCopy(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.ObjectOf(rhs) != j.Pkg.TypesInfo.ObjectOf(value) { + if pass.TypesInfo.ObjectOf(rhs) != pass.TypesInfo.ObjectOf(value) { return } } else { return } - j.Errorf(loop, "should use copy() instead of a loop") + ReportfFG(pass, loop.Pos(), "should use copy() instead of a loop") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintIfBoolCmp(j *lint.Job) { +func LintIfBoolCmp(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) if expr.Op != token.EQL && expr.Op != token.NEQ { return } - x := IsBoolConst(j, expr.X) - y := IsBoolConst(j, expr.Y) + x := IsBoolConst(pass, expr.X) + y := IsBoolConst(pass, expr.Y) 
if !x && !y { return } var other ast.Expr var val bool if x { - val = BoolConst(j, expr.X) + val = BoolConst(pass, expr.X) other = expr.Y } else { - val = BoolConst(j, expr.Y) + val = BoolConst(pass, expr.Y) other = expr.X } - basic, ok := j.Pkg.TypesInfo.TypeOf(other).Underlying().(*types.Basic) + basic, ok := pass.TypesInfo.TypeOf(other).Underlying().(*types.Basic) if !ok || basic.Kind() != types.Bool { return } @@ -209,21 +163,22 @@ func (c *Checker) LintIfBoolCmp(j *lint.Job) { if (expr.Op == token.EQL && !val) || (expr.Op == token.NEQ && val) { op = "!" } - r := op + Render(j, other) + r := op + Render(pass, other) l1 := len(r) r = strings.TrimLeft(r, "!") if (l1-len(r))%2 == 1 { r = "!" + r } - if IsInTest(j, node) { + if IsInTest(pass, node) { return } - j.Errorf(expr, "should omit comparison to bool constant, can be simplified to %s", r) + ReportfFG(pass, expr.Pos(), "should omit comparison to bool constant, can be simplified to %s", r) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintBytesBufferConversions(j *lint.Job) { +func LintBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) if len(call.Args) != 1 { @@ -239,18 +194,19 @@ func (c *Checker) LintBytesBufferConversions(j *lint.Job) { return } - typ := j.Pkg.TypesInfo.TypeOf(call.Fun) - if typ == types.Universe.Lookup("string").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).Bytes") { - j.Errorf(call, "should use %v.String() instead of %v", Render(j, sel.X), Render(j, call)) - } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(j, call.Args[0], "(*bytes.Buffer).String") { - j.Errorf(call, "should use %v.Bytes() instead of %v", Render(j, sel.X), Render(j, call)) + typ := pass.TypesInfo.TypeOf(call.Fun) + if typ == types.Universe.Lookup("string").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") { + ReportfFG(pass, call.Pos(), "should use %v.String() instead of %v", Render(pass, sel.X), Render(pass, call)) + } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).String") { + ReportfFG(pass, call.Pos(), "should use %v.Bytes() instead of %v", Render(pass, sel.X), Render(pass, call)) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintStringsContains(j *lint.Job) { +func LintStringsContains(pass *analysis.Pass) (interface{}, error) { // map of value to token to bool value allowed := map[int64]map[token.Token]bool{ -1: {token.GTR: true, token.NEQ: true, token.EQL: false}, @@ -264,7 +220,7 @@ func (c *Checker) LintStringsContains(j *lint.Job) { return } - value, ok := ExprToInt(j, expr.Y) + value, ok := ExprToInt(pass, expr.Y) if !ok { return } @@ -310,12 +266,13 @@ func (c *Checker) LintStringsContains(j *lint.Job) { if !b { prefix = "!" 
} - j.Errorf(node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(j, call.Args)) + ReportfFG(pass, node.Pos(), "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(pass, call.Args)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintBytesCompare(j *lint.Job) { +func LintBytesCompare(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) if expr.Op != token.NEQ && expr.Op != token.EQL { @@ -325,42 +282,44 @@ func (c *Checker) LintBytesCompare(j *lint.Job) { if !ok { return } - if !IsCallToAST(j, call, "bytes.Compare") { + if !IsCallToAST(pass, call, "bytes.Compare") { return } - value, ok := ExprToInt(j, expr.Y) + value, ok := ExprToInt(pass, expr.Y) if !ok || value != 0 { return } - args := RenderArgs(j, call.Args) + args := RenderArgs(pass, call.Args) prefix := "" if expr.Op == token.NEQ { prefix = "!" } - j.Errorf(node, "should use %sbytes.Equal(%s) instead", prefix, args) + ReportfFG(pass, node.Pos(), "should use %sbytes.Equal(%s) instead", prefix, args) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintForTrue(j *lint.Job) { +func LintForTrue(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) if loop.Init != nil || loop.Post != nil { return } - if !IsBoolConst(j, loop.Cond) || !BoolConst(j, loop.Cond) { + if !IsBoolConst(pass, loop.Cond) || !BoolConst(pass, loop.Cond) { return } - j.Errorf(loop, "should use for {} instead of for true {}") + ReportfFG(pass, loop.Pos(), "should use for {} instead of for true {}") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintRegexpRaw(j *lint.Job) { +func LintRegexpRaw(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "regexp.MustCompile") && - !IsCallToAST(j, call, "regexp.Compile") { + if !IsCallToAST(pass, call, "regexp.MustCompile") && + !IsCallToAST(pass, call, "regexp.Compile") { return } sel, ok := call.Fun.(*ast.SelectorExpr) @@ -408,12 +367,13 @@ func (c *Checker) LintRegexpRaw(j *lint.Job) { } } - j.Errorf(call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) + ReportfFG(pass, call.Pos(), "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintIfReturn(j *lint.Job) { +func LintIfReturn(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { block := node.(*ast.BlockStmt) l := len(block.List) @@ -455,7 +415,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { if len(ret1.Results) != 1 { return } - if !IsBoolConst(j, ret1.Results[0]) { + if !IsBoolConst(pass, ret1.Results[0]) { return } @@ -466,12 +426,13 @@ func (c *Checker) LintIfReturn(j *lint.Job) { if len(ret2.Results) != 1 { return } - if !IsBoolConst(j, 
ret2.Results[0]) { + if !IsBoolConst(pass, ret2.Results[0]) { return } - j.Errorf(n1, "should use 'return ' instead of 'if { return }; return '") + ReportfFG(pass, n1.Pos(), "should use 'return ' instead of 'if { return }; return '") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } // LintRedundantNilCheckWithLen checks for the following reduntant nil-checks: @@ -482,7 +443,7 @@ func (c *Checker) LintIfReturn(j *lint.Job) { // if x != nil && len(x) > N {} // if x != nil && len(x) >= N {} (where N != 0) // -func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { +func LintRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { _, ok := expr.(*ast.BasicLit) if ok { @@ -492,7 +453,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return false, false } - c, ok := j.Pkg.TypesInfo.ObjectOf(id).(*types.Const) + c, ok := pass.TypesInfo.ObjectOf(id).(*types.Const) if !ok { return false, false } @@ -522,7 +483,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { if !ok { return } - if !IsNil(j, x.Y) { + if !IsNil(pass, x.Y) { return } @@ -585,7 +546,7 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array var nilType string - switch j.Pkg.TypesInfo.TypeOf(xx).(type) { + switch pass.TypesInfo.TypeOf(xx).(type) { case *types.Slice: nilType = "nil slices" case *types.Map: @@ -595,12 +556,13 @@ func (c *Checker) LintRedundantNilCheckWithLen(j *lint.Job) { default: return } - j.Errorf(expr, "should omit nil check; len() for %s is defined as zero", nilType) + ReportfFG(pass, expr.Pos(), "should omit nil check; len() for %s is defined as zero", nilType) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintSlicing(j *lint.Job) { +func LintSlicing(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { n := node.(*ast.SliceExpr) if n.Max != nil { @@ -618,26 +580,27 @@ func (c *Checker) LintSlicing(j *lint.Job) { if !ok || fun.Name != "len" { return } - if _, ok := j.Pkg.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok { + if _, ok := pass.TypesInfo.ObjectOf(fun).(*types.Builtin); !ok { return } arg, ok := call.Args[Arg("len.v")].(*ast.Ident) if !ok || arg.Obj != s.Obj { return } - j.Errorf(n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") + ReportfFG(pass, n.Pos(), "should omit second index in slice, s[a:len(s)] is identical to s[a:]") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) + return nil, nil } -func refersTo(j *lint.Job, expr ast.Expr, ident *ast.Ident) bool { +func refersTo(pass *analysis.Pass, expr ast.Expr, ident *ast.Ident) bool { found := false fn := func(node ast.Node) bool { ident2, ok := node.(*ast.Ident) if !ok { return true } - if j.Pkg.TypesInfo.ObjectOf(ident) == j.Pkg.TypesInfo.ObjectOf(ident2) { + if pass.TypesInfo.ObjectOf(ident) == pass.TypesInfo.ObjectOf(ident2) { found = true return false } @@ -647,7 +610,7 @@ func refersTo(j *lint.Job, expr 
ast.Expr, ident *ast.Ident) bool { return found } -func (c *Checker) LintLoopAppend(j *lint.Job) { +func LintLoopAppend(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) if !IsBlank(loop.Key) { @@ -667,7 +630,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if stmt.Tok != token.ASSIGN || len(stmt.Lhs) != 1 || len(stmt.Rhs) != 1 { return } - if refersTo(j, stmt.Lhs[0], val) { + if refersTo(pass, stmt.Lhs[0], val) { return } call, ok := stmt.Rhs[0].(*ast.CallExpr) @@ -681,14 +644,14 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return } - obj := j.Pkg.TypesInfo.ObjectOf(fun) + obj := pass.TypesInfo.ObjectOf(fun) fn, ok := obj.(*types.Builtin) if !ok || fn.Name() != "append" { return } - src := j.Pkg.TypesInfo.TypeOf(loop.X) - dst := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("append.slice")]) + src := pass.TypesInfo.TypeOf(loop.X) + dst := pass.TypesInfo.TypeOf(call.Args[Arg("append.slice")]) // TODO(dominikh) remove nil check once Go issue #15173 has // been fixed if src == nil { @@ -698,7 +661,7 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { return } - if Render(j, stmt.Lhs[0]) != Render(j, call.Args[Arg("append.slice")]) { + if Render(pass, stmt.Lhs[0]) != Render(pass, call.Args[Arg("append.slice")]) { return } @@ -706,51 +669,54 @@ func (c *Checker) LintLoopAppend(j *lint.Job) { if !ok { return } - if j.Pkg.TypesInfo.ObjectOf(val) != j.Pkg.TypesInfo.ObjectOf(el) { + if pass.TypesInfo.ObjectOf(val) != pass.TypesInfo.ObjectOf(el) { return } - j.Errorf(loop, "should replace loop with %s = append(%s, %s...)", - Render(j, stmt.Lhs[0]), Render(j, call.Args[Arg("append.slice")]), Render(j, loop.X)) + ReportfFG(pass, loop.Pos(), "should replace loop with %s = append(%s, %s...)", + Render(pass, stmt.Lhs[0]), Render(pass, call.Args[Arg("append.slice")]), Render(pass, loop.X)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintTimeSince(j *lint.Job) { +func LintTimeSince(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { return } - if !IsCallToAST(j, sel.X, "time.Now") { + if !IsCallToAST(pass, sel.X, "time.Now") { return } if sel.Sel.Name != "Sub" { return } - j.Errorf(call, "should use time.Since instead of time.Now().Sub") + ReportfFG(pass, call.Pos(), "should use time.Since instead of time.Now().Sub") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintTimeUntil(j *lint.Job) { - if !IsGoVersion(j, 8) { - return +func LintTimeUntil(pass *analysis.Pass) (interface{}, error) { + if !IsGoVersion(pass, 8) { + return nil, nil } fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "(time.Time).Sub") { + if !IsCallToAST(pass, call, "(time.Time).Sub") { return } - if !IsCallToAST(j, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { + if !IsCallToAST(pass, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { return } - j.Errorf(call, "should use time.Until instead of t.Sub(time.Now())") + ReportfFG(pass, call.Pos(), "should use time.Until instead of t.Sub(time.Now())") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { +func LintUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { fn1 := func(node ast.Node) { assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { @@ -763,7 +729,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { case *ast.IndexExpr: // The type-checker should make sure that it's a map, but // let's be safe. - if _, ok := j.Pkg.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok { + if _, ok := pass.TypesInfo.TypeOf(rhs.X).Underlying().(*types.Map); !ok { return } case *ast.UnaryExpr: @@ -775,7 +741,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { } cp := *assign cp.Lhs = cp.Lhs[0:1] - j.Errorf(assign, "should write %s instead of %s", Render(j, &cp), Render(j, assign)) + ReportfFG(pass, assign.Pos(), "should write %s instead of %s", Render(pass, &cp), Render(pass, assign)) } fn2 := func(node ast.Node) { @@ -795,7 +761,7 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { if expr.Op != token.ARROW { continue } - j.Errorf(lh, "'_ = <-ch' can be simplified to '<-ch'") + ReportfFG(pass, lh.Pos(), "'_ = <-ch' can be simplified to '<-ch'") } } @@ -804,22 +770,23 @@ func (c *Checker) LintUnnecessaryBlank(j *lint.Job) { // for x, _ if !IsBlank(rs.Key) && IsBlank(rs.Value) { - j.Errorf(rs.Value, "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(j, rs.Key), rs.Tok) + ReportfFG(pass, rs.Value.Pos(), "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(pass, rs.Key), rs.Tok) } // for _, _ || for _ if IsBlank(rs.Key) && (IsBlank(rs.Value) || rs.Value == nil) { - j.Errorf(rs.Key, "should omit values from range; this loop is equivalent to `for range ...`") + ReportfFG(pass, rs.Key.Pos(), "should omit values from range; this loop is equivalent to `for range ...`") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) - if IsGoVersion(j, 4) { - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn1) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn2) + if IsGoVersion(pass, 4) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn3) } + return nil, nil } -func (c *Checker) LintSimplerStructConversion(j *lint.Job) { +func LintSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { var skip ast.Node fn := func(node ast.Node) { // Do not suggest type conversion between pointers @@ -838,7 +805,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return } - typ1, _ := j.Pkg.TypesInfo.TypeOf(lit.Type).(*types.Named) + typ1, _ := pass.TypesInfo.TypeOf(lit.Type).(*types.Named) if typ1 == nil { return } @@ -858,7 +825,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if !ok { return nil, nil, false } - typ := j.Pkg.TypesInfo.TypeOf(sel.X) + typ := pass.TypesInfo.TypeOf(sel.X) return typ, ident, typ != nil } if len(lit.Elts) == 0 { @@ -926,7 +893,7 @@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { if typ1 == typ2 { return } - if IsGoVersion(j, 8) { + if IsGoVersion(pass, 8) { if !types.IdenticalIgnoreTags(s1, s2) { return } @@ -935,13 +902,14 
@@ func (c *Checker) LintSimplerStructConversion(j *lint.Job) { return } } - j.Errorf(node, "should convert %s (type %s) to %s instead of using struct literal", + ReportfFG(pass, node.Pos(), "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) + return nil, nil } -func (c *Checker) LintTrim(j *lint.Job) { +func LintTrim(pass *analysis.Pass) (interface{}, error) { sameNonDynamic := func(node1, node2 ast.Node) bool { if reflect.TypeOf(node1) != reflect.TypeOf(node2) { return false @@ -951,9 +919,9 @@ func (c *Checker) LintTrim(j *lint.Job) { case *ast.Ident: return node1.Obj == node2.(*ast.Ident).Obj case *ast.SelectorExpr: - return Render(j, node1) == Render(j, node2) + return Render(pass, node1) == Render(pass, node2) case *ast.IndexExpr: - return Render(j, node1) == Render(j, node2) + return Render(pass, node1) == Render(pass, node2) } return false } @@ -991,22 +959,22 @@ func (c *Checker) LintTrim(j *lint.Job) { return } switch { - case IsCallToAST(j, condCall, "strings.HasPrefix"): + case IsCallToAST(pass, condCall, "strings.HasPrefix"): pkg = "strings" fun = "HasPrefix" - case IsCallToAST(j, condCall, "strings.HasSuffix"): + case IsCallToAST(pass, condCall, "strings.HasSuffix"): pkg = "strings" fun = "HasSuffix" - case IsCallToAST(j, condCall, "strings.Contains"): + case IsCallToAST(pass, condCall, "strings.Contains"): pkg = "strings" fun = "Contains" - case IsCallToAST(j, condCall, "bytes.HasPrefix"): + case IsCallToAST(pass, condCall, "bytes.HasPrefix"): pkg = "bytes" fun = "HasPrefix" - case IsCallToAST(j, condCall, "bytes.HasSuffix"): + case IsCallToAST(pass, condCall, "bytes.HasSuffix"): pkg = "bytes" fun = "HasSuffix" - case IsCallToAST(j, condCall, "bytes.Contains"): + case IsCallToAST(pass, condCall, "bytes.Contains"): pkg = "bytes" fun = "Contains" default: @@ -1032,13 +1000,13 @@ func (c *Checker) LintTrim(j *lint.Job) { if len(rhs.Args) < 2 || !sameNonDynamic(condCall.Args[0], rhs.Args[0]) || !sameNonDynamic(condCall.Args[1], rhs.Args[1]) { return } - if IsCallToAST(j, condCall, "strings.HasPrefix") && IsCallToAST(j, rhs, "strings.TrimPrefix") || - IsCallToAST(j, condCall, "strings.HasSuffix") && IsCallToAST(j, rhs, "strings.TrimSuffix") || - IsCallToAST(j, condCall, "strings.Contains") && IsCallToAST(j, rhs, "strings.Replace") || - IsCallToAST(j, condCall, "bytes.HasPrefix") && IsCallToAST(j, rhs, "bytes.TrimPrefix") || - IsCallToAST(j, condCall, "bytes.HasSuffix") && IsCallToAST(j, rhs, "bytes.TrimSuffix") || - IsCallToAST(j, condCall, "bytes.Contains") && IsCallToAST(j, rhs, "bytes.Replace") { - j.Errorf(ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(j, rhs)) + if IsCallToAST(pass, condCall, "strings.HasPrefix") && IsCallToAST(pass, rhs, "strings.TrimPrefix") || + IsCallToAST(pass, condCall, "strings.HasSuffix") && IsCallToAST(pass, rhs, "strings.TrimSuffix") || + IsCallToAST(pass, condCall, "strings.Contains") && IsCallToAST(pass, rhs, "strings.Replace") || + IsCallToAST(pass, condCall, "bytes.HasPrefix") && IsCallToAST(pass, rhs, "bytes.TrimPrefix") || + IsCallToAST(pass, condCall, "bytes.HasSuffix") && IsCallToAST(pass, rhs, "bytes.TrimSuffix") || + IsCallToAST(pass, condCall, "bytes.Contains") && IsCallToAST(pass, rhs, "bytes.Replace") { + 
ReportfFG(pass, ifstmt.Pos(), "should replace this if statement with an unconditional %s", CallNameAST(pass, rhs)) } return case *ast.SliceExpr: @@ -1063,7 +1031,7 @@ func (c *Checker) LintTrim(j *lint.Job) { index = slice.Low case "HasSuffix": if slice.Low != nil { - n, ok := ExprToInt(j, slice.Low) + n, ok := ExprToInt(pass, slice.Low) if !ok || n != 0 { return } @@ -1092,8 +1060,8 @@ func (c *Checker) LintTrim(j *lint.Job) { if !ok { return } - s1, ok1 := ExprToString(j, lit) - s2, ok2 := ExprToString(j, condCall.Args[1]) + s1, ok1 := ExprToString(pass, lit) + s2, ok2 := ExprToString(pass, condCall.Args[1]) if !ok1 || !ok2 || s1 != s2 { return } @@ -1109,8 +1077,8 @@ func (c *Checker) LintTrim(j *lint.Job) { if pkg != "strings" { return } - string, ok1 := ExprToString(j, condCall.Args[1]) - int, ok2 := ExprToInt(j, slice.Low) + string, ok1 := ExprToString(pass, condCall.Args[1]) + int, ok2 := ExprToInt(pass, slice.Low) if !ok1 || !ok2 || int != int64(len(string)) { return } @@ -1136,13 +1104,14 @@ func (c *Checker) LintTrim(j *lint.Job) { case "HasSuffix": replacement = "TrimSuffix" } - j.Errorf(ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) + ReportfFG(pass, ifstmt.Pos(), "should replace this if statement with an unconditional %s.%s", pkg, replacement) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintLoopSlide(j *lint.Job) { +func LintLoopSlide(pass *analysis.Pass) (interface{}, error) { // TODO(dh): detect bs[i+offset] in addition to bs[offset+i] // TODO(dh): consider merging this function with LintLoopCopy // TODO(dh): detect length that is an expression, not a variable name @@ -1176,7 +1145,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } postvar, ok := post.X.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(postvar) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(postvar) != pass.TypesInfo.ObjectOf(initvar) { return } bin, ok := loop.Cond.(*ast.BinaryExpr) @@ -1184,7 +1153,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } binx, ok := bin.X.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(binx) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(binx) != pass.TypesInfo.ObjectOf(initvar) { return } biny, ok := bin.Y.(*ast.Ident) @@ -1213,8 +1182,8 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { if !ok { return } - obj1 := j.Pkg.TypesInfo.ObjectOf(bs1) - obj2 := j.Pkg.TypesInfo.ObjectOf(bs2) + obj1 := pass.TypesInfo.ObjectOf(bs1) + obj2 := pass.TypesInfo.ObjectOf(bs2) if obj1 != obj2 { return } @@ -1223,7 +1192,7 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { } index1, ok := lhs.Index.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(index1) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(index1) != pass.TypesInfo.ObjectOf(initvar) { return } index2, ok := rhs.Index.(*ast.BinaryExpr) @@ -1235,16 +1204,17 @@ func (c *Checker) LintLoopSlide(j *lint.Job) { return } add2, ok := index2.Y.(*ast.Ident) - if !ok || j.Pkg.TypesInfo.ObjectOf(add2) != j.Pkg.TypesInfo.ObjectOf(initvar) { + if !ok || pass.TypesInfo.ObjectOf(add2) != pass.TypesInfo.ObjectOf(initvar) { return } - j.Errorf(loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(j, bs1), Render(j, biny), Render(j, bs1), Render(j, add1)) + ReportfFG(pass, loop.Pos(), "should use copy(%s[:%s], %s[%s:]) instead", Render(pass, 
bs1), Render(pass, biny), Render(pass, bs1), Render(pass, add1)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintMakeLenCap(j *lint.Job) { +func LintMakeLenCap(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) if fn, ok := call.Fun.(*ast.Ident); !ok || fn.Name != "make" { @@ -1254,25 +1224,26 @@ func (c *Checker) LintMakeLenCap(j *lint.Job) { switch len(call.Args) { case 2: // make(T, len) - if _, ok := j.Pkg.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { + if _, ok := pass.TypesInfo.TypeOf(call.Args[Arg("make.t")]).Underlying().(*types.Slice); ok { break } if IsZero(call.Args[Arg("make.size[0]")]) { - j.Errorf(call.Args[Arg("make.size[0]")], "should use make(%s) instead", Render(j, call.Args[Arg("make.t")])) + ReportfFG(pass, call.Args[Arg("make.size[0]")].Pos(), "should use make(%s) instead", Render(pass, call.Args[Arg("make.t")])) } case 3: // make(T, len, cap) - if Render(j, call.Args[Arg("make.size[0]")]) == Render(j, call.Args[Arg("make.size[1]")]) { - j.Errorf(call.Args[Arg("make.size[0]")], + if Render(pass, call.Args[Arg("make.size[0]")]) == Render(pass, call.Args[Arg("make.size[1]")]) { + ReportfFG(pass, call.Args[Arg("make.size[0]")].Pos(), "should use make(%s, %s) instead", - Render(j, call.Args[Arg("make.t")]), Render(j, call.Args[Arg("make.size[0]")])) + Render(pass, call.Args[Arg("make.t")]), Render(pass, call.Args[Arg("make.size[0]")])) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintAssertNotNil(j *lint.Job) { +func LintAssertNotNil(pass *analysis.Pass) (interface{}, error) { isNilCheck := func(ident *ast.Ident, expr ast.Expr) bool { xbinop, ok := expr.(*ast.BinaryExpr) if !ok || xbinop.Op != token.NEQ { @@ -1282,7 +1253,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !ok || xident.Obj != ident.Obj { return false } - if !IsNil(j, xbinop.Y) { + if !IsNil(pass, xbinop.Y) { return false } return true @@ -1320,7 +1291,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { return } - j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) + ReportfFG(pass, ifstmt.Pos(), "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) } fn2 := func(node ast.Node) { // Check that outer ifstmt is an 'if x != nil {}' @@ -1345,7 +1316,7 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !ok { return } - if !IsNil(j, binop.Y) { + if !IsNil(pass, binop.Y) { return } @@ -1376,13 +1347,14 @@ func (c *Checker) LintAssertNotNil(j *lint.Job) { if !isOKCheck(assignIdent, ifstmt.Cond) { return } - j.Errorf(ifstmt, "when %s is true, %s can't be nil", Render(j, assignIdent), Render(j, assertIdent)) + ReportfFG(pass, ifstmt.Pos(), "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) + return nil, nil } -func (c *Checker) LintDeclareAssign(j *lint.Job) { +func LintDeclareAssign(pass *analysis.Pass) (interface{}, error) { hasMultipleAssignments := func(root ast.Node, ident *ast.Ident) bool { num := 0 ast.Inspect(root, func(node ast.Node) bool { @@ -1440,20 +1412,21 @@ func (c *Checker) LintDeclareAssign(j *lint.Job) { continue } - if refersTo(j, assign.Rhs[0], ident) { + if refersTo(pass, assign.Rhs[0], ident) { continue } if hasMultipleAssignments(block, ident) { continue } - j.Errorf(decl, "should merge variable declaration with assignment on next line") + ReportfFG(pass, decl.Pos(), "should merge variable declaration with assignment on next line") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintRedundantBreak(j *lint.Job) { +func LintRedundantBreak(pass *analysis.Pass) (interface{}, error) { fn1 := func(node ast.Node) { clause := node.(*ast.CaseClause) if len(clause.Body) < 2 { @@ -1463,7 +1436,7 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { if !ok || branch.Tok != token.BREAK || branch.Label != nil { return } - j.Errorf(branch, "redundant break statement") + ReportfFG(pass, branch.Pos(), "redundant break statement") } fn2 := func(node ast.Node) { var ret *ast.FieldList @@ -1490,10 +1463,11 @@ func (c *Checker) LintRedundantBreak(j *lint.Job) { } // we don't need to check rst.Results as we already // checked x.Type.Results to be nil. - j.Errorf(rst, "redundant return statement") + ReportfFG(pass, rst.Pos(), "redundant return statement") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) + return nil, nil } func isStringer(T types.Type) bool { @@ -1520,56 +1494,58 @@ func isStringer(T types.Type) bool { return true } -func (c *Checker) LintRedundantSprintf(j *lint.Job) { +func LintRedundantSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "fmt.Sprintf") { + if !IsCallToAST(pass, call, "fmt.Sprintf") { return } if len(call.Args) != 2 { return } - if s, ok := ExprToString(j, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { + if s, ok := ExprToString(pass, call.Args[Arg("fmt.Sprintf.format")]); !ok || s != "%s" { return } arg := call.Args[Arg("fmt.Sprintf.a[0]")] - typ := j.Pkg.TypesInfo.TypeOf(arg) + typ := pass.TypesInfo.TypeOf(arg) if isStringer(typ) { - j.Errorf(call, "should use String() instead of fmt.Sprintf") + pass.Reportf(call.Pos(), "should use String() instead of fmt.Sprintf") return } if typ.Underlying() == types.Universe.Lookup("string").Type() { if typ == types.Universe.Lookup("string").Type() { - j.Errorf(call, "the argument is already a string, there's no need to use fmt.Sprintf") + ReportfFG(pass, call.Pos(), "the argument is already a string, there's no need to use fmt.Sprintf") } else { - j.Errorf(call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") + ReportfFG(pass, call.Pos(), "the argument's 
underlying type is a string, should use a simple conversion instead of fmt.Sprintf") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintErrorsNewSprintf(j *lint.Job) { +func LintErrorsNewSprintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if !IsCallToAST(j, node, "errors.New") { + if !IsCallToAST(pass, node, "errors.New") { return } call := node.(*ast.CallExpr) - if !IsCallToAST(j, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { + if !IsCallToAST(pass, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { return } - j.Errorf(node, "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") + ReportfFG(pass, node.Pos(), "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) LintRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(j) +func LintRangeStringRunes(pass *analysis.Pass) (interface{}, error) { + return sharedcheck.CheckRangeStringRunes(pass) } -func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { +func LintNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { ifstmt := node.(*ast.IfStmt) cond, ok := ifstmt.Cond.(*ast.BinaryExpr) @@ -1577,7 +1553,7 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { return } - if cond.Op != token.NEQ || !IsNil(j, cond.Y) || len(ifstmt.Body.List) != 1 { + if cond.Op != token.NEQ || !IsNil(pass, cond.Y) || len(ifstmt.Body.List) != 1 { return } @@ -1596,15 +1572,16 @@ func (c *Checker) LintNilCheckAroundRange(j *lint.Job) { if ifXIdent.Obj != rangeXIdent.Obj { return } - switch j.Pkg.TypesInfo.TypeOf(rangeXIdent).(type) { + switch pass.TypesInfo.TypeOf(rangeXIdent).(type) { case *types.Slice, *types.Map: - j.Errorf(node, "unnecessary nil check around range") + ReportfFG(pass, node.Pos(), "unnecessary nil check around range") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func isPermissibleSort(j *lint.Job, node ast.Node) bool { +func isPermissibleSort(pass *analysis.Pass, node ast.Node) bool { call := node.(*ast.CallExpr) typeconv, ok := call.Args[0].(*ast.CallExpr) if !ok { @@ -1615,7 +1592,7 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { if !ok { return true } - name := SelectorName(j, sel) + name := SelectorName(pass, sel) switch name { case "sort.IntSlice", "sort.Float64Slice", "sort.StringSlice": default: @@ -1625,7 +1602,12 @@ func isPermissibleSort(j *lint.Job, node ast.Node) bool { return false } -func (c *Checker) LintSortHelpers(j *lint.Job) { +func LintSortHelpers(pass *analysis.Pass) (interface{}, error) { + type Error struct { + node lint.Positioner + msg string + } + var allErrors []Error fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { @@ -1640,27 +1622,23 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { return } - type Error struct { - node lint.Positioner - msg string - } var errors []Error permissible := false fnSorts := func(node ast.Node) bool { if permissible { return false } - if !IsCallToAST(j, node, "sort.Sort") { + if !IsCallToAST(pass, node, 
"sort.Sort") { return true } - if isPermissibleSort(j, node) { + if isPermissibleSort(pass, node) { permissible = true return false } call := node.(*ast.CallExpr) typeconv := call.Args[Arg("sort.Sort.data")].(*ast.CallExpr) sel := typeconv.Fun.(*ast.SelectorExpr) - name := SelectorName(j, sel) + name := SelectorName(pass, sel) switch name { case "sort.IntSlice": @@ -1677,15 +1655,24 @@ func (c *Checker) LintSortHelpers(j *lint.Job) { if permissible { return } - for _, err := range errors { - j.Errorf(err.node, "%s", err.msg) - } - return + allErrors = append(allErrors, errors...) + } + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) + sort.Slice(allErrors, func(i, j int) bool { + return allErrors[i].node.Pos() < allErrors[j].node.Pos() + }) + var prev token.Pos + for _, err := range allErrors { + if err.node.Pos() == prev { + continue + } + prev = err.node.Pos() + ReportfFG(pass, err.node.Pos(), "%s", err.msg) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncLit)(nil), (*ast.FuncDecl)(nil)}, fn) + return nil, nil } -func (c *Checker) LintGuardedDelete(j *lint.Job) { +func LintGuardedDelete(pass *analysis.Pass) (interface{}, error) { isCommaOkMapIndex := func(stmt ast.Stmt) (b *ast.Ident, m ast.Expr, key ast.Expr, ok bool) { // Has to be of the form `_, = [] @@ -1707,7 +1694,7 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return nil, nil, nil, false } - if _, ok := j.Pkg.TypesInfo.TypeOf(index.X).(*types.Map); !ok { + if _, ok := pass.TypesInfo.TypeOf(index.X).(*types.Map); !ok { return nil, nil, nil, false } key = index.Index @@ -1729,25 +1716,26 @@ func (c *Checker) LintGuardedDelete(j *lint.Job) { if !ok { return } - if !IsCallToAST(j, call, "delete") { + if !IsCallToAST(pass, call, "delete") { return } b, m, key, ok := isCommaOkMapIndex(stmt.Init) if !ok { return } - if cond, ok := stmt.Cond.(*ast.Ident); !ok || j.Pkg.TypesInfo.ObjectOf(cond) != j.Pkg.TypesInfo.ObjectOf(b) { + if cond, ok := stmt.Cond.(*ast.Ident); !ok || pass.TypesInfo.ObjectOf(cond) != pass.TypesInfo.ObjectOf(b) { return } - if Render(j, call.Args[0]) != Render(j, m) || Render(j, call.Args[1]) != Render(j, key) { + if Render(pass, call.Args[0]) != Render(pass, m) || Render(pass, call.Args[1]) != Render(pass, key) { return } - j.Errorf(stmt, "unnecessary guard around call to delete") + ReportfFG(pass, stmt.Pos(), "unnecessary guard around call to delete") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { +func LintSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { stmt := node.(*ast.TypeSwitchStmt) if stmt.Init != nil { @@ -1764,7 +1752,7 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if !ok { return } - x := j.Pkg.TypesInfo.ObjectOf(ident) + x := pass.TypesInfo.ObjectOf(ident) var allOffenders []ast.Node for _, clause := range stmt.Body.List { clause := clause.(*ast.CaseClause) @@ -1783,12 +1771,12 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { hasUnrelatedAssertion = true return false } - if j.Pkg.TypesInfo.ObjectOf(ident) != x { + if pass.TypesInfo.ObjectOf(ident) != x { hasUnrelatedAssertion = true return false } - if !types.Identical(j.Pkg.TypesInfo.TypeOf(clause.List[0]), j.Pkg.TypesInfo.TypeOf(assert2.Type)) { + if 
!types.Identical(pass.TypesInfo.TypeOf(clause.List[0]), pass.TypesInfo.TypeOf(assert2.Type)) { hasUnrelatedAssertion = true return false } @@ -1807,11 +1795,12 @@ func (c *Checker) LintSimplifyTypeSwitch(j *lint.Job) { if len(allOffenders) != 0 { at := "" for _, offender := range allOffenders { - pos := lint.DisplayPosition(j.Pkg.Fset, offender.Pos()) + pos := lint.DisplayPosition(pass.Fset, offender.Pos()) at += "\n\t" + pos.String() } - j.Errorf(expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(j, ident), Render(j, ident), at) + ReportfFG(pass, expr.Pos(), "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(pass, ident), Render(pass, ident), at) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + return nil, nil } diff --git a/simple/lint_test.go b/simple/lint_test.go index 4a746768f..f8fa32e5b 100644 --- a/simple/lint_test.go +++ b/simple/lint_test.go @@ -3,9 +3,71 @@ package simple import ( "testing" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis/analysistest" ) func TestAll(t *testing.T) { - testutil.TestAll(t, NewChecker(), "") + checks := map[string][]struct { + dir string + version string + }{ + "S1000": {{dir: "single-case-select"}}, + "S1001": {{dir: "copy"}}, + "S1002": {{dir: "bool-cmp"}}, + "S1003": {{dir: "contains"}}, + "S1004": {{dir: "compare"}}, + "S1005": { + {dir: "LintBlankOK"}, + {dir: "receive-blank"}, + {dir: "range_go13", version: "1.3"}, + {dir: "range_go14", version: "1.4"}, + }, + "S1006": { + {dir: "for-true"}, + {dir: "generated"}, + }, + "S1007": {{dir: "regexp-raw"}}, + "S1008": {{dir: "if-return"}}, + "S1009": {{dir: "nil-len"}}, + "S1010": {{dir: "slicing"}}, + "S1011": {{dir: "loop-append"}}, + "S1012": {{dir: "time-since"}}, + "S1016": { + {dir: "convert"}, + {dir: "convert_go17", version: "1.7"}, + {dir: "convert_go18", version: "1.8"}, + }, + "S1017": {{dir: "trim"}}, + "S1018": {{dir: "LintLoopSlide"}}, + "S1019": {{dir: "LintMakeLenCap"}}, + "S1020": {{dir: "LintAssertNotNil"}}, + "S1021": {{dir: "LintDeclareAssign"}}, + "S1023": { + {dir: "LintRedundantBreak"}, + {dir: "LintRedundantReturn"}, + }, + "S1024": { + {dir: "LimeTimeUntil_go17", version: "1.7"}, + {dir: "LimeTimeUntil_go18", version: "1.8"}, + }, + "S1025": {{dir: "LintRedundantSprintf"}}, + "S1028": {{dir: "LintErrorsNewSprintf"}}, + "S1029": {{dir: "LintRangeStringRunes"}}, + "S1030": {{dir: "LintBytesBufferConversions"}}, + "S1031": {{dir: "LintNilCheckAroundRange"}}, + "S1032": {{dir: "LintSortHelpers"}}, + "S1033": {{dir: "LintGuardedDelete"}}, + "S1034": {{dir: "LintSimplifyTypeSwitch"}}, + } + for check, dirs := range checks { + a := Analyzers[check] + for _, dir := range dirs { + if dir.version != "" { + if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { + t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a, dir.dir) + } + } } diff --git a/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go b/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go index 7f32d7b27..359b9db68 100644 --- a/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go +++ b/simple/testdata/src/LimeTimeUntil_go18/LimeTimeUntil_go18.go @@ -3,7 +3,7 @@ package pkg import "time" func fn(t time.Time) { - 
t.Sub(time.Now()) // MATCH "time.Until" + t.Sub(time.Now()) // want `time\.Until` t.Sub(t) t2 := time.Now() t.Sub(t2) diff --git a/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go b/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go index f4b6b50c0..f15d0842e 100644 --- a/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go +++ b/simple/testdata/src/LintAssertNotNil/LintAssertNotNil.go @@ -1,9 +1,9 @@ package pkg func fn(i interface{}, x interface{}) { - if _, ok := i.(string); ok && i != nil { // MATCH "when ok is true, i can't be nil" + if _, ok := i.(string); ok && i != nil { // want `when ok is true, i can't be nil` } - if _, ok := i.(string); i != nil && ok { // MATCH "when ok is true, i can't be nil" + if _, ok := i.(string); i != nil && ok { // want `when ok is true, i can't be nil` } if _, ok := i.(string); i != nil || ok { } @@ -12,7 +12,7 @@ func fn(i interface{}, x interface{}) { if _, ok := i.(string); i == nil && ok { } if i != nil { - if _, ok := i.(string); ok { // MATCH "when ok is true, i can't be nil" + if _, ok := i.(string); ok { // want `when ok is true, i can't be nil` } } if i != nil { diff --git a/simple/testdata/src/LintBlankOK/LintBlankOK.go b/simple/testdata/src/LintBlankOK/LintBlankOK.go index bb3c76760..351328674 100644 --- a/simple/testdata/src/LintBlankOK/LintBlankOK.go +++ b/simple/testdata/src/LintBlankOK/LintBlankOK.go @@ -5,8 +5,8 @@ func fn() { var ch chan int var fn func() (int, bool) - x, _ := m[0] // MATCH "should write x := m[0] instead of x, _ := m[0]" - x, _ = <-ch // MATCH "should write x = <-ch instead of x, _ = <-ch" + x, _ := m[0] // want `should write x := m\[0\] instead of x, _ := m\[0\]` + x, _ = <-ch // want `should write x = <-ch instead of x, _ = <-ch` x, _ = fn() _ = x } diff --git a/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go b/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go index a21e403f9..de5d1b7da 100644 --- a/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go +++ b/simple/testdata/src/LintBytesBufferConversions/LintBytesBufferConversions.go @@ -6,12 +6,12 @@ import ( func fn() { buf := bytes.NewBufferString("str") - _ = string(buf.Bytes()) // MATCH "should use buf.String() instead of string(buf.Bytes())" - _ = []byte(buf.String()) // MATCH "should use buf.Bytes() instead of []byte(buf.String())" + _ = string(buf.Bytes()) // want `should use buf\.String\(\) instead of string\(buf\.Bytes\(\)\)` + _ = []byte(buf.String()) // want `should use buf\.Bytes\(\) instead of \[\]byte\(buf\.String\(\)\)` m := map[string]*bytes.Buffer{"key": buf} - _ = string(m["key"].Bytes()) // MATCH "should use m["key"].String() instead of string(m["key"].Bytes())" - _ = []byte(m["key"].String()) // MATCH "should use m["key"].Bytes() instead of []byte(m["key"].String())" + _ = string(m["key"].Bytes()) // want `should use m\["key"\]\.String\(\) instead of string\(m\["key"\]\.Bytes\(\)\)` + _ = []byte(m["key"].String()) // want `should use m\["key"\]\.Bytes\(\) instead of \[\]byte\(m\["key"\]\.String\(\)\)` string := func(_ interface{}) interface{} { return nil diff --git a/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go b/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go index 424a3668f..66cdd6c9e 100644 --- a/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go +++ b/simple/testdata/src/LintDeclareAssign/LintDeclareAssign.go @@ -1,16 +1,16 @@ package pkg func fn() { - var x int // MATCH "should merge variable declaration with 
assignment on next line" + var x int // want `should merge variable declaration with assignment on next line` x = 1 _ = x - var y interface{} // MATCH "should merge variable declaration with assignment on next line" + var y interface{} // want `should merge variable declaration with assignment on next line` y = 1 _ = y if true { - var x string // MATCH "should merge variable declaration with assignment on next line" + var x string // want `should merge variable declaration with assignment on next line` x = "" _ = x } diff --git a/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go b/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go index d49cd418e..c05d4f519 100644 --- a/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go +++ b/simple/testdata/src/LintErrorsNewSprintf/LintErrorsNewSprintf.go @@ -8,5 +8,5 @@ import ( func fn() { _ = fmt.Errorf("%d", 0) _ = errors.New("") - _ = errors.New(fmt.Sprintf("%d", 0)) // MATCH "should use fmt.Errorf" + _ = errors.New(fmt.Sprintf("%d", 0)) // want `should use fmt\.Errorf` } diff --git a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go index 2a4f34332..020a80e04 100644 --- a/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go +++ b/simple/testdata/src/LintGuardedDelete/LintGuardedDelete.go @@ -2,7 +2,7 @@ package pkg func fn(m map[int]int) { - if _, ok := m[0]; ok { // MATCH "unnecessary guard" + if _, ok := m[0]; ok { // want `unnecessary guard` delete(m, 0) } if _, ok := m[0]; !ok { @@ -17,7 +17,7 @@ func fn(m map[int]int) { } var key int - if _, ok := m[key]; ok { // MATCH "unnecessary guard" + if _, ok := m[key]; ok { // want `unnecessary guard` delete(m, key) } if _, ok := m[key]; ok { diff --git a/simple/testdata/src/LintLoopSlide/LintLoopSlide.go b/simple/testdata/src/LintLoopSlide/LintLoopSlide.go index f3d95a48a..edb9c567a 100644 --- a/simple/testdata/src/LintLoopSlide/LintLoopSlide.go +++ b/simple/testdata/src/LintLoopSlide/LintLoopSlide.go @@ -5,7 +5,7 @@ func fn() { var bs []int var offset int - for i := 0; i < n; i++ { // MATCH "should use copy(bs[:n], bs[offset:]) instead" + for i := 0; i < n; i++ { // want `should use copy\(bs\[:n\], bs\[offset:\]\) instead` bs[i] = bs[offset+i] } diff --git a/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go b/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go index 918d8bb04..3c108a08c 100644 --- a/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go +++ b/simple/testdata/src/LintMakeLenCap/LintMakeLenCap.go @@ -9,11 +9,11 @@ func fn() { _ = make([]int, 0) // length is mandatory for slices, don't suggest removal _ = make(s, 0) // length is mandatory for slices, don't suggest removal _ = make(chan int, c) // constant of 0 may be due to debugging, math or platform-specific code - _ = make(chan int, 0) // MATCH "should use make(chan int) instead" - _ = make(ch, 0) // MATCH "should use make(ch) instead" - _ = make(map[int]int, 0) // MATCH "should use make(map[int]int) instead" - _ = make([]int, 1, 1) // MATCH "should use make([]int, 1) instead" - _ = make([]int, x, x) // MATCH "should use make([]int, x) instead" + _ = make(chan int, 0) // want `should use make\(chan int\) instead` + _ = make(ch, 0) // want `should use make\(ch\) instead` + _ = make(map[int]int, 0) // want `should use make\(map\[int\]int\) instead` + _ = make([]int, 1, 1) // want `should use make\(\[\]int, 1\) instead` + _ = make([]int, x, x) // want `should use make\(\[\]int, x\) instead` _ = make([]int, 1, 2) _ = 
make([]int, x, y) } diff --git a/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go b/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go index d18915261..045d5d0ae 100644 --- a/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go +++ b/simple/testdata/src/LintNilCheckAroundRange/LintNilCheckAroundRange.go @@ -18,14 +18,14 @@ func main() { } } - if str != nil { // MATCH /unnecessary nil check around range/ + if str != nil { // want `unnecessary nil check around range` for _, s := range str { s = s + "A" } } var nilMap map[string]int - if nilMap != nil { // MATCH /unnecessary nil check around range/ + if nilMap != nil { // want `unnecessary nil check around range` for key, value := range nilMap { nilMap[key] = value + 1 } diff --git a/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go b/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go index 59d349ac0..b6761a187 100644 --- a/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go +++ b/simple/testdata/src/LintRangeStringRunes/LintRangeStringRunes.go @@ -5,7 +5,7 @@ func fn(s string) { println(r) } - for _, r := range []rune(s) { // MATCH "should range over string" + for _, r := range []rune(s) { // want `should range over string` println(r) } @@ -15,7 +15,7 @@ func fn(s string) { } x := []rune(s) - for _, r := range x { // MATCH "should range over string" + for _, r := range x { // want `should range over string` println(r) } diff --git a/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go b/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go index 1fa78295b..c7d42f79d 100644 --- a/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go +++ b/simple/testdata/src/LintRedundantBreak/LintRedundantBreak.go @@ -4,7 +4,7 @@ func fn(x int) { switch x { case 1: println() - break // MATCH /redundant break/ + break // want `redundant break` case 2: println() case 3: diff --git a/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go b/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go index 90325af17..410eea20a 100644 --- a/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go +++ b/simple/testdata/src/LintRedundantReturn/LintRedundantReturn.go @@ -1,11 +1,11 @@ package pkg func fn1() { - return // MATCH /redundant return/ + return // want `redundant return` } func fn2(a int) { - return // MATCH /redundant return/ + return // want `redundant return` } func fn3() int { @@ -30,11 +30,11 @@ func fn6() { func fn7() { return println("foo") - return // MATCH /redundant return/ + return // want `redundant return` } func fn8() { _ = func() { - return // MATCH /redundant return/ + return // want `redundant return` } } diff --git a/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go b/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go index 7cd9dffd5..d2c43d820 100644 --- a/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go +++ b/simple/testdata/src/LintRedundantSprintf/LintRedundantSprintf.go @@ -21,14 +21,14 @@ func fn() { var t4 T4 var t5 T5 var t6 T6 - _ = fmt.Sprintf("%s", "test") // MATCH "is already a string" - _ = fmt.Sprintf("%s", t1) // MATCH "is a string" - _ = fmt.Sprintf("%s", t2) // MATCH "is a string" - _ = fmt.Sprintf("%s", t3) // MATCH "should use String() instead of fmt.Sprintf" - _ = fmt.Sprintf("%s", t3.String()) // MATCH "is already a string" + _ = fmt.Sprintf("%s", "test") // want `is already a string` + _ = fmt.Sprintf("%s", t1) // want `is a string` + 
_ = fmt.Sprintf("%s", t2) // want `is a string` + _ = fmt.Sprintf("%s", t3) // want `should use String\(\) instead of fmt\.Sprintf` + _ = fmt.Sprintf("%s", t3.String()) // want `is already a string` _ = fmt.Sprintf("%s", t4) _ = fmt.Sprintf("%s", t5) _ = fmt.Sprintf("%s %s", t1, t2) _ = fmt.Sprintf("%v", t1) - _ = fmt.Sprintf("%s", t6) // MATCH "should use String() instead of fmt.Sprintf" + _ = fmt.Sprintf("%s", t6) // want `should use String\(\) instead of fmt\.Sprintf` } diff --git a/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go b/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go index 275a388bb..892f8447e 100644 --- a/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go +++ b/simple/testdata/src/LintSimplifyTypeSwitch/LintSimplifyTypeSwitch.go @@ -14,7 +14,7 @@ func fn(x, y interface{}) { case int: fmt.Println(x.(int), y.(int)) } - switch x.(type) { // MATCH "assigning the result of this type assertion" + switch x.(type) { // want `assigning the result of this type assertion` case int: fmt.Println(x.(int)) } diff --git a/simple/testdata/src/LintSortHelpers/LintSortHelpers.go b/simple/testdata/src/LintSortHelpers/LintSortHelpers.go index 442df89a7..fb7722932 100644 --- a/simple/testdata/src/LintSortHelpers/LintSortHelpers.go +++ b/simple/testdata/src/LintSortHelpers/LintSortHelpers.go @@ -10,17 +10,17 @@ func (s MyIntSlice) Swap(i, j int) {} func fn1() { var a []int - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` } func fn2() { var b []float64 - sort.Sort(sort.Float64Slice(b)) // MATCH "sort.Float64s" + sort.Sort(sort.Float64Slice(b)) // want `sort\.Float64s` } func fn3() { var c []string - sort.Sort(sort.StringSlice(c)) // MATCH "sort.Strings" + sort.Sort(sort.StringSlice(c)) // want `sort\.Strings` } func fn4() { @@ -49,18 +49,18 @@ func fn7() { func fn8() { var a []int - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` } func fn9() { func() { var a []int - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` }() } func fn10() { var a MyIntSlice - sort.Sort(sort.IntSlice(a)) // MATCH "sort.Ints" + sort.Sort(sort.IntSlice(a)) // want `sort\.Ints` } diff --git a/simple/testdata/src/bool-cmp/bool-cmp.go b/simple/testdata/src/bool-cmp/bool-cmp.go index 83224a956..0c73bc45f 100644 --- a/simple/testdata/src/bool-cmp/bool-cmp.go +++ b/simple/testdata/src/bool-cmp/bool-cmp.go @@ -9,42 +9,42 @@ func fn() { const t T = false if x == t { } - if fn1() == true { // MATCH "simplified to fn1()" + if fn1() == true { // want `simplified to fn1\(\)` } - if fn1() != true { // MATCH "simplified to !fn1()" + if fn1() != true { // want `simplified to !fn1\(\)` } - if fn1() == false { // MATCH "simplified to !fn1()" + if fn1() == false { // want `simplified to !fn1\(\)` } - if fn1() != false { // MATCH "simplified to fn1()" + if fn1() != false { // want `simplified to fn1\(\)` } - if fn1() && (fn1() || fn1()) || (fn1() && fn1()) == true { // MATCH "simplified to (fn1() && fn1())" + if fn1() && (fn1() || fn1()) || (fn1() && fn1()) == true { // want `simplified to \(fn1\(\) && fn1\(\)\)` } - if (fn1() && fn2()) == false { // MATCH "simplified to !(fn1() && fn2())" + if (fn1() && fn2()) == false { // want `simplified to !\(fn1\(\) && fn2\(\)\)` } var y bool - for y != true { // MATCH /simplified to !y/ + 
for y != true { // want `simplified to !y` } - if !y == true { // MATCH /simplified to !y/ + if !y == true { // want `simplified to !y` } - if !y == false { // MATCH /simplified to y/ + if !y == false { // want `simplified to y` } - if !y != true { // MATCH /simplified to y/ + if !y != true { // want `simplified to y` } - if !y != false { // MATCH /simplified to !y/ + if !y != false { // want `simplified to !y` } - if !!y == false { // MATCH /simplified to !y/ + if !!y == false { // want `simplified to !y` } - if !!!y == false { // MATCH /simplified to y/ + if !!!y == false { // want `simplified to y` } - if !!y == true { // MATCH /simplified to y/ + if !!y == true { // want `simplified to y` } - if !!!y == true { // MATCH /simplified to !y/ + if !!!y == true { // want `simplified to !y` } - if !!y != true { // MATCH /simplified to !y/ + if !!y != true { // want `simplified to !y` } - if !!!y != true { // MATCH /simplified to y/ + if !!!y != true { // want `simplified to y` } if !y == !false { // not matched because we expect true/false on one side, not !false } diff --git a/simple/testdata/src/compare/compare.go b/simple/testdata/src/compare/compare.go index 18d4b17c9..435191635 100644 --- a/simple/testdata/src/compare/compare.go +++ b/simple/testdata/src/compare/compare.go @@ -3,8 +3,8 @@ package pkg import "bytes" func fn() { - _ = bytes.Compare(nil, nil) == 0 // MATCH / bytes.Equal/ - _ = bytes.Compare(nil, nil) != 0 // MATCH /!bytes.Equal/ + _ = bytes.Compare(nil, nil) == 0 // want ` bytes.Equal` + _ = bytes.Compare(nil, nil) != 0 // want `!bytes.Equal` _ = bytes.Compare(nil, nil) > 0 _ = bytes.Compare(nil, nil) < 0 } diff --git a/simple/testdata/src/contains/contains.go b/simple/testdata/src/contains/contains.go index eaf34d312..a509d91e0 100644 --- a/simple/testdata/src/contains/contains.go +++ b/simple/testdata/src/contains/contains.go @@ -6,34 +6,34 @@ import ( ) func fn() { - _ = strings.IndexRune("", 'x') > -1 // MATCH / strings.ContainsRune/ - _ = strings.IndexRune("", 'x') >= 0 // MATCH / strings.ContainsRune/ + _ = strings.IndexRune("", 'x') > -1 // want ` strings\.ContainsRune` + _ = strings.IndexRune("", 'x') >= 0 // want ` strings\.ContainsRune` _ = strings.IndexRune("", 'x') > 0 _ = strings.IndexRune("", 'x') >= -1 - _ = strings.IndexRune("", 'x') != -1 // MATCH / strings.ContainsRune/ - _ = strings.IndexRune("", 'x') == -1 // MATCH /!strings.ContainsRune/ + _ = strings.IndexRune("", 'x') != -1 // want ` strings\.ContainsRune` + _ = strings.IndexRune("", 'x') == -1 // want `!strings\.ContainsRune` _ = strings.IndexRune("", 'x') != 0 - _ = strings.IndexRune("", 'x') < 0 // MATCH /!strings.ContainsRune/ + _ = strings.IndexRune("", 'x') < 0 // want `!strings\.ContainsRune` - _ = strings.IndexAny("", "") > -1 // MATCH / strings.ContainsAny/ - _ = strings.IndexAny("", "") >= 0 // MATCH / strings.ContainsAny/ + _ = strings.IndexAny("", "") > -1 // want ` strings\.ContainsAny` + _ = strings.IndexAny("", "") >= 0 // want ` strings\.ContainsAny` _ = strings.IndexAny("", "") > 0 _ = strings.IndexAny("", "") >= -1 - _ = strings.IndexAny("", "") != -1 // MATCH / strings.ContainsAny/ - _ = strings.IndexAny("", "") == -1 // MATCH /!strings.ContainsAny/ + _ = strings.IndexAny("", "") != -1 // want ` strings\.ContainsAny` + _ = strings.IndexAny("", "") == -1 // want `!strings\.ContainsAny` _ = strings.IndexAny("", "") != 0 - _ = strings.IndexAny("", "") < 0 // MATCH /!strings.ContainsAny/ + _ = strings.IndexAny("", "") < 0 // want `!strings\.ContainsAny` - _ = strings.Index("", "") > -1 
// MATCH / strings.Contains/ - _ = strings.Index("", "") >= 0 // MATCH / strings.Contains/ + _ = strings.Index("", "") > -1 // want ` strings\.Contains` + _ = strings.Index("", "") >= 0 // want ` strings\.Contains` _ = strings.Index("", "") > 0 _ = strings.Index("", "") >= -1 - _ = strings.Index("", "") != -1 // MATCH / strings.Contains/ - _ = strings.Index("", "") == -1 // MATCH /!strings.Contains/ + _ = strings.Index("", "") != -1 // want ` strings\.Contains` + _ = strings.Index("", "") == -1 // want `!strings\.Contains` _ = strings.Index("", "") != 0 - _ = strings.Index("", "") < 0 // MATCH /!strings.Contains/ + _ = strings.Index("", "") < 0 // want `!strings\.Contains` - _ = bytes.IndexRune(nil, 'x') > -1 // MATCH / bytes.ContainsRune/ - _ = bytes.IndexAny(nil, "") > -1 // MATCH / bytes.ContainsAny/ - _ = bytes.Index(nil, nil) > -1 // MATCH / bytes.Contains/ + _ = bytes.IndexRune(nil, 'x') > -1 // want ` bytes\.ContainsRune` + _ = bytes.IndexAny(nil, "") > -1 // want ` bytes\.ContainsAny` + _ = bytes.Index(nil, nil) > -1 // want ` bytes\.Contains` } diff --git a/simple/testdata/src/convert/convert.go b/simple/testdata/src/convert/convert.go index 4b20138ac..614105bd7 100644 --- a/simple/testdata/src/convert/convert.go +++ b/simple/testdata/src/convert/convert.go @@ -15,10 +15,10 @@ type t3 t1 func fn() { v1 := t1{1, 2} v2 := t2{1, 2} - _ = t2{v1.a, v1.b} // MATCH /should convert v1/ - _ = t2{a: v1.a, b: v1.b} // MATCH /should convert v1/ - _ = t2{b: v1.b, a: v1.a} // MATCH /should convert v1/ - _ = t3{v1.a, v1.b} // MATCH /should convert v1/ + _ = t2{v1.a, v1.b} // want `should convert v1` + _ = t2{a: v1.a, b: v1.b} // want `should convert v1` + _ = t2{b: v1.b, a: v1.a} // want `should convert v1` + _ = t3{v1.a, v1.b} // want `should convert v1` _ = t3{v1.a, v2.b} diff --git a/simple/testdata/src/convert_go17/convert.go b/simple/testdata/src/convert_go17/convert.go index 0ff30ef36..2afcf6a1a 100644 --- a/simple/testdata/src/convert_go17/convert.go +++ b/simple/testdata/src/convert_go17/convert.go @@ -17,6 +17,6 @@ type t3 struct { func fn() { v1 := t1{1, 2} - _ = t2{v1.a, v1.b} // MATCH /should convert v1/ + _ = t2{v1.a, v1.b} // want `should convert v1` _ = t3{v1.a, v1.b} } diff --git a/simple/testdata/src/convert_go18/convert.go b/simple/testdata/src/convert_go18/convert.go index e9887bcbe..f90244970 100644 --- a/simple/testdata/src/convert_go18/convert.go +++ b/simple/testdata/src/convert_go18/convert.go @@ -17,6 +17,6 @@ type t3 struct { func fn() { v1 := t1{1, 2} - _ = t2{v1.a, v1.b} // MATCH /should convert v1/ - _ = t3{v1.a, v1.b} // MATCH /should convert v1/ + _ = t2{v1.a, v1.b} // want `should convert v1` + _ = t3{v1.a, v1.b} // want `should convert v1` } diff --git a/simple/testdata/src/copy/copy.go b/simple/testdata/src/copy/copy.go index d4c130b2c..f8396f08b 100644 --- a/simple/testdata/src/copy/copy.go +++ b/simple/testdata/src/copy/copy.go @@ -2,11 +2,11 @@ package pkg func fn() { var b1, b2 []byte - for i, v := range b1 { // MATCH /should use copy/ + for i, v := range b1 { // want `should use copy` b2[i] = v } - for i := range b1 { // MATCH /should use copy/ + for i := range b1 { // want `should use copy` b2[i] = b1[i] } @@ -18,7 +18,7 @@ func fn() { } var b3, b4 []*byte - for i := range b3 { // MATCH /should use copy/ + for i := range b3 { // want `should use copy` b4[i] = b3[i] } diff --git a/simple/testdata/src/for-true/for-true.go b/simple/testdata/src/for-true/for-true.go index b806eaeff..f3cd0b3f2 100644 --- a/simple/testdata/src/for-true/for-true.go +++ 
b/simple/testdata/src/for-true/for-true.go @@ -3,7 +3,7 @@ package pkg func fn() { for false { } - for true { // MATCH /should use for/ + for true { // want `should use for` } for { } diff --git a/simple/testdata/src/generated/input.go b/simple/testdata/src/generated/input.go index 25622287f..985274180 100644 --- a/simple/testdata/src/generated/input.go +++ b/simple/testdata/src/generated/input.go @@ -1,6 +1,6 @@ package pkg -// MATCH "should use for {}" +// want `should use for \{\}` // the error is produced by generated.go, which pretends that its // broken code came from this file. diff --git a/simple/testdata/src/if-return/if-return.go b/simple/testdata/src/if-return/if-return.go index 28c798a69..6b4c6a8ab 100644 --- a/simple/testdata/src/if-return/if-return.go +++ b/simple/testdata/src/if-return/if-return.go @@ -3,7 +3,7 @@ package pkg func fn() bool { return true } func fn1() bool { x := true - if x { // MATCH /should use 'return '/ + if x { // want `should use 'return '` return true } return false @@ -31,21 +31,21 @@ func fn3() int { func fn4() bool { return true } func fn5() bool { - if fn() { // MATCH /should use 'return '/ + if fn() { // want `should use 'return '` return false } return true } func fn6() bool { - if fn3() != fn3() { // MATCH /should use 'return '/ + if fn3() != fn3() { // want `should use 'return '` return true } return false } func fn7() bool { - if 1 > 2 { // MATCH /should use 'return '/ + if 1 > 2 { // want `should use 'return '` return true } return false diff --git a/simple/testdata/src/loop-append/loop-append.go b/simple/testdata/src/loop-append/loop-append.go index 2f4eefd73..61d23046f 100644 --- a/simple/testdata/src/loop-append/loop-append.go +++ b/simple/testdata/src/loop-append/loop-append.go @@ -13,7 +13,7 @@ func fn1() { } var a, b []int - for _, v := range a { // MATCH /should replace loop/ + for _, v := range a { // want `should replace loop` b = append(b, v) } diff --git a/simple/testdata/src/nil-len/nil-len.go b/simple/testdata/src/nil-len/nil-len.go index 2de90d4c3..08fcfbb54 100644 --- a/simple/testdata/src/nil-len/nil-len.go +++ b/simple/testdata/src/nil-len/nil-len.go @@ -6,26 +6,26 @@ func fn() { var m map[int]int var ch chan int - if s == nil || len(s) == 0 { // MATCH /should omit nil check/ + if s == nil || len(s) == 0 { // want `should omit nil check` } - if m == nil || len(m) == 0 { // MATCH /should omit nil check/ + if m == nil || len(m) == 0 { // want `should omit nil check` } - if ch == nil || len(ch) == 0 { // MATCH /should omit nil check/ + if ch == nil || len(ch) == 0 { // want `should omit nil check` } - if s != nil && len(s) != 0 { // MATCH /should omit nil check/ + if s != nil && len(s) != 0 { // want `should omit nil check` } - if m != nil && len(m) > 0 { // MATCH /should omit nil check/ + if m != nil && len(m) > 0 { // want `should omit nil check` } - if s != nil && len(s) > 5 { // MATCH /should omit nil check/ + if s != nil && len(s) > 5 { // want `should omit nil check` } - if s != nil && len(s) >= 5 { // MATCH /should omit nil check/ + if s != nil && len(s) >= 5 { // want `should omit nil check` } const five = 5 - if s != nil && len(s) == five { // MATCH /should omit nil check/ + if s != nil && len(s) == five { // want `should omit nil check` } - if ch != nil && len(ch) == 5 { // MATCH /should omit nil check/ + if ch != nil && len(ch) == 5 { // want `should omit nil check` } if pa == nil || len(pa) == 0 { // nil check cannot be removed with pointer to an array diff --git a/simple/testdata/src/range_go14/range_go14.go 
b/simple/testdata/src/range_go14/range_go14.go index f1b285268..8555bc7e1 100644 --- a/simple/testdata/src/range_go14/range_go14.go +++ b/simple/testdata/src/range_go14/range_go14.go @@ -4,19 +4,19 @@ func fn() { var m map[string]int // with := - for x, _ := range m { // MATCH /should omit value from range/ + for x, _ := range m { // want `should omit value from range` _ = x } // with = var y string _ = y - for y, _ = range m { // MATCH /should omit value from range/ + for y, _ = range m { // want `should omit value from range` } - for _ = range m { // MATCH /should omit values.*range.*equivalent.*for range/ + for _ = range m { // want `should omit values.*range.*equivalent.*for range` } - for _, _ = range m { // MATCH /should omit values.*range.*equivalent.*for range/ + for _, _ = range m { // want `should omit values.*range.*equivalent.*for range` } // all OK: diff --git a/simple/testdata/src/receive-blank/receive-blank.go b/simple/testdata/src/receive-blank/receive-blank.go index 23c8a618f..703212034 100644 --- a/simple/testdata/src/receive-blank/receive-blank.go +++ b/simple/testdata/src/receive-blank/receive-blank.go @@ -3,13 +3,13 @@ package pkg func fn() { var ch chan int <-ch - _ = <-ch // MATCH /_ = <-ch/ + _ = <-ch // want `_ = <-ch` select { case <-ch: - case _ = <-ch: // MATCH /_ = <-ch/ + case _ = <-ch: // want `_ = <-ch` } x := <-ch - y, _ := <-ch, <-ch // MATCH /_ = <-ch/ - _, z := <-ch, <-ch // MATCH /_ = <-ch/ + y, _ := <-ch, <-ch // want `_ = <-ch` + _, z := <-ch, <-ch // want `_ = <-ch` _, _, _ = x, y, z } diff --git a/simple/testdata/src/regexp-raw/regexp-raw.go b/simple/testdata/src/regexp-raw/regexp-raw.go index ec92a0d7b..87fcca60e 100644 --- a/simple/testdata/src/regexp-raw/regexp-raw.go +++ b/simple/testdata/src/regexp-raw/regexp-raw.go @@ -8,8 +8,8 @@ func fn() { x := "abc" const y = "abc" regexp.MustCompile(`\\.`) - regexp.MustCompile("\\.") // MATCH /should use raw string.+\.MustCompile/ - regexp.Compile("\\.") // MATCH /should use raw string.+\.Compile/ + regexp.MustCompile("\\.") // want `should use raw string.+\.MustCompile` + regexp.Compile("\\.") // want `should use raw string.+\.Compile` regexp.Compile("\\.`") regexp.MustCompile("(?m:^lease (.+?) 
{\n((?s).+?)\\n}\n)") regexp.MustCompile("\\*/[ \t\n\r\f\v]*;") diff --git a/simple/testdata/src/single-case-select/single-case-select.go b/simple/testdata/src/single-case-select/single-case-select.go index efaced137..6b53458af 100644 --- a/simple/testdata/src/single-case-select/single-case-select.go +++ b/simple/testdata/src/single-case-select/single-case-select.go @@ -2,18 +2,18 @@ package pkg func fn() { var ch chan int - select { // MATCH /should use a simple channel send/ + select { // want `should use a simple channel send` case <-ch: } outer: - for { // MATCH /should use for range/ + for { // want `should use for range` select { case <-ch: break outer } } - for { // MATCH /should use for range/ + for { // want `should use for range` select { case x := <-ch: _ = x @@ -21,7 +21,7 @@ outer: } for { - select { // MATCH /should use a simple channel send/ + select { // want `should use a simple channel send` case ch <- 0: } } diff --git a/simple/testdata/src/slicing/slicing.go b/simple/testdata/src/slicing/slicing.go index f49fb705e..a1de03d24 100644 --- a/simple/testdata/src/slicing/slicing.go +++ b/simple/testdata/src/slicing/slicing.go @@ -2,7 +2,7 @@ package pkg func fn() { var s []int - _ = s[:len(s)] // MATCH /omit second index/ + _ = s[:len(s)] // want `omit second index` len := func(s []int) int { return -1 } _ = s[:len(s)] diff --git a/simple/testdata/src/time-since/time-since.go b/simple/testdata/src/time-since/time-since.go index fb3a773ea..d8fb09abf 100644 --- a/simple/testdata/src/time-since/time-since.go +++ b/simple/testdata/src/time-since/time-since.go @@ -4,6 +4,6 @@ import "time" func fn() { t1 := time.Now() - _ = time.Now().Sub(t1) // MATCH "time.Since" + _ = time.Now().Sub(t1) // want `time\.Since` _ = time.Date(0, 0, 0, 0, 0, 0, 0, nil).Sub(t1) } diff --git a/simple/testdata/src/trim/trim.go b/simple/testdata/src/trim/trim.go index ccab6c496..bb7060c62 100644 --- a/simple/testdata/src/trim/trim.go +++ b/simple/testdata/src/trim/trim.go @@ -17,11 +17,11 @@ func fn() { var id1 = "a string value" var id2 string - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[len(s1):] } - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = strings.TrimPrefix(id1, s1) } @@ -29,52 +29,52 @@ func fn() { id1 = strings.TrimPrefix(id1, s2) } - if strings.Contains(id1, s1) { // MATCH /should replace.*with.*strings.Replace/ + if strings.Contains(id1, s1) { // want `should replace.*with.*strings\.Replace` id1 = strings.Replace(id1, s1, "something", 123) } - if strings.HasSuffix(id1, s2) { // MATCH /should replace.*with.*strings.TrimSuffix/ + if strings.HasSuffix(id1, s2) { // want `should replace.*with.*strings\.TrimSuffix` id1 = id1[:len(id1)-len(s2)] } var x, y []string var i int - if strings.HasPrefix(x[i], s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(x[i], s1) { // want `should replace.*with.*strings\.TrimPrefix` x[i] = x[i][len(s1):] } - if strings.HasPrefix(x[i], y[i]) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(x[i], y[i]) { // want `should replace.*with.*strings\.TrimPrefix` x[i] = x[i][len(y[i]):] } var t struct{ x string } - if strings.HasPrefix(t.x, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(t.x, s1) { // want `should 
replace.*with.*strings\.TrimPrefix` t.x = t.x[len(s1):] } - if strings.HasPrefix(id1, "test") { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, "test") { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[len("test"):] } - if strings.HasPrefix(id1, "test") { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, "test") { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[4:] } - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[14:] } - if strings.HasPrefix(id1, s1) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, s1) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[n:] } var b1, b2 []byte - if bytes.HasPrefix(b1, b2) { // MATCH /should replace.*with.*bytes.TrimPrefix/ + if bytes.HasPrefix(b1, b2) { // want `should replace.*with.*bytes\.TrimPrefix` b1 = b1[len(b2):] } id3 := s2 - if strings.HasPrefix(id1, id3) { // MATCH /should replace.*with.*strings.TrimPrefix/ + if strings.HasPrefix(id1, id3) { // want `should replace.*with.*strings\.TrimPrefix` id1 = id1[len(id3):] } diff --git a/ssa/func.go b/ssa/func.go index 53635ba01..222eea641 100644 --- a/ssa/func.go +++ b/ssa/func.go @@ -328,6 +328,70 @@ func (f *Function) finishBody() { } f.Locals = f.Locals[:j] + // comma-ok receiving from a time.Tick channel will never return + // ok == false, so any branching on the value of ok can be + // replaced with an unconditional jump. This will primarily match + // `for range time.Tick(x)` loops, but it can also match + // user-written code. + for _, block := range f.Blocks { + if len(block.Instrs) < 3 { + continue + } + if len(block.Succs) != 2 { + continue + } + var instrs []*Instruction + for i, ins := range block.Instrs { + if _, ok := ins.(*DebugRef); ok { + continue + } + instrs = append(instrs, &block.Instrs[i]) + } + + for i, ins := range instrs { + unop, ok := (*ins).(*UnOp) + if !ok || unop.Op != token.ARROW { + continue + } + call, ok := unop.X.(*Call) + if !ok { + continue + } + if call.Common().IsInvoke() { + continue + } + + // OPT(dh): surely there is a more efficient way of doing + // this, than using FullName. We should already have + // resolved time.Tick somewhere? 
+ v, ok := call.Common().Value.(*Function) + if !ok { + continue + } + t, ok := v.Object().(*types.Func) + if !ok { + continue + } + if t.FullName() != "time.Tick" { + continue + } + ex, ok := (*instrs[i+1]).(*Extract) + if !ok || ex.Tuple != unop || ex.Index != 1 { + continue + } + + ifstmt, ok := (*instrs[i+2]).(*If) + if !ok || ifstmt.Cond != ex { + continue + } + + *instrs[i+2] = NewJump(block) + succ := block.Succs[1] + block.Succs = block.Succs[0:1] + succ.RemovePred(block) + } + } + optimizeBlocks(f) buildReferrers(f) diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go new file mode 100644 index 000000000..b62e5fec7 --- /dev/null +++ b/staticcheck/analysis.go @@ -0,0 +1,527 @@ +package staticcheck + +import ( + "flag" + + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintutil" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" +) + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version") + return *fs +} + +var Analyzers = map[string]*analysis.Analyzer{ + "SA1000": { + Name: "SA1000", + Run: callChecker(checkRegexpRules), + Doc: docSA1000, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1001": { + Name: "SA1001", + Run: CheckTemplate, + Doc: docSA1001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1002": { + Name: "SA1002", + Run: callChecker(checkTimeParseRules), + Doc: docSA1002, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1003": { + Name: "SA1003", + Run: callChecker(checkEncodingBinaryRules), + Doc: docSA1003, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1004": { + Name: "SA1004", + Run: CheckTimeSleepConstant, + Doc: docSA1004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1005": { + Name: "SA1005", + Run: CheckExec, + Doc: docSA1005, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1006": { + Name: "SA1006", + Run: CheckUnsafePrintf, + Doc: docSA1006, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1007": { + Name: "SA1007", + Run: callChecker(checkURLsRules), + Doc: docSA1007, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1008": { + Name: "SA1008", + Run: CheckCanonicalHeaderKey, + Doc: docSA1008, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1010": { + Name: "SA1010", + Run: callChecker(checkRegexpFindAllRules), + Doc: docSA1010, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1011": { + Name: "SA1011", + Run: callChecker(checkUTF8CutsetRules), + Doc: docSA1011, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1012": { + Name: "SA1012", + Run: CheckNilContext, + Doc: docSA1012, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1013": { + Name: "SA1013", + Run: CheckSeeker, + Doc: docSA1013, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1014": { + Name: "SA1014", + Run: callChecker(checkUnmarshalPointerRules), + Doc: docSA1014, + Requires: 
[]*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1015": { + Name: "SA1015", + Run: CheckLeakyTimeTick, + Doc: docSA1015, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA1016": { + Name: "SA1016", + Run: CheckUntrappableSignal, + Doc: docSA1016, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA1017": { + Name: "SA1017", + Run: callChecker(checkUnbufferedSignalChanRules), + Doc: docSA1017, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1018": { + Name: "SA1018", + Run: callChecker(checkStringsReplaceZeroRules), + Doc: docSA1018, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1019": { + Name: "SA1019", + Run: CheckDeprecated, + Doc: docSA1019, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + FactTypes: []analysis.Fact{(*IsDeprecated)(nil)}, + Flags: newFlagSet(), + }, + "SA1020": { + Name: "SA1020", + Run: callChecker(checkListenAddressRules), + Doc: docSA1020, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1021": { + Name: "SA1021", + Run: callChecker(checkBytesEqualIPRules), + Doc: docSA1021, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1023": { + Name: "SA1023", + Run: CheckWriterBufferModified, + Doc: docSA1023, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA1024": { + Name: "SA1024", + Run: callChecker(checkUniqueCutsetRules), + Doc: docSA1024, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1025": { + Name: "SA1025", + Run: CheckTimerResetReturnValue, + Doc: docSA1025, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA1026": { + Name: "SA1026", + Run: callChecker(checkUnsupportedMarshal), + Doc: docSA1026, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA1027": { + Name: "SA1027", + Run: callChecker(checkAtomicAlignment), + Doc: docSA1027, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + + "SA2000": { + Name: "SA2000", + Run: CheckWaitgroupAdd, + Doc: docSA2000, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA2001": { + Name: "SA2001", + Run: CheckEmptyCriticalSection, + Doc: docSA2001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA2002": { + Name: "SA2002", + Run: CheckConcurrentTesting, + Doc: docSA2002, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA2003": { + Name: "SA2003", + Run: CheckDeferLock, + Doc: docSA2003, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + + "SA3000": { + Name: "SA3000", + Run: CheckTestMainExit, + Doc: docSA3000, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA3001": { + Name: "SA3001", + Run: CheckBenchmarkN, + Doc: docSA3001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + + "SA4000": { + Name: "SA4000", + Run: CheckLhsRhsIdentical, + Doc: docSA4000, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.TokenFileAnalyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "SA4001": { + Name: "SA4001", + Run: CheckIneffectiveCopy, 
+ Doc: docSA4001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4002": { + Name: "SA4002", + Run: CheckDiffSizeComparison, + Doc: docSA4002, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA4003": { + Name: "SA4003", + Run: CheckExtremeComparison, + Doc: docSA4003, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4004": { + Name: "SA4004", + Run: CheckIneffectiveLoop, + Doc: docSA4004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4006": { + Name: "SA4006", + Run: CheckUnreadVariableValues, + Doc: docSA4006, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4008": { + Name: "SA4008", + Run: CheckLoopCondition, + Doc: docSA4008, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4009": { + Name: "SA4009", + Run: CheckArgOverwritten, + Doc: docSA4009, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4010": { + Name: "SA4010", + Run: CheckIneffectiveAppend, + Doc: docSA4010, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4011": { + Name: "SA4011", + Run: CheckScopedBreak, + Doc: docSA4011, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4012": { + Name: "SA4012", + Run: CheckNaNComparison, + Doc: docSA4012, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA4013": { + Name: "SA4013", + Run: CheckDoubleNegation, + Doc: docSA4013, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4014": { + Name: "SA4014", + Run: CheckRepeatedIfElse, + Doc: docSA4014, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4015": { + Name: "SA4015", + Run: callChecker(checkMathIntRules), + Doc: docSA4015, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA4016": { + Name: "SA4016", + Run: CheckSillyBitwiseOps, + Doc: docSA4016, + Requires: []*analysis.Analyzer{buildssa.Analyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "SA4017": { + Name: "SA4017", + Run: CheckPureFunctions, + Doc: docSA4017, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + FactTypes: []analysis.Fact{(*IsPure)(nil)}, + Flags: newFlagSet(), + }, + "SA4018": { + Name: "SA4018", + Run: CheckSelfAssignment, + Doc: docSA4018, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "SA4019": { + Name: "SA4019", + Run: CheckDuplicateBuildConstraints, + Doc: docSA4019, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "SA4020": { + Name: "SA4020", + Run: CheckUnreachableTypeCases, + Doc: docSA4020, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA4021": { + Name: "SA4021", + Run: CheckSingleArgAppend, + Doc: docSA4021, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + + "SA5000": { + Name: "SA5000", + Run: CheckNilMaps, + Doc: docSA5000, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA5001": { + Name: "SA5001", + Run: CheckEarlyDefer, + Doc: docSA5001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5002": { + Name: "SA5002", + 
Run: CheckInfiniteEmptyLoop, + Doc: docSA5002, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5003": { + Name: "SA5003", + Run: CheckDeferInInfiniteLoop, + Doc: docSA5003, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5004": { + Name: "SA5004", + Run: CheckLoopEmptyDefault, + Doc: docSA5004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5005": { + Name: "SA5005", + Run: CheckCyclicFinalizer, + Doc: docSA5005, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA5007": { + Name: "SA5007", + Run: CheckInfiniteRecursion, + Doc: docSA5007, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA5008": { + Name: "SA5008", + Run: CheckStructTags, + Doc: docSA5008, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA5009": { + Name: "SA5009", + Run: callChecker(checkPrintfRules), + Doc: docSA5009, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + + "SA6000": { + Name: "SA6000", + Run: callChecker(checkRegexpMatchLoopRules), + Doc: docSA6000, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA6001": { + Name: "SA6001", + Run: CheckMapBytesKey, + Doc: docSA6001, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA6002": { + Name: "SA6002", + Run: callChecker(checkSyncPoolValueRules), + Doc: docSA6002, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, + Flags: newFlagSet(), + }, + "SA6003": { + Name: "SA6003", + Run: CheckRangeStringRunes, + Doc: docSA6003, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "SA6005": { + Name: "SA6005", + Run: CheckToLowerToUpperComparison, + Doc: docSA6005, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + + "SA9001": { + Name: "SA9001", + Run: CheckDubiousDeferInChannelRangeLoop, + Doc: docSA9001, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA9002": { + Name: "SA9002", + Run: CheckNonOctalFileMode, + Doc: docSA9002, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + "SA9003": { + Name: "SA9003", + Run: CheckEmptyBranch, + Doc: docSA9003, + Requires: []*analysis.Analyzer{buildssa.Analyzer, lint.TokenFileAnalyzer, lint.IsGeneratedAnalyzer}, + Flags: newFlagSet(), + }, + "SA9004": { + Name: "SA9004", + Run: CheckMissingEnumTypesInDeclaration, + Doc: docSA9004, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, + // Filtering generated code because it may include empty structs generated from data models. + "SA9005": { + Name: "SA9005", + Run: callChecker(checkNoopMarshal), + Doc: docSA9005, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, +} diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 07a39ef45..e0153e210 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -328,6 +328,7 @@ Available since 2017.1 ` +//lint:ignore U1000 This check is currently disabled var docSA4005 = `Field assignment that will never be observed. Did you mean to use a pointer receiver? 
Available since @@ -543,6 +544,7 @@ Available since 2017.1 ` +//lint:ignore U1000 This check is currently disabled var docSA5006 = `Slice index out of bounds Available since @@ -565,6 +567,18 @@ Available since 2017.1 ` +var docSA5008 = `Invalid struct tag + +Available since + Unreleased +` + +var docSA5009 = `Invalid Printf call + +Available since + Unreleased +` + var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile Available since diff --git a/staticcheck/knowledge.go b/staticcheck/knowledge.go new file mode 100644 index 000000000..4c12b866a --- /dev/null +++ b/staticcheck/knowledge.go @@ -0,0 +1,25 @@ +package staticcheck + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/ssa" + "honnef.co/go/tools/staticcheck/vrp" +) + +var valueRangesAnalyzer = &analysis.Analyzer{ + Name: "vrp", + Doc: "calculate value ranges of functions", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[*ssa.Function]vrp.Ranges{} + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + vr := vrp.BuildGraph(ssafn).Solve() + m[ssafn] = vr + } + return m, nil + }, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + ResultType: reflect.TypeOf(map[*ssa.Function]vrp.Ranges{}), +} diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 1300eff8c..84119d18f 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -15,13 +15,13 @@ import ( "sort" "strconv" "strings" - "sync" texttemplate "text/template" "unicode" . "honnef.co/go/tools/arg" "honnef.co/go/tools/deprecated" "honnef.co/go/tools/functions" + "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . 
"honnef.co/go/tools/lint/lintdsl" @@ -30,8 +30,10 @@ import ( "honnef.co/go/tools/ssautil" "honnef.co/go/tools/staticcheck/vrp" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/astutil" - "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ast/inspector" ) func validRegexp(call *Call) { @@ -106,7 +108,7 @@ var ( checkEncodingBinaryRules = map[string]CallCheck{ "encoding/binary.Write": func(call *Call) { arg := call.Args[Arg("encoding/binary.Write.data")] - if !CanBinaryMarshal(call.Job, arg.Value) { + if !CanBinaryMarshal(call.Pass, arg.Value) { arg.Invalid(fmt.Sprintf("value of type %s cannot be used with binary.Write", arg.Value.Value.Type())) } }, @@ -311,8 +313,7 @@ var verbs = [...]verbFlag{ } func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { - var elem func(T types.Type, verb rune) ([]types.Type, bool) - elem = func(T types.Type, verb rune) ([]types.Type, bool) { + elem := func(T types.Type, verb rune) ([]types.Type, bool) { if verbs[verb]&noRecurse != 0 { return []types.Type{T}, false } @@ -623,7 +624,7 @@ func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { } func checkAtomicAlignmentImpl(call *Call) { - sizes := call.Job.Pkg.TypesSizes + sizes := call.Pass.TypesSizes if sizes.Sizeof(types.Typ[types.Uintptr]) != 4 { // Not running on a 32-bit platform return @@ -650,6 +651,9 @@ func checkAtomicAlignmentImpl(call *Call) { func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { return func(call *Call) { + if IsGenerated(call.Pass, call.Instr.Pos()) { + return + } arg := call.Args[argN] T := arg.Value.Value.Type() Ts, ok := Dereference(T).Underlying().(*types.Struct) @@ -731,108 +735,12 @@ func fieldPath(start types.Type, indices []int) string { return p } -type Checker struct { - CheckGenerated bool - funcDescs *functions.Descriptions - deprecatedPkgs map[*types.Package]string - deprecatedObjs map[types.Object]string -} - -func NewChecker() *Checker { - return &Checker{} -} - -func (*Checker) Name() string { return "staticcheck" } -func (*Checker) Prefix() string { return "SA" } - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "SA1000", FilterGenerated: false, Fn: c.callChecker(checkRegexpRules), Doc: docSA1000}, - {ID: "SA1001", FilterGenerated: false, Fn: c.CheckTemplate, Doc: docSA1001}, - {ID: "SA1002", FilterGenerated: false, Fn: c.callChecker(checkTimeParseRules), Doc: docSA1002}, - {ID: "SA1003", FilterGenerated: false, Fn: c.callChecker(checkEncodingBinaryRules), Doc: docSA1003}, - {ID: "SA1004", FilterGenerated: false, Fn: c.CheckTimeSleepConstant, Doc: docSA1004}, - {ID: "SA1005", FilterGenerated: false, Fn: c.CheckExec, Doc: docSA1005}, - {ID: "SA1006", FilterGenerated: false, Fn: c.CheckUnsafePrintf, Doc: docSA1006}, - {ID: "SA1007", FilterGenerated: false, Fn: c.callChecker(checkURLsRules), Doc: docSA1007}, - {ID: "SA1008", FilterGenerated: false, Fn: c.CheckCanonicalHeaderKey, Doc: docSA1008}, - {ID: "SA1010", FilterGenerated: false, Fn: c.callChecker(checkRegexpFindAllRules), Doc: docSA1010}, - {ID: "SA1011", FilterGenerated: false, Fn: c.callChecker(checkUTF8CutsetRules), Doc: docSA1011}, - {ID: "SA1012", FilterGenerated: false, Fn: c.CheckNilContext, Doc: docSA1012}, - {ID: "SA1013", FilterGenerated: false, Fn: c.CheckSeeker, Doc: docSA1013}, - {ID: "SA1014", FilterGenerated: false, Fn: c.callChecker(checkUnmarshalPointerRules), Doc: docSA1014}, - {ID: "SA1015", FilterGenerated: false, Fn: c.CheckLeakyTimeTick, Doc: docSA1015}, 
- {ID: "SA1016", FilterGenerated: false, Fn: c.CheckUntrappableSignal, Doc: docSA1016}, - {ID: "SA1017", FilterGenerated: false, Fn: c.callChecker(checkUnbufferedSignalChanRules), Doc: docSA1017}, - {ID: "SA1018", FilterGenerated: false, Fn: c.callChecker(checkStringsReplaceZeroRules), Doc: docSA1018}, - {ID: "SA1019", FilterGenerated: false, Fn: c.CheckDeprecated, Doc: docSA1019}, - {ID: "SA1020", FilterGenerated: false, Fn: c.callChecker(checkListenAddressRules), Doc: docSA1020}, - {ID: "SA1021", FilterGenerated: false, Fn: c.callChecker(checkBytesEqualIPRules), Doc: docSA1021}, - {ID: "SA1023", FilterGenerated: false, Fn: c.CheckWriterBufferModified, Doc: docSA1023}, - {ID: "SA1024", FilterGenerated: false, Fn: c.callChecker(checkUniqueCutsetRules), Doc: docSA1024}, - {ID: "SA1025", FilterGenerated: false, Fn: c.CheckTimerResetReturnValue, Doc: docSA1025}, - {ID: "SA1026", FilterGenerated: false, Fn: c.callChecker(checkUnsupportedMarshal), Doc: docSA1026}, - {ID: "SA1027", FilterGenerated: false, Fn: c.callChecker(checkAtomicAlignment), Doc: docSA1027}, - - {ID: "SA2000", FilterGenerated: false, Fn: c.CheckWaitgroupAdd, Doc: docSA2000}, - {ID: "SA2001", FilterGenerated: false, Fn: c.CheckEmptyCriticalSection, Doc: docSA2001}, - {ID: "SA2002", FilterGenerated: false, Fn: c.CheckConcurrentTesting, Doc: docSA2002}, - {ID: "SA2003", FilterGenerated: false, Fn: c.CheckDeferLock, Doc: docSA2003}, - - {ID: "SA3000", FilterGenerated: false, Fn: c.CheckTestMainExit, Doc: docSA3000}, - {ID: "SA3001", FilterGenerated: false, Fn: c.CheckBenchmarkN, Doc: docSA3001}, - - {ID: "SA4000", FilterGenerated: false, Fn: c.CheckLhsRhsIdentical, Doc: docSA4000}, - {ID: "SA4001", FilterGenerated: false, Fn: c.CheckIneffectiveCopy, Doc: docSA4001}, - {ID: "SA4002", FilterGenerated: false, Fn: c.CheckDiffSizeComparison, Doc: docSA4002}, - {ID: "SA4003", FilterGenerated: false, Fn: c.CheckExtremeComparison, Doc: docSA4003}, - {ID: "SA4004", FilterGenerated: false, Fn: c.CheckIneffectiveLoop, Doc: docSA4004}, - {ID: "SA4006", FilterGenerated: false, Fn: c.CheckUnreadVariableValues, Doc: docSA4006}, - {ID: "SA4008", FilterGenerated: false, Fn: c.CheckLoopCondition, Doc: docSA4008}, - {ID: "SA4009", FilterGenerated: false, Fn: c.CheckArgOverwritten, Doc: docSA4009}, - {ID: "SA4010", FilterGenerated: false, Fn: c.CheckIneffectiveAppend, Doc: docSA4010}, - {ID: "SA4011", FilterGenerated: false, Fn: c.CheckScopedBreak, Doc: docSA4011}, - {ID: "SA4012", FilterGenerated: false, Fn: c.CheckNaNComparison, Doc: docSA4012}, - {ID: "SA4013", FilterGenerated: false, Fn: c.CheckDoubleNegation, Doc: docSA4013}, - {ID: "SA4014", FilterGenerated: false, Fn: c.CheckRepeatedIfElse, Doc: docSA4014}, - {ID: "SA4015", FilterGenerated: false, Fn: c.callChecker(checkMathIntRules), Doc: docSA4015}, - {ID: "SA4016", FilterGenerated: false, Fn: c.CheckSillyBitwiseOps, Doc: docSA4016}, - {ID: "SA4017", FilterGenerated: false, Fn: c.CheckPureFunctions, Doc: docSA4017}, - {ID: "SA4018", FilterGenerated: true, Fn: c.CheckSelfAssignment, Doc: docSA4018}, - {ID: "SA4019", FilterGenerated: true, Fn: c.CheckDuplicateBuildConstraints, Doc: docSA4019}, - {ID: "SA4020", FilterGenerated: false, Fn: c.CheckUnreachableTypeCases, Doc: docSA4020}, - {ID: "SA4021", FilterGenerated: true, Fn: c.CheckSingleArgAppend, Doc: docSA4021}, - - {ID: "SA5000", FilterGenerated: false, Fn: c.CheckNilMaps, Doc: docSA5000}, - {ID: "SA5001", FilterGenerated: false, Fn: c.CheckEarlyDefer, Doc: docSA5001}, - {ID: "SA5002", FilterGenerated: false, Fn: 
c.CheckInfiniteEmptyLoop, Doc: docSA5002}, - {ID: "SA5003", FilterGenerated: false, Fn: c.CheckDeferInInfiniteLoop, Doc: docSA5003}, - {ID: "SA5004", FilterGenerated: false, Fn: c.CheckLoopEmptyDefault, Doc: docSA5004}, - {ID: "SA5005", FilterGenerated: false, Fn: c.CheckCyclicFinalizer, Doc: docSA5005}, - {ID: "SA5007", FilterGenerated: false, Fn: c.CheckInfiniteRecursion, Doc: docSA5007}, - {ID: "SA5008", FilterGenerated: false, Fn: c.CheckStructTags, Doc: ``}, - {ID: "SA5009", FilterGenerated: false, Fn: c.callChecker(checkPrintfRules), Doc: ``}, - - {ID: "SA6000", FilterGenerated: false, Fn: c.callChecker(checkRegexpMatchLoopRules), Doc: docSA6000}, - {ID: "SA6001", FilterGenerated: false, Fn: c.CheckMapBytesKey, Doc: docSA6001}, - {ID: "SA6002", FilterGenerated: false, Fn: c.callChecker(checkSyncPoolValueRules), Doc: docSA6002}, - {ID: "SA6003", FilterGenerated: false, Fn: c.CheckRangeStringRunes, Doc: docSA6003}, - // {ID: "SA6004", FilterGenerated: false, Fn: c.CheckSillyRegexp, Doc: docSA6004}, - {ID: "SA6005", FilterGenerated: false, Fn: c.CheckToLowerToUpperComparison, Doc: docSA6005}, - - {ID: "SA9001", FilterGenerated: false, Fn: c.CheckDubiousDeferInChannelRangeLoop, Doc: docSA9001}, - {ID: "SA9002", FilterGenerated: false, Fn: c.CheckNonOctalFileMode, Doc: docSA9002}, - {ID: "SA9003", FilterGenerated: false, Fn: c.CheckEmptyBranch, Doc: docSA9003}, - {ID: "SA9004", FilterGenerated: false, Fn: c.CheckMissingEnumTypesInDeclaration, Doc: docSA9004}, - // Filtering generated code because it may include empty structs generated from data models. - {ID: "SA9005", FilterGenerated: true, Fn: c.callChecker(checkNoopMarshal), Doc: docSA9005}, - } - - // "SA5006": c.CheckSliceOutOfBounds, - // "SA4007": c.CheckPredeterminedBooleanExprs, -} - -func (c *Checker) findDeprecated(prog *lint.Program) { +type IsDeprecated struct{ Msg string } + +func (*IsDeprecated) AFact() {} +func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg } + +func checkDeprecatedMark(pass *analysis.Pass) { var names []*ast.Ident extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { @@ -851,116 +759,90 @@ func (c *Checker) findDeprecated(prog *lint.Program) { } return "" } - doDocs := func(pkg *packages.Package, names []*ast.Ident, docs []*ast.CommentGroup) { + doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { alt := extractDeprecatedMessage(docs) if alt == "" { return } for _, name := range names { - obj := pkg.TypesInfo.ObjectOf(name) - c.deprecatedObjs[obj] = alt + obj := pass.TypesInfo.ObjectOf(name) + pass.ExportObjectFact(obj, &IsDeprecated{alt}) } } - for _, pkg := range prog.AllPackages { - var docs []*ast.CommentGroup - for _, f := range pkg.Syntax { - docs = append(docs, f.Doc) - } - if alt := extractDeprecatedMessage(docs); alt != "" { - // Don't mark package syscall as deprecated, even though - // it is. A lot of people still use it for simple - // constants like SIGKILL, and I am not comfortable - // telling them to use x/sys for that. - if pkg.PkgPath != "syscall" { - c.deprecatedPkgs[pkg.Types] = alt - } + var docs []*ast.CommentGroup + for _, f := range pass.Files { + docs = append(docs, f.Doc) + } + if alt := extractDeprecatedMessage(docs); alt != "" { + // Don't mark package syscall as deprecated, even though + // it is. A lot of people still use it for simple + // constants like SIGKILL, and I am not comfortable + // telling them to use x/sys for that. 
+ if pass.Pkg.Path() != "syscall" { + pass.ExportPackageFact(&IsDeprecated{alt}) } + } - docs = docs[:0] - for _, f := range pkg.Syntax { - fn := func(node ast.Node) bool { - if node == nil { - return true - } - var ret bool - switch node := node.(type) { - case *ast.GenDecl: - switch node.Tok { - case token.TYPE, token.CONST, token.VAR: - docs = append(docs, node.Doc) - return true - default: - return false - } - case *ast.FuncDecl: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = false - case *ast.TypeSpec: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = true - case *ast.ValueSpec: + docs = docs[:0] + for _, f := range pass.Files { + fn := func(node ast.Node) bool { + if node == nil { + return true + } + var ret bool + switch node := node.(type) { + case *ast.GenDecl: + switch node.Tok { + case token.TYPE, token.CONST, token.VAR: docs = append(docs, node.Doc) - names = node.Names - ret = false - case *ast.File: return true - case *ast.StructType: - for _, field := range node.Fields.List { - doDocs(pkg, field.Names, []*ast.CommentGroup{field.Doc}) - } - return false - case *ast.InterfaceType: - for _, field := range node.Methods.List { - doDocs(pkg, field.Names, []*ast.CommentGroup{field.Doc}) - } - return false default: return false } - if len(names) == 0 || len(docs) == 0 { - return ret + case *ast.FuncDecl: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = false + case *ast.TypeSpec: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = true + case *ast.ValueSpec: + docs = append(docs, node.Doc) + names = node.Names + ret = false + case *ast.File: + return true + case *ast.StructType: + for _, field := range node.Fields.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + case *ast.InterfaceType: + for _, field := range node.Methods.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) } - doDocs(pkg, names, docs) - - docs = docs[:0] - names = nil + return false + default: + return false + } + if len(names) == 0 || len(docs) == 0 { return ret } - ast.Inspect(f, fn) - } - } -} + doDocs(names, docs) -func (c *Checker) Init(prog *lint.Program) { - wg := &sync.WaitGroup{} - wg.Add(2) - go func() { - c.funcDescs = functions.NewDescriptions(prog.SSA) - for _, fn := range prog.AllFunctions { - if fn.Blocks != nil { - applyStdlibKnowledge(fn) - ssa.OptimizeBlocks(fn) - } + docs = docs[:0] + names = nil + return ret } - wg.Done() - }() - - go func() { - c.deprecatedPkgs = map[*types.Package]string{} - c.deprecatedObjs = map[types.Object]string{} - c.findDeprecated(prog) - wg.Done() - }() - - wg.Wait() + ast.Inspect(f, fn) + } } -func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { - sets := c.funcDescs.Get(b.Parent()).Loops +func isInLoop(b *ssa.BasicBlock) bool { + sets := functions.FindLoops(b.Parent()) for _, set := range sets { if set[b] { return true @@ -969,105 +851,51 @@ func (c *Checker) isInLoop(b *ssa.BasicBlock) bool { return false } -func applyStdlibKnowledge(fn *ssa.Function) { - if len(fn.Blocks) == 0 { - return - } - - // comma-ok receiving from a time.Tick channel will never return - // ok == false, so any branching on the value of ok can be - // replaced with an unconditional jump. This will primarily match - // `for range time.Tick(x)` loops, but it can also match - // user-written code. 
- for _, block := range fn.Blocks { - if len(block.Instrs) < 3 { - continue - } - if len(block.Succs) != 2 { - continue - } - var instrs []*ssa.Instruction - for i, ins := range block.Instrs { - if _, ok := ins.(*ssa.DebugRef); ok { - continue - } - instrs = append(instrs, &block.Instrs[i]) - } - - for i, ins := range instrs { - unop, ok := (*ins).(*ssa.UnOp) - if !ok || unop.Op != token.ARROW { - continue - } - call, ok := unop.X.(*ssa.Call) - if !ok { - continue - } - if !IsCallTo(call.Common(), "time.Tick") { - continue - } - ex, ok := (*instrs[i+1]).(*ssa.Extract) - if !ok || ex.Tuple != unop || ex.Index != 1 { - continue - } - - ifstmt, ok := (*instrs[i+2]).(*ssa.If) - if !ok || ifstmt.Cond != ex { - continue - } - - *instrs[i+2] = ssa.NewJump(block) - succ := block.Succs[1] - block.Succs = block.Succs[0:1] - succ.RemovePred(block) - } - } -} - -func (c *Checker) CheckUntrappableSignal(j *lint.Job) { +func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAnyAST(j, call, + if !IsCallToAnyAST(pass, call, "os/signal.Ignore", "os/signal.Notify", "os/signal.Reset") { return } for _, arg := range call.Args { - if conv, ok := arg.(*ast.CallExpr); ok && isName(j, conv.Fun, "os.Signal") { + if conv, ok := arg.(*ast.CallExpr); ok && isName(pass, conv.Fun, "os.Signal") { arg = conv.Args[0] } - if isName(j, arg, "os.Kill") || isName(j, arg, "syscall.SIGKILL") { - j.Errorf(arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(j, arg)) + if isName(pass, arg, "os.Kill") || isName(pass, arg, "syscall.SIGKILL") { + pass.Reportf(arg.Pos(), "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(pass, arg)) } - if isName(j, arg, "syscall.SIGSTOP") { - j.Errorf(arg, "%s signal cannot be trapped", Render(j, arg)) + if isName(pass, arg, "syscall.SIGSTOP") { + pass.Reportf(arg.Pos(), "%s signal cannot be trapped", Render(pass, arg)) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTemplate(j *lint.Job) { +func CheckTemplate(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) var kind string - if IsCallToAST(j, call, "(*text/template.Template).Parse") { + if IsCallToAST(pass, call, "(*text/template.Template).Parse") { kind = "text" - } else if IsCallToAST(j, call, "(*html/template.Template).Parse") { + } else if IsCallToAST(pass, call, "(*html/template.Template).Parse") { kind = "html" } else { return } sel := call.Fun.(*ast.SelectorExpr) - if !IsCallToAST(j, sel.X, "text/template.New") && - !IsCallToAST(j, sel.X, "html/template.New") { + if !IsCallToAST(pass, sel.X, "text/template.New") && + !IsCallToAST(pass, sel.X, "html/template.New") { // TODO(dh): this is a cheap workaround for templates with // different delims. 
A better solution with less false // negatives would use data flow analysis to see where the // template comes from and where it has been return } - s, ok := ExprToString(j, call.Args[Arg("(*text/template.Template).Parse.text")]) + s, ok := ExprToString(pass, call.Args[Arg("(*text/template.Template).Parse.text")]) if !ok { return } @@ -1081,17 +909,18 @@ func (c *Checker) CheckTemplate(j *lint.Job) { if err != nil { // TODO(dominikh): whitelist other parse errors, if any if strings.Contains(err.Error(), "unexpected") { - j.Errorf(call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err) + pass.Reportf(call.Args[Arg("(*text/template.Template).Parse.text")].Pos(), "%s", err) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { +func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "time.Sleep") { + if !IsCallToAST(pass, call, "time.Sleep") { return } lit, ok := call.Args[Arg("time.Sleep.d")].(*ast.BasicLit) @@ -1112,13 +941,14 @@ func (c *Checker) CheckTimeSleepConstant(j *lint.Job) { if n != 1 { recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n) } - j.Errorf(call.Args[Arg("time.Sleep.d")], + pass.Reportf(call.Args[Arg("time.Sleep.d")].Pos(), "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { +func CheckWaitgroupAdd(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { g := node.(*ast.GoStmt) fun, ok := g.Call.Fun.(*ast.FuncLit) @@ -1140,19 +970,20 @@ func (c *Checker) CheckWaitgroupAdd(j *lint.Job) { if !ok { return } - fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) + fn, ok := pass.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return } if lint.FuncName(fn) == "(*sync.WaitGroup).Add" { - j.Errorf(sel, "should call %s before starting the goroutine to avoid a race", - Render(j, stmt)) + pass.Reportf(sel.Pos(), "should call %s before starting the goroutine to avoid a race", + Render(pass, stmt)) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GoStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { +func CheckInfiniteEmptyLoop(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) if len(loop.Body.List) != 0 || loop.Post != nil { @@ -1181,21 +1012,22 @@ func (c *Checker) CheckInfiniteEmptyLoop(j *lint.Job) { return } if ident, ok := loop.Cond.(*ast.Ident); ok { - if k, ok := j.Pkg.TypesInfo.ObjectOf(ident).(*types.Const); ok { + if k, ok := pass.TypesInfo.ObjectOf(ident).(*types.Const); ok { if !constant.BoolVal(k.Val()) { // don't flag `for false {}` loops. They're a debug aid. 
return } } } - j.Errorf(loop, "loop condition never changes or has a race condition") + pass.Reportf(loop.Pos(), "loop condition never changes or has a race condition") } - j.Errorf(loop, "this loop will spin, using 100%% CPU") + pass.Reportf(loop.Pos(), "this loop will spin, using 100%% CPU") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { +func CheckDeferInInfiniteLoop(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { mightExit := false var defers []ast.Stmt @@ -1228,16 +1060,17 @@ func (c *Checker) CheckDeferInInfiniteLoop(j *lint.Job) { return } for _, stmt := range defers { - j.Errorf(stmt, "defers in this infinite loop will never run") + pass.Reportf(stmt.Pos(), "defers in this infinite loop will never run") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { +func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) - typ := j.Pkg.TypesInfo.TypeOf(loop.X) + typ := pass.TypesInfo.TypeOf(loop.X) _, ok := typ.Underlying().(*types.Chan) if !ok { return @@ -1245,7 +1078,7 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { fn2 := func(node ast.Node) bool { switch stmt := node.(type) { case *ast.DeferStmt: - j.Errorf(stmt, "defers in this range loop won't run unless the channel gets closed") + pass.Reportf(stmt.Pos(), "defers in this range loop won't run unless the channel gets closed") case *ast.FuncLit: // Don't look into function bodies return false @@ -1254,16 +1087,17 @@ func (c *Checker) CheckDubiousDeferInChannelRangeLoop(j *lint.Job) { } ast.Inspect(loop.Body, fn2) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTestMainExit(j *lint.Job) { +func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if !isTestMain(j, node) { + if !isTestMain(pass, node) { return } - arg := j.Pkg.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) + arg := pass.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) callsRun := false fn2 := func(node ast.Node) bool { call, ok := node.(*ast.CallExpr) @@ -1278,7 +1112,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { if !ok { return true } - if arg != j.Pkg.TypesInfo.ObjectOf(ident) { + if arg != pass.TypesInfo.ObjectOf(ident) { return true } if sel.Sel.Name == "Run" { @@ -1291,7 +1125,7 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { callsExit := false fn3 := func(node ast.Node) bool { - if IsCallToAST(j, node, "os.Exit") { + if IsCallToAST(pass, node, "os.Exit") { callsExit = true return false } @@ -1299,13 +1133,14 @@ func (c *Checker) CheckTestMainExit(j *lint.Job) { } ast.Inspect(node.(*ast.FuncDecl).Body, fn3) if !callsExit && callsRun { - j.Errorf(node, "TestMain should call os.Exit to set exit code") + pass.Reportf(node.Pos(), "TestMain should call os.Exit to set exit code") } } - j.Pkg.Inspector.Preorder(nil, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(nil, fn) + return nil, nil } -func isTestMain(j *lint.Job, node ast.Node) bool { +func isTestMain(pass *analysis.Pass, node ast.Node) bool { decl, ok := node.(*ast.FuncDecl) if !ok { return false @@ -1320,29 +1155,30 @@ func isTestMain(j *lint.Job, node ast.Node) bool { if len(arg.Names) != 1 { return false } - return IsOfType(j, arg.Type, "*testing.M") + return IsOfType(pass, arg.Type, "*testing.M") } -func (c *Checker) CheckExec(j *lint.Job) { +func CheckExec(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - if !IsCallToAST(j, call, "os/exec.Command") { + if !IsCallToAST(pass, call, "os/exec.Command") { return } - val, ok := ExprToString(j, call.Args[Arg("os/exec.Command.name")]) + val, ok := ExprToString(pass, call.Args[Arg("os/exec.Command.name")]) if !ok { return } if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { return } - j.Errorf(call.Args[Arg("os/exec.Command.name")], + pass.Reportf(call.Args[Arg("os/exec.Command.name")].Pos(), "first argument to exec.Command looks like a shell command, but a program name or path are expected") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { +func CheckLoopEmptyDefault(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { loop := node.(*ast.ForStmt) if len(loop.Body.List) != 1 || loop.Cond != nil || loop.Init != nil { @@ -1354,19 +1190,20 @@ func (c *Checker) CheckLoopEmptyDefault(j *lint.Job) { } for _, c := range sel.Body.List { if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { - j.Errorf(comm, "should not have an empty default case in a for+select loop. The loop will spin.") + pass.Reportf(comm.Pos(), "should not have an empty default case in a for+select loop. The loop will spin.") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { +func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { op := node.(*ast.BinaryExpr) switch op.Op { case token.EQL, token.NEQ: - if basic, ok := j.Pkg.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok { + if basic, ok := pass.TypesInfo.TypeOf(op.X).Underlying().(*types.Basic); ok { if kind := basic.Kind(); kind == types.Float32 || kind == types.Float64 { // f == f and f != f might be used to check for NaN return @@ -1380,12 +1217,12 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { return } - if Render(j, op.X) != Render(j, op.Y) { + if Render(pass, op.X) != Render(pass, op.Y) { return } l1, ok1 := op.X.(*ast.BasicLit) l2, ok2 := op.Y.(*ast.BasicLit) - if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(j.File(l1)) { + if ok1 && ok2 && l1.Kind == token.INT && l2.Kind == l1.Kind && l1.Value == "0" && l2.Value == l1.Value && IsGenerated(pass, l1.Pos()) { // cgo generates the following function call: // _cgoCheckPointer(_cgoBase0, 0 == 0) – it uses 0 == 0 // instead of true in case the user shadowed the @@ -1398,12 +1235,13 @@ func (c *Checker) CheckLhsRhsIdentical(j *lint.Job) { // 0 == 0 are slim. 
return } - j.Errorf(op, "identical expressions on the left and right side of the '%s' operator", op.Op) + pass.Reportf(op.Pos(), "identical expressions on the left and right side of the '%s' operator", op.Op) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckScopedBreak(j *lint.Job) { +func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { var body *ast.BlockStmt switch node := node.(type) { @@ -1453,21 +1291,22 @@ func (c *Checker) CheckScopedBreak(j *lint.Job) { if !ok || branch.Tok != token.BREAK || branch.Label != nil { continue } - j.Errorf(branch, "ineffective break statement. Did you mean to break out of the outer loop?") + pass.Reportf(branch.Pos(), "ineffective break statement. Did you mean to break out of the outer loop?") } } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckUnsafePrintf(j *lint.Job) { +func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) var arg int - if IsCallToAnyAST(j, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { + if IsCallToAnyAST(pass, call, "fmt.Printf", "fmt.Sprintf", "log.Printf") { arg = Arg("fmt.Printf.format") - } else if IsCallToAnyAST(j, call, "fmt.Fprintf") { + } else if IsCallToAnyAST(pass, call, "fmt.Fprintf") { arg = Arg("fmt.Fprintf.format") } else { return @@ -1480,13 +1319,14 @@ func (c *Checker) CheckUnsafePrintf(j *lint.Job) { default: return } - j.Errorf(call.Args[arg], + pass.Reportf(call.Args[arg].Pos(), "printf-style function with dynamic format string and no further arguments should use print-style function instead") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckEarlyDefer(j *lint.Job) { +func CheckEarlyDefer(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { block := node.(*ast.BlockStmt) if len(block.List) < 2 { @@ -1513,7 +1353,7 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if !ok { continue } - sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) + sig, ok := pass.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { continue } @@ -1548,10 +1388,11 @@ func (c *Checker) CheckEarlyDefer(j *lint.Job) { if sel.Sel.Name != "Close" { continue } - j.Errorf(def, "should check returned error before deferring %s", Render(j, def.Call)) + pass.Reportf(def.Pos(), "should check returned error before deferring %s", Render(pass, def.Call)) } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } func selectorX(sel *ast.SelectorExpr) ast.Node { @@ -1563,7 +1404,7 @@ func selectorX(sel *ast.SelectorExpr) ast.Node { } } -func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { +func CheckEmptyCriticalSection(pass *analysis.Pass) (interface{}, error) { // Initially it might seem like this check would be easier to // implement in SSA. After all, we're only checking for two // consecutive method calls. 
In reality, however, there may be any @@ -1589,7 +1430,7 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { return nil, "", false } - fn, ok := j.Pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) + fn, ok := pass.TypesInfo.ObjectOf(sel.Sel).(*types.Func) if !ok { return nil, "", false } @@ -1610,44 +1451,47 @@ func (c *Checker) CheckEmptyCriticalSection(j *lint.Job) { sel1, method1, ok1 := mutexParams(block.List[i]) sel2, method2, ok2 := mutexParams(block.List[i+1]) - if !ok1 || !ok2 || Render(j, sel1) != Render(j, sel2) { + if !ok1 || !ok2 || Render(pass, sel1) != Render(pass, sel2) { continue } if (method1 == "Lock" && method2 == "Unlock") || (method1 == "RLock" && method2 == "RUnlock") { - j.Errorf(block.List[i+1], "empty critical section") + pass.Reportf(block.List[i+1].Pos(), "empty critical section") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) + return nil, nil } // cgo produces code like fn(&*_Cvar_kSomeCallbacks) which we don't // want to flag. var cgoIdent = regexp.MustCompile(`^_C(func|var)_.+$`) -func (c *Checker) CheckIneffectiveCopy(j *lint.Job) { +func CheckIneffectiveCopy(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { if unary, ok := node.(*ast.UnaryExpr); ok { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { ident, ok := star.X.(*ast.Ident) if !ok || !cgoIdent.MatchString(ident.Name) { - j.Errorf(unary, "&*x will be simplified to x. It will not copy x.") + pass.Reportf(unary.Pos(), "&*x will be simplified to x. It will not copy x.") } } } if star, ok := node.(*ast.StarExpr); ok { if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { - j.Errorf(star, "*&x will be simplified to x. It will not copy x.") + pass.Reportf(star.Pos(), "*&x will be simplified to x. 
It will not copy x.") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.StarExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckDiffSizeComparison(pass *analysis.Pass) (interface{}, error) { + ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges) + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, b := range ssafn.Blocks { for _, ins := range b.Instrs { binop, ok := ins.(*ssa.BinOp) @@ -1662,22 +1506,26 @@ func (c *Checker) CheckDiffSizeComparison(j *lint.Job) { if !ok1 && !ok2 { continue } - r := c.funcDescs.Get(ssafn).Ranges + r := ranges[ssafn] r1, ok1 := r.Get(binop.X).(vrp.StringInterval) r2, ok2 := r.Get(binop.Y).(vrp.StringInterval) if !ok1 || !ok2 { continue } if r1.Length.Intersection(r2.Length).Empty() { - j.Errorf(binop, "comparing strings of different sizes for equality will always return false") + pass.Reportf(binop.Pos(), "comparing strings of different sizes for equality will always return false") } } } } + return nil, nil } -func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { - fn := func(node ast.Node, _ bool) bool { +func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { + fn := func(node ast.Node, push bool) bool { + if !push { + return false + } assign, ok := node.(*ast.AssignStmt) if ok { // TODO(dh): This risks missing some Header reads, for @@ -1688,7 +1536,7 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { continue } - if IsOfType(j, op.X, "net/http.Header") { + if IsOfType(pass, op.X, "net/http.Header") { return false } } @@ -1698,23 +1546,24 @@ func (c *Checker) CheckCanonicalHeaderKey(j *lint.Job) { if !ok { return true } - if !IsOfType(j, op.X, "net/http.Header") { + if !IsOfType(pass, op.X, "net/http.Header") { return true } - s, ok := ExprToString(j, op.Index) + s, ok := ExprToString(pass, op.Index) if !ok { return true } if s == http.CanonicalHeaderKey(s) { return true } - j.Errorf(op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) + pass.Reportf(op.Pos(), "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } - j.Pkg.Inspector.Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckBenchmarkN(j *lint.Job) { +func CheckBenchmarkN(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { assign := node.(*ast.AssignStmt) if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { @@ -1727,16 +1576,17 @@ func (c *Checker) CheckBenchmarkN(j *lint.Job) { if sel.Sel.Name != "N" { return } - if !IsOfType(j, sel.X, "*testing.B") { + if !IsOfType(pass, sel.X, "*testing.B") { return } - j.Errorf(assign, "should not assign to %s", Render(j, sel)) + pass.Reportf(assign.Pos(), "should not assign to %s", Render(pass, sel)) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { - for _, ssafn := range 
j.Pkg.InitialFunctions { +func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if IsExample(ssafn) { continue } @@ -1776,7 +1626,7 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { continue } - j.Errorf(lhs, "this value of %s is never used", lhs) + pass.Reportf(lhs.Pos(), "this value of %s is never used", lhs) } } return true @@ -1797,16 +1647,17 @@ func (c *Checker) CheckUnreadVariableValues(j *lint.Job) { return true } if len(FilterDebug(*refs)) == 0 { - j.Errorf(lhs, "this value of %s is never used", lhs) + pass.Reportf(lhs.Pos(), "this value of %s is never used", lhs) } } return true }) } + return nil, nil } -func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckPredeterminedBooleanExprs(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ssabinop, ok := ins.(*ssa.BinOp) @@ -1841,16 +1692,17 @@ func (c *Checker) CheckPredeterminedBooleanExprs(j *lint.Job) { } b := trues != 0 if trues == 0 || trues == len(xs)*len(ys) { - j.Errorf(ssabinop, "binary expression is always %t for all possible values (%s %s %s)", + pass.Reportf(ssabinop.Pos(), "binary expression is always %t for all possible values (%s %s %s)", b, xs, ssabinop.Op, ys) } } } } + return nil, nil } -func (c *Checker) CheckNilMaps(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckNilMaps(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { mu, ok := ins.(*ssa.MapUpdate) @@ -1864,24 +1716,25 @@ func (c *Checker) CheckNilMaps(j *lint.Job) { if c.Value != nil { continue } - j.Errorf(mu, "assignment to nil map") + pass.Reportf(mu.Pos(), "assignment to nil map") } } } + return nil, nil } -func (c *Checker) CheckExtremeComparison(j *lint.Job) { +func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) { isobj := func(expr ast.Expr, name string) bool { sel, ok := expr.(*ast.SelectorExpr) if !ok { return false } - return IsObject(j.Pkg.TypesInfo.ObjectOf(sel.Sel), name) + return IsObject(pass.TypesInfo.ObjectOf(sel.Sel), name) } fn := func(node ast.Node) { expr := node.(*ast.BinaryExpr) - tx := j.Pkg.TypesInfo.TypeOf(expr.X) + tx := pass.TypesInfo.TypeOf(expr.X) basic, ok := tx.Underlying().(*types.Basic) if !ok { return @@ -1921,35 +1774,36 @@ func (c *Checker) CheckExtremeComparison(j *lint.Job) { if (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.Y, max) || (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.X, max) { - j.Errorf(expr, "no value of type %s is greater than %s", basic, max) + pass.Reportf(expr.Pos(), "no value of type %s is greater than %s", basic, max) } if expr.Op == token.LEQ && isobj(expr.Y, max) || expr.Op == token.GEQ && isobj(expr.X, max) { - j.Errorf(expr, "every value of type %s is <= %s", basic, max) + pass.Reportf(expr.Pos(), "every value of type %s is <= %s", basic, max) } if (basic.Info() & types.IsUnsigned) != 0 { if (expr.Op == token.LSS || expr.Op == token.LEQ) && IsIntLiteral(expr.Y, "0") || (expr.Op == token.GTR || expr.Op == token.GEQ) && IsIntLiteral(expr.X, "0") { - j.Errorf(expr, "no value of type %s 
is less than 0", basic) + pass.Reportf(expr.Pos(), "no value of type %s is less than 0", basic) } if expr.Op == token.GEQ && IsIntLiteral(expr.Y, "0") || expr.Op == token.LEQ && IsIntLiteral(expr.X, "0") { - j.Errorf(expr, "every value of type %s is >= 0", basic) + pass.Reportf(expr.Pos(), "every value of type %s is >= 0", basic) } } else { if (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.Y, min) || (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.X, min) { - j.Errorf(expr, "no value of type %s is less than %s", basic, min) + pass.Reportf(expr.Pos(), "no value of type %s is less than %s", basic, min) } if expr.Op == token.GEQ && isobj(expr.Y, min) || expr.Op == token.LEQ && isobj(expr.X, min) { - j.Errorf(expr, "every value of type %s is >= %s", basic, min) + pass.Reportf(expr.Pos(), "every value of type %s is >= %s", basic, min) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ssa.Const, bool) { @@ -1993,8 +1847,8 @@ func consts(val ssa.Value, out []*ssa.Const, visitedPhis map[string]bool) ([]*ss return uniq, true } -func (c *Checker) CheckLoopCondition(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { fn := func(node ast.Node) bool { loop, ok := node.(*ast.ForStmt) if !ok { @@ -2050,16 +1904,17 @@ func (c *Checker) CheckLoopCondition(j *lint.Job) { case *ssa.UnOp: return true } - j.Errorf(cond, "variable in loop condition never changes") + pass.Reportf(cond.Pos(), "variable in loop condition never changes") return true } Inspect(ssafn.Syntax(), fn) } + return nil, nil } -func (c *Checker) CheckArgOverwritten(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { fn := func(node ast.Node) bool { var typ *ast.FuncType var body *ast.BlockStmt @@ -2079,7 +1934,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { } for _, field := range typ.Params.List { for _, arg := range field.Names { - obj := j.Pkg.TypesInfo.ObjectOf(arg) + obj := pass.TypesInfo.ObjectOf(arg) var ssaobj *ssa.Parameter for _, param := range ssafn.Params { if param.Object() == obj { @@ -2109,7 +1964,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { if !ok { continue } - if j.Pkg.TypesInfo.ObjectOf(ident) == obj { + if pass.TypesInfo.ObjectOf(ident) == obj { assigned = true return false } @@ -2117,7 +1972,7 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { return true }) if assigned { - j.Errorf(arg, "argument %s is overwritten before first use", arg) + pass.Reportf(arg.Pos(), "argument %s is overwritten before first use", arg) } } } @@ -2125,9 +1980,10 @@ func (c *Checker) CheckArgOverwritten(j *lint.Job) { } Inspect(ssafn.Syntax(), fn) } + return nil, nil } -func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { +func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { // This check detects some, but not all unconditional loop exits. 
// We give up in the following cases: // @@ -2167,7 +2023,7 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { body = node.Body loop = node case *ast.RangeStmt: - typ := j.Pkg.TypesInfo.TypeOf(node.X) + typ := pass.TypesInfo.TypeOf(node.X) if _, ok := typ.Underlying().(*types.Map); ok { // looping once over a map is a valid pattern for // getting an arbitrary element. @@ -2227,24 +2083,25 @@ func (c *Checker) CheckIneffectiveLoop(j *lint.Job) { return true }) if unconditionalExit != nil { - j.Errorf(unconditionalExit, "the surrounding loop is unconditionally terminated") + pass.Reportf(unconditionalExit.Pos(), "the surrounding loop is unconditionally terminated") } return true }) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckNilContext(j *lint.Job) { +func CheckNilContext(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) if len(call.Args) == 0 { return } - if typ, ok := j.Pkg.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { + if typ, ok := pass.TypesInfo.TypeOf(call.Args[0]).(*types.Basic); !ok || typ.Kind() != types.UntypedNil { return } - sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) + sig, ok := pass.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { return } @@ -2254,13 +2111,14 @@ func (c *Checker) CheckNilContext(j *lint.Job) { if !IsType(sig.Params().At(0).Type(), "context.Context") { return } - j.Errorf(call.Args[0], + pass.Reportf(call.Args[0].Pos(), "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckSeeker(j *lint.Job) { +func CheckSeeker(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) sel, ok := call.Fun.(*ast.SelectorExpr) @@ -2289,12 +2147,13 @@ func (c *Checker) CheckSeeker(j *lint.Job) { if pkg.Name != "io" { return } - j.Errorf(call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") + pass.Reportf(call.Pos(), "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { +func CheckIneffectiveAppend(pass *analysis.Pass) (interface{}, error) { isAppend := func(ins ssa.Value) bool { call, ok := ins.(*ssa.Call) if !ok { @@ -2309,7 +2168,7 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { return true } - for _, ssafn := range j.Pkg.InitialFunctions { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { val, ok := ins.(ssa.Value) @@ -2353,15 +2212,16 @@ func (c *Checker) CheckIneffectiveAppend(j *lint.Job) { } walkRefs(*refs) if !isUsed { - j.Errorf(ins, "this result of append is never used, except maybe in other appends") + pass.Reportf(ins.Pos(), "this result of append is never used, 
except maybe in other appends") } } } } + return nil, nil } -func (c *Checker) CheckConcurrentTesting(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckConcurrentTesting(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { gostmt, ok := ins.(*ssa.Go) @@ -2410,53 +2270,68 @@ func (c *Checker) CheckConcurrentTesting(j *lint.Job) { default: continue } - j.Errorf(gostmt, "the goroutine calls T.%s, which must be called in the same goroutine as the test", name) + pass.Reportf(gostmt.Pos(), "the goroutine calls T.%s, which must be called in the same goroutine as the test", name) } } } } } + return nil, nil } -func (c *Checker) CheckCyclicFinalizer(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - if edge.Callee.Func.RelString(nil) != "runtime.SetFinalizer" { - continue - } - arg0 := edge.Site.Common().Args[Arg("runtime.SetFinalizer.obj")] - if iface, ok := arg0.(*ssa.MakeInterface); ok { - arg0 = iface.X - } - unop, ok := arg0.(*ssa.UnOp) - if !ok { - continue - } - v, ok := unop.X.(*ssa.Alloc) - if !ok { - continue - } - arg1 := edge.Site.Common().Args[Arg("runtime.SetFinalizer.finalizer")] - if iface, ok := arg1.(*ssa.MakeInterface); ok { - arg1 = iface.X - } - mc, ok := arg1.(*ssa.MakeClosure) - if !ok { - continue - } - for _, b := range mc.Bindings { - if b == v { - pos := lint.DisplayPosition(j.Pkg.Fset, mc.Fn.Pos()) - j.Errorf(edge.Site, "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) +func eachCall(ssafn *ssa.Function, fn func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function)) { + for _, b := range ssafn.Blocks { + for _, instr := range b.Instrs { + if site, ok := instr.(ssa.CallInstruction); ok { + if g := site.Common().StaticCallee(); g != nil { + fn(ssafn, site, g) } } } } } -func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckCyclicFinalizer(pass *analysis.Pass) (interface{}, error) { + fn := func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) { + if callee.RelString(nil) != "runtime.SetFinalizer" { + return + } + arg0 := site.Common().Args[Arg("runtime.SetFinalizer.obj")] + if iface, ok := arg0.(*ssa.MakeInterface); ok { + arg0 = iface.X + } + unop, ok := arg0.(*ssa.UnOp) + if !ok { + return + } + v, ok := unop.X.(*ssa.Alloc) + if !ok { + return + } + arg1 := site.Common().Args[Arg("runtime.SetFinalizer.finalizer")] + if iface, ok := arg1.(*ssa.MakeInterface); ok { + arg1 = iface.X + } + mc, ok := arg1.(*ssa.MakeClosure) + if !ok { + return + } + for _, b := range mc.Bindings { + if b == v { + pos := lint.DisplayPosition(pass.Fset, mc.Fn.Pos()) + pass.Reportf(site.Pos(), "the finalizer closes over the object, preventing the finalizer from ever running (at %s)", pos) + } + } + } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + eachCall(ssafn, fn) + } + return nil, nil +} + +/* +func CheckSliceOutOfBounds(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ia, ok := ins.(*ssa.IndexAddr) @@ -2472,15 +2347,17 @@ func (c *Checker) CheckSliceOutOfBounds(j *lint.Job) { continue } if 
idxr.Lower.Cmp(sr.Length.Upper) >= 0 { - j.Errorf(ia, "index out of bounds") + pass.Reportf(ia.Pos(), "index out of bounds") } } } } + return nil, nil } +*/ -func (c *Checker) CheckDeferLock(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckDeferLock(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { instrs := FilterDebug(block.Instrs) if len(instrs) < 2 { @@ -2512,13 +2389,14 @@ func (c *Checker) CheckDeferLock(j *lint.Job) { case "RLock": alt = "RUnlock" } - j.Errorf(nins, "deferring %s right after having locked already; did you mean to defer %s?", name, alt) + pass.Reportf(nins.Pos(), "deferring %s right after having locked already; did you mean to defer %s?", name, alt) } } } + return nil, nil } -func (c *Checker) CheckNaNComparison(j *lint.Job) { +func CheckNaNComparison(pass *analysis.Pass) (interface{}, error) { isNaN := func(v ssa.Value) bool { call, ok := v.(*ssa.Call) if !ok { @@ -2526,7 +2404,7 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { } return IsCallTo(call.Common(), "math.NaN") } - for _, ssafn := range j.Pkg.InitialFunctions { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2534,27 +2412,27 @@ func (c *Checker) CheckNaNComparison(j *lint.Job) { continue } if isNaN(ins.X) || isNaN(ins.Y) { - j.Errorf(ins, "no value is equal to NaN, not even NaN itself") + pass.Reportf(ins.Pos(), "no value is equal to NaN, not even NaN itself") } } } } + return nil, nil } -func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - if edge.Callee != node { - continue +func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + eachCall(ssafn, func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) { + if callee != ssafn { + return } - if _, ok := edge.Site.(*ssa.Go); ok { + if _, ok := site.(*ssa.Go); ok { // Recursively spawning goroutines doesn't consume // stack space infinitely, so don't flag it. 
- continue + return } - block := edge.Site.Block() + block := site.Block() canReturn := false for _, b := range ssafn.Blocks { if block.Dominates(b) { @@ -2569,11 +2447,12 @@ func (c *Checker) CheckInfiniteRecursion(j *lint.Job) { } } if canReturn { - continue + return } - j.Errorf(edge.Site, "infinite recursive call") - } + pass.Reportf(site.Pos(), "infinite recursive call") + }) } + return nil, nil } func objectName(obj types.Object) string { @@ -2591,20 +2470,20 @@ func objectName(obj types.Object) string { return name } -func isName(j *lint.Job, expr ast.Expr, name string) bool { +func isName(pass *analysis.Pass, expr ast.Expr, name string) bool { var obj types.Object switch expr := expr.(type) { case *ast.Ident: - obj = j.Pkg.TypesInfo.ObjectOf(expr) + obj = pass.TypesInfo.ObjectOf(expr) case *ast.SelectorExpr: - obj = j.Pkg.TypesInfo.ObjectOf(expr.Sel) + obj = pass.TypesInfo.ObjectOf(expr.Sel) } return objectName(obj) == name } -func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { - if IsInMain(j, ssafn) || IsInTest(j, ssafn) { +func CheckLeakyTimeTick(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + if IsInMain(pass, ssafn) || IsInTest(pass, ssafn) { continue } for _, block := range ssafn.Blocks { @@ -2613,16 +2492,17 @@ func (c *Checker) CheckLeakyTimeTick(j *lint.Job) { if !ok || !IsCallTo(call.Common(), "time.Tick") { continue } - if c.funcDescs.Get(call.Parent()).Infinite { + if !functions.Terminates(call.Parent()) { continue } - j.Errorf(call, "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") + pass.Reportf(call.Pos(), "using time.Tick leaks the underlying ticker, consider using it only in endless functions, tests and the main package, and use time.NewTicker here") } } } + return nil, nil } -func (c *Checker) CheckDoubleNegation(j *lint.Job) { +func CheckDoubleNegation(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { unary1 := node.(*ast.UnaryExpr) unary2, ok := unary1.X.(*ast.UnaryExpr) @@ -2632,9 +2512,10 @@ func (c *Checker) CheckDoubleNegation(j *lint.Job) { if unary1.Op != token.NOT || unary2.Op != token.NOT { return } - j.Errorf(unary1, "negating a boolean twice has no effect; is this a typo?") + pass.Reportf(unary1.Pos(), "negating a boolean twice has no effect; is this a typo?") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) + return nil, nil } func hasSideEffects(node ast.Node) bool { @@ -2655,7 +2536,7 @@ func hasSideEffects(node ast.Node) bool { return dynamic } -func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { +func CheckRepeatedIfElse(pass *analysis.Pass) (interface{}, error) { seen := map[ast.Node]bool{} var collectConds func(ifstmt *ast.IfStmt, inits []ast.Stmt, conds []ast.Expr) ([]ast.Stmt, []ast.Expr) @@ -2686,18 +2567,19 @@ func (c *Checker) CheckRepeatedIfElse(j *lint.Job) { } counts := map[string]int{} for _, cond := range conds { - s := Render(j, cond) + s := Render(pass, cond) counts[s]++ if counts[s] == 2 { - j.Errorf(cond, "this condition occurs multiple times in this if/else if chain") + pass.Reportf(cond.Pos(), "this condition occurs multiple times in this if/else if chain") } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range ssafn.Blocks { for _, ins := range block.Instrs { ins, ok := ins.(*ssa.BinOp) @@ -2715,7 +2597,7 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { // of a pattern, x<<0, x<<8, x<<16, ... continue } - path, _ := astutil.PathEnclosingInterval(j.File(ins), ins.Pos(), ins.Pos()) + path, _ := astutil.PathEnclosingInterval(File(pass, ins), ins.Pos(), ins.Pos()) if len(path) == 0 { continue } @@ -2725,19 +2607,20 @@ func (c *Checker) CheckSillyBitwiseOps(j *lint.Job) { switch ins.Op { case token.AND: - j.Errorf(ins, "x & 0 always equals 0") + pass.Reportf(ins.Pos(), "x & 0 always equals 0") case token.OR, token.XOR: - j.Errorf(ins, "x %s 0 always equals x", ins.Op) + pass.Reportf(ins.Pos(), "x %s 0 always equals x", ins.Op) } } } } + return nil, nil } -func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { +func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { call := node.(*ast.CallExpr) - sig, ok := j.Pkg.TypesInfo.TypeOf(call.Fun).(*types.Signature) + sig, ok := pass.TypesInfo.TypeOf(call.Fun).(*types.Signature) if !ok { return } @@ -2764,17 +2647,165 @@ func (c *Checker) CheckNonOctalFileMode(j *lint.Job) { if err != nil { continue } - j.Errorf(call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) + pass.Reportf(call.Args[i].Pos(), "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) + } + } + } + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil +} + +type IsPure struct{} + +func (*IsPure) AFact() {} +func (*IsPure) String() string { return "IsPure" } + +var pureStdlib = map[string]struct{}{ + "errors.New": {}, + "fmt.Errorf": {}, + "fmt.Sprintf": {}, + "fmt.Sprint": {}, + "sort.Reverse": {}, + "strings.Map": {}, + "strings.Repeat": {}, + "strings.Replace": {}, + "strings.Title": {}, + "strings.ToLower": {}, + "strings.ToLowerSpecial": {}, + "strings.ToTitle": {}, + "strings.ToTitleSpecial": {}, + "strings.ToUpper": {}, + "strings.ToUpperSpecial": {}, + "strings.Trim": {}, + "strings.TrimFunc": {}, + "strings.TrimLeft": {}, + "strings.TrimLeftFunc": {}, + "strings.TrimPrefix": {}, + "strings.TrimRight": {}, + "strings.TrimRightFunc": {}, + "strings.TrimSpace": {}, + "strings.TrimSuffix": {}, + "(*net/http.Request).WithContext": {}, +} + +func checkPureFunctionsMark(pass *analysis.Pass) { + seen := map[*ssa.Function]struct{}{} + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + var check func(ssafn *ssa.Function) (ret bool) + check = func(ssafn *ssa.Function) (ret bool) { + if ssafn.Object() == nil { + // TODO(dh): support closures + return false + } + if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) { + return true + } + if ssafn.Pkg != ssapkg { + // Function is in another package but wasn't marked as + // pure, ergo it isn't pure + return false + } + // Break recursion + if _, ok := seen[ssafn]; ok { + return false + } + + seen[ssafn] = struct{}{} + defer func() { + if ret { + pass.ExportObjectFact(ssafn.Object(), &IsPure{}) + } + }() + + if functions.IsStub(ssafn) { + return false + } + + 
if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok { + return true + } + + if ssafn.Signature.Results().Len() == 0 { + // A function with no return values is empty or is doing some + // work we cannot see (for example because of build tags); + // don't consider it pure. + return false + } + + for _, param := range ssafn.Params { + if _, ok := param.Type().Underlying().(*types.Basic); !ok { + return false + } + } + + if ssafn.Blocks == nil { + return false + } + checkCall := func(common *ssa.CallCommon) bool { + if common.IsInvoke() { + return false + } + builtin, ok := common.Value.(*ssa.Builtin) + if !ok { + if common.StaticCallee() != ssafn { + if common.StaticCallee() == nil { + return false + } + if !check(common.StaticCallee()) { + return false + } + } + } else { + switch builtin.Name() { + case "len", "cap", "make", "new": + default: + return false + } + } + return true + } + for _, b := range ssafn.Blocks { + for _, ins := range b.Instrs { + switch ins := ins.(type) { + case *ssa.Call: + if !checkCall(ins.Common()) { + return false + } + case *ssa.Defer: + if !checkCall(&ins.Call) { + return false + } + case *ssa.Select: + return false + case *ssa.Send: + return false + case *ssa.Go: + return false + case *ssa.Panic: + return false + case *ssa.Store: + return false + case *ssa.FieldAddr: + return false + case *ssa.UnOp: + if ins.Op == token.MUL || ins.Op == token.AND { + return false + } + } } } + return true + } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + check(ssafn) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) } -func (c *Checker) CheckPureFunctions(j *lint.Job) { +func CheckPureFunctions(pass *analysis.Pass) (interface{}, error) { + checkPureFunctionsMark(pass) fnLoop: - for _, ssafn := range j.Pkg.InitialFunctions { - if IsInTest(j, ssafn) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + if IsInTest(pass, ssafn) { params := ssafn.Signature.Params() for i := 0; i < params.Len(); i++ { param := params.At(i) @@ -2805,157 +2836,174 @@ fnLoop: if callee == nil { continue } - if c.funcDescs.Get(callee).Pure && !c.funcDescs.Get(callee).Stub { - j.Errorf(ins, "%s is a pure function but its return value is ignored", callee.Name()) + if callee.Object() == nil { + // TODO(dh): support anonymous functions + continue + } + if pass.ImportObjectFact(callee.Object(), new(IsPure)) { + pass.Reportf(ins.Pos(), "%s is a pure function but its return value is ignored", callee.Name()) continue } } } } + return nil, nil } -func (c *Checker) isDeprecated(j *lint.Job, ident *ast.Ident) (bool, string) { - obj := j.Pkg.TypesInfo.ObjectOf(ident) +func isDeprecated(pass *analysis.Pass, ident *ast.Ident) (bool, string) { + obj := pass.TypesInfo.ObjectOf(ident) if obj.Pkg() == nil { return false, "" } - alt := c.deprecatedObjs[obj] - return alt != "", alt + var depr IsDeprecated + if pass.ImportObjectFact(obj, &depr) { + return true, depr.Msg + } + return false, "" } -func (c *Checker) CheckDeprecated(j *lint.Job) { +func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { + checkDeprecatedMark(pass) + // Selectors can appear outside of function literals, e.g. when // declaring package level variables. 
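(Illustrative aside, not part of the patch: a minimal sketch of how an object fact such as IsPure or IsDeprecated travels between packages in the go/analysis framework. The analyzer name "noopfuncs" and the isNoop fact are invented for the example; ExportObjectFact, ImportObjectFact and FactTypes are the real API used by the patch.)

package example

import (
	"go/ast"
	"go/types"

	"golang.org/x/tools/go/analysis"
)

// isNoop marks functions with an empty body. Fact types must implement
// analysis.Fact (the AFact method) and be serializable, which is why the
// patch declares IsPure as a plain struct with an AFact method.
type isNoop struct{}

func (*isNoop) AFact()         {}
func (*isNoop) String() string { return "isNoop" }

var Analyzer = &analysis.Analyzer{
	Name:      "noopfuncs",
	Doc:       "toy analyzer demonstrating object facts",
	Run:       run,
	FactTypes: []analysis.Fact{(*isNoop)(nil)},
}

func run(pass *analysis.Pass) (interface{}, error) {
	for _, f := range pass.Files {
		for _, decl := range f.Decls {
			fn, ok := decl.(*ast.FuncDecl)
			if !ok || fn.Body == nil || len(fn.Body.List) != 0 {
				continue
			}
			obj, ok := pass.TypesInfo.ObjectOf(fn.Name).(*types.Func)
			if !ok {
				continue
			}
			// Export the fact; an analysis of a dependent package can query
			// it later with pass.ImportObjectFact(obj, new(isNoop)), much
			// like checkPureFunctionsMark and isDeprecated do above.
			pass.ExportObjectFact(obj, &isNoop{})
		}
	}
	return nil, nil
}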
- var ssafn *ssa.Function + var tfn types.Object stack := 0 fn := func(node ast.Node, push bool) bool { if !push { stack-- - } else { - stack++ + return false } + stack++ if stack == 1 { - ssafn = nil + tfn = nil } if fn, ok := node.(*ast.FuncDecl); ok { - ssafn = j.Pkg.SSA.Prog.FuncValue(j.Pkg.TypesInfo.ObjectOf(fn.Name).(*types.Func)) + tfn = pass.TypesInfo.ObjectOf(fn.Name) } sel, ok := node.(*ast.SelectorExpr) if !ok { return true } - obj := j.Pkg.TypesInfo.ObjectOf(sel.Sel) + obj := pass.TypesInfo.ObjectOf(sel.Sel) if obj.Pkg() == nil { return true } - nodePkg := j.Pkg.Types - if nodePkg == obj.Pkg() || obj.Pkg().Path()+"_test" == nodePkg.Path() { + if pass.Pkg == obj.Pkg() || obj.Pkg().Path()+"_test" == pass.Pkg.Path() { // Don't flag stuff in our own package return true } - if ok, alt := c.isDeprecated(j, sel.Sel); ok { + if ok, alt := isDeprecated(pass, sel.Sel); ok { // Look for the first available alternative, not the first // version something was deprecated in. If a function was // deprecated in Go 1.6, an alternative has been available // already in 1.0, and we're targeting 1.2, it still // makes sense to use the alternative from 1.0, to be // future-proof. - minVersion := deprecated.Stdlib[SelectorName(j, sel)].AlternativeAvailableSince - if !IsGoVersion(j, minVersion) { + minVersion := deprecated.Stdlib[SelectorName(pass, sel)].AlternativeAvailableSince + if !IsGoVersion(pass, minVersion) { return true } - if ssafn != nil { - if _, ok := c.deprecatedObjs[ssafn.Object()]; ok { + if tfn != nil { + var depr IsDeprecated + if pass.ImportObjectFact(tfn, &depr) { // functions that are deprecated may use deprecated // symbols return true } } - j.Errorf(sel, "%s is deprecated: %s", Render(j, sel), alt) + pass.Reportf(sel.Pos(), "%s is deprecated: %s", Render(pass, sel), alt) return true } return true } - for _, f := range j.Pkg.Syntax { + + imps := map[string]*types.Package{} + for _, imp := range pass.Pkg.Imports() { + imps[imp.Path()] = imp + } + for _, f := range pass.Files { ast.Inspect(f, func(node ast.Node) bool { if node, ok := node.(*ast.ImportSpec); ok { p := node.Path.Value path := p[1 : len(p)-1] - imp := j.Pkg.Imports[path] - if alt := c.deprecatedPkgs[imp.Types]; alt != "" { - j.Errorf(node, "Package %s is deprecated: %s", path, alt) + imp := imps[path] + var depr IsDeprecated + if pass.ImportPackageFact(imp, &depr) { + pass.Reportf(node.Pos(), "Package %s is deprecated: %s", path, depr.Msg) } } return true }) } - j.Pkg.Inspector.Nodes(nil, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes(nil, fn) + return nil, nil } -func (c *Checker) callChecker(rules map[string]CallCheck) func(j *lint.Job) { - return func(j *lint.Job) { - c.checkCalls(j, rules) +func callChecker(rules map[string]CallCheck) func(pass *analysis.Pass) (interface{}, error) { + return func(pass *analysis.Pass) (interface{}, error) { + return checkCalls(pass, rules) } } -func (c *Checker) checkCalls(j *lint.Job, rules map[string]CallCheck) { - for _, ssafn := range j.Pkg.InitialFunctions { - node := c.funcDescs.CallGraph.CreateNode(ssafn) - for _, edge := range node.Out { - callee := edge.Callee.Func - obj, ok := callee.Object().(*types.Func) - if !ok { - continue - } +func checkCalls(pass *analysis.Pass, rules map[string]CallCheck) (interface{}, error) { + ranges := pass.ResultOf[valueRangesAnalyzer].(map[*ssa.Function]vrp.Ranges) + fn := func(caller *ssa.Function, site ssa.CallInstruction, callee *ssa.Function) { + obj, ok := callee.Object().(*types.Func) + if !ok { + return + } - r, ok 
:= rules[lint.FuncName(obj)] - if !ok { - continue - } - var args []*Argument - ssaargs := edge.Site.Common().Args - if callee.Signature.Recv() != nil { - ssaargs = ssaargs[1:] - } - for _, arg := range ssaargs { - if iarg, ok := arg.(*ssa.MakeInterface); ok { - arg = iarg.X - } - vr := c.funcDescs.Get(edge.Site.Parent()).Ranges[arg] - args = append(args, &Argument{Value: Value{arg, vr}}) - } - call := &Call{ - Job: j, - Instr: edge.Site, - Args: args, - Checker: c, - Parent: edge.Site.Parent(), + r, ok := rules[lint.FuncName(obj)] + if !ok { + return + } + var args []*Argument + ssaargs := site.Common().Args + if callee.Signature.Recv() != nil { + ssaargs = ssaargs[1:] + } + for _, arg := range ssaargs { + if iarg, ok := arg.(*ssa.MakeInterface); ok { + arg = iarg.X } - r(call) - for idx, arg := range call.Args { - _ = idx - for _, e := range arg.invalids { - // path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos()) - // if len(path) < 2 { - // continue - // } - // astcall, ok := path[0].(*ast.CallExpr) - // if !ok { - // continue - // } - // j.Errorf(astcall.Args[idx], "%s", e) + vr := ranges[site.Parent()][arg] + args = append(args, &Argument{Value: Value{arg, vr}}) + } + call := &Call{ + Pass: pass, + Instr: site, + Args: args, + Parent: site.Parent(), + } + r(call) + for idx, arg := range call.Args { + _ = idx + for _, e := range arg.invalids { + // path, _ := astutil.PathEnclosingInterval(f.File, edge.Site.Pos(), edge.Site.Pos()) + // if len(path) < 2 { + // continue + // } + // astcall, ok := path[0].(*ast.CallExpr) + // if !ok { + // continue + // } + // pass.Reportf(astcall.Args[idx], "%s", e) - j.Errorf(edge.Site, "%s", e) - } - } - for _, e := range call.invalids { - j.Errorf(call.Instr.Common(), "%s", e) + pass.Reportf(site.Pos(), "%s", e) } } + for _, e := range call.invalids { + pass.Reportf(call.Instr.Common().Pos(), "%s", e) + } } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + eachCall(ssafn, fn) + } + return nil, nil } func shortCallName(call *ssa.CallCommon) string { @@ -2975,12 +3023,12 @@ func shortCallName(call *ssa.CallCommon) string { return "" } -func (c *Checker) CheckWriterBufferModified(j *lint.Job) { +func CheckWriterBufferModified(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this might be a good candidate for taint analysis. 
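(Illustrative aside, not part of the patch: the logger type below is invented. Given the signature and buffer checks in the rewritten CheckWriterBufferModified that follows, this is the shape of Write method SA1023 reports.)

package pkg

type logger struct {
	buf []byte
}

// Write has the shape the check looks for: a method named Write with one
// parameter and two results. Both statements below touch the provided
// buffer and would be reported.
func (l *logger) Write(p []byte) (int, error) {
	p[0] = '#'              // store through the provided buffer
	l.buf = append(p, '\n') // append with the provided buffer as first argument
	return len(p), nil
}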
// Taint the argument as MUST_NOT_MODIFY, then propagate that // through functions like bytes.Split - for _, ssafn := range j.Pkg.InitialFunctions { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { sig := ssafn.Signature if ssafn.Name() != "Write" || sig.Recv() == nil || sig.Params().Len() != 1 || sig.Results().Len() != 2 { continue @@ -3010,7 +3058,7 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { if addr.X != ssafn.Params[1] { continue } - j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") + pass.Reportf(ins.Pos(), "io.Writer.Write must not modify the provided buffer, not even temporarily") case *ssa.Call: if !IsCallTo(ins.Common(), "append") { continue @@ -3018,11 +3066,12 @@ func (c *Checker) CheckWriterBufferModified(j *lint.Job) { if ins.Common().Args[0] != ssafn.Params[1] { continue } - j.Errorf(ins, "io.Writer.Write must not modify the provided buffer, not even temporarily") + pass.Reportf(ins.Pos(), "io.Writer.Write must not modify the provided buffer, not even temporarily") } } } } + return nil, nil } func loopedRegexp(name string) CallCheck { @@ -3030,21 +3079,18 @@ func loopedRegexp(name string) CallCheck { if len(extractConsts(call.Args[0].Value.Value)) == 0 { return } - if !call.Checker.isInLoop(call.Instr.Block()) { + if !isInLoop(call.Instr.Block()) { return } call.Invalid(fmt.Sprintf("calling %s in a loop has poor performance, consider using regexp.Compile", name)) } } -func (c *Checker) CheckEmptyBranch(j *lint.Job) { - for _, ssafn := range j.Pkg.InitialFunctions { +func CheckEmptyBranch(pass *analysis.Pass) (interface{}, error) { + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if ssafn.Syntax() == nil { continue } - if IsGenerated(j.File(ssafn.Syntax())) { - continue - } if IsExample(ssafn) { continue } @@ -3058,20 +3104,21 @@ func (c *Checker) CheckEmptyBranch(j *lint.Job) { if !ok || len(b.List) != 0 { return true } - j.Errorf(ifstmt.Else, "empty branch") + ReportfFG(pass, ifstmt.Else.Pos(), "empty branch") } if len(ifstmt.Body.List) != 0 { return true } - j.Errorf(ifstmt, "empty branch") + ReportfFG(pass, ifstmt.Pos(), "empty branch") return true } Inspect(ssafn.Syntax(), fn) } + return nil, nil } -func (c *Checker) CheckMapBytesKey(j *lint.Job) { - for _, fn := range j.Pkg.InitialFunctions { +func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, b := range fn.Blocks { insLoop: for _, ins := range b.Instrs { @@ -3115,31 +3162,33 @@ func (c *Checker) CheckMapBytesKey(j *lint.Job) { if !ident { continue } - j.Errorf(conv, "m[string(key)] would be more efficient than k := string(key); m[k]") + pass.Reportf(conv.Pos(), "m[string(key)] would be more efficient than k := string(key); m[k]") } } } + return nil, nil } -func (c *Checker) CheckRangeStringRunes(j *lint.Job) { - sharedcheck.CheckRangeStringRunes(j) +func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { + return sharedcheck.CheckRangeStringRunes(pass) } -func (c *Checker) CheckSelfAssignment(j *lint.Job) { +func CheckSelfAssignment(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { assign := node.(*ast.AssignStmt) if assign.Tok != token.ASSIGN || len(assign.Lhs) != len(assign.Rhs) { return } for i, stmt := range assign.Lhs { - rlh := Render(j, stmt) - rrh := Render(j, assign.Rhs[i]) + rlh := Render(pass, stmt) + rrh := Render(pass, assign.Rhs[i]) if rlh == 
rrh { - j.Errorf(assign, "self-assignment of %s to %s", rrh, rlh) + ReportfFG(pass, assign.Pos(), "self-assignment of %s to %s", rrh, rlh) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + return nil, nil } func buildTagsIdentical(s1, s2 []string) bool { @@ -3160,8 +3209,8 @@ func buildTagsIdentical(s1, s2 []string) bool { return true } -func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { - for _, f := range job.Pkg.Syntax { +func CheckDuplicateBuildConstraints(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { constraints := buildTags(f) for i, constraint1 := range constraints { for j, constraint2 := range constraints { @@ -3169,19 +3218,20 @@ func (c *Checker) CheckDuplicateBuildConstraints(job *lint.Job) { continue } if buildTagsIdentical(constraint1, constraint2) { - job.Errorf(f, "identical build constraints %q and %q", + ReportfFG(pass, f.Pos(), "identical build constraints %q and %q", strings.Join(constraint1, " "), strings.Join(constraint2, " ")) } } } } + return nil, nil } -func (c *Checker) CheckSillyRegexp(j *lint.Job) { +func CheckSillyRegexp(pass *analysis.Pass) (interface{}, error) { // We could use the rule checking engine for this, but the // arguments aren't really invalid. - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, b := range fn.Blocks { for _, ins := range b.Instrs { call, ok := ins.(*ssa.Call) @@ -3205,13 +3255,14 @@ func (c *Checker) CheckSillyRegexp(j *lint.Job) { if re.Op != syntax.OpLiteral && re.Op != syntax.OpEmptyMatch { continue } - j.Errorf(call, "regular expression does not contain any meta characters") + pass.Reportf(call.Pos(), "regular expression does not contain any meta characters") } } } + return nil, nil } -func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { +func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { decl := node.(*ast.GenDecl) if !decl.Lparen.IsValid() { @@ -3221,7 +3272,7 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { return } - groups := GroupSpecs(j.Pkg.Fset, decl.Specs) + groups := GroupSpecs(pass.Fset, decl.Specs) groupLoop: for _, group := range groups { if len(group) < 2 { @@ -3254,14 +3305,15 @@ func (c *Checker) CheckMissingEnumTypesInDeclaration(j *lint.Job) { continue groupLoop } } - j.Errorf(group[0], "only the first constant in this group has an explicit type") + pass.Reportf(group[0].Pos(), "only the first constant in this group has an explicit type") } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { - for _, fn := range j.Pkg.InitialFunctions { +func CheckTimerResetReturnValue(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { for _, block := range fn.Blocks { for _, ins := range block.Instrs { call, ok := ins.(*ssa.Call) @@ -3314,15 +3366,16 @@ func (c *Checker) CheckTimerResetReturnValue(j *lint.Job) { } if found { - j.Errorf(call, "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring") + 
pass.Reportf(call.Pos(), "it is not possible to use Reset's return value correctly, as there is a race condition between draining the channel and the new timer expiring") } } } } } + return nil, nil } -func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { +func CheckToLowerToUpperComparison(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { binExpr := node.(*ast.BinaryExpr) @@ -3342,9 +3395,9 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { ) var call string - if IsCallToAST(j, binExpr.X, lo) && IsCallToAST(j, binExpr.Y, lo) { + if IsCallToAST(pass, binExpr.X, lo) && IsCallToAST(pass, binExpr.Y, lo) { call = lo - } else if IsCallToAST(j, binExpr.X, up) && IsCallToAST(j, binExpr.Y, up) { + } else if IsCallToAST(pass, binExpr.X, up) && IsCallToAST(pass, binExpr.Y, up) { call = up } else { return @@ -3355,13 +3408,14 @@ func (c *Checker) CheckToLowerToUpperComparison(j *lint.Job) { bang = "!" } - j.Errorf(binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) + pass.Reportf(binExpr.Pos(), "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { +func CheckUnreachableTypeCases(pass *analysis.Pass) (interface{}, error) { // Check if T subsumes V in a type switch. T subsumes V if T is an interface and T's method set is a subset of V's method set. subsumes := func(T, V types.Type) bool { tIface, ok := T.Underlying().(*types.Interface) @@ -3404,7 +3458,7 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { Ts := make([]types.Type, len(cc.List)) for i, expr := range cc.List { - Ts[i] = j.Pkg.TypesInfo.TypeOf(expr) + Ts[i] = pass.TypesInfo.TypeOf(expr) } ccs = append(ccs, ccAndTypes{cc: cc, types: Ts}) @@ -3419,30 +3473,32 @@ func (c *Checker) CheckUnreachableTypeCases(j *lint.Job) { for i, cc := range ccs[:len(ccs)-1] { for _, next := range ccs[i+1:] { if T, V, yes := subsumesAny(cc.types, next.types); yes { - j.Errorf(next.cc, "unreachable case clause: %s will always match before %s", T.String(), V.String()) + pass.Reportf(next.cc.Pos(), "unreachable case clause: %s will always match before %s", T.String(), V.String()) } } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckSingleArgAppend(j *lint.Job) { +func CheckSingleArgAppend(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - if !IsCallToAST(j, node, "append") { + if !IsCallToAST(pass, node, "append") { return } call := node.(*ast.CallExpr) if len(call.Args) != 1 { return } - j.Errorf(call, "x = append(y) is equivalent to x = y") + ReportfFG(pass, call.Pos(), "x = append(y) is equivalent to x = y") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckStructTags(j *lint.Job) { +func CheckStructTags(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { for _, field := range node.(*ast.StructType).Fields.List { if field.Tag == nil { @@ -3450,35 +3506,36 @@ func (c *Checker) 
CheckStructTags(j *lint.Job) { } tags, err := parseStructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) if err != nil { - j.Errorf(field.Tag, "unparseable struct tag: %s", err) + pass.Reportf(field.Tag.Pos(), "unparseable struct tag: %s", err) continue } for k, v := range tags { if len(v) > 1 { - j.Errorf(field.Tag, "duplicate struct tag %q", k) + pass.Reportf(field.Tag.Pos(), "duplicate struct tag %q", k) continue } switch k { case "json": - checkJSONTag(j, field, v[0]) + checkJSONTag(pass, field, v[0]) case "xml": - checkXMLTag(j, field, v[0]) + checkXMLTag(pass, field, v[0]) } } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.StructType)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.StructType)(nil)}, fn) + return nil, nil } -func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { +func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) { + //lint:ignore SA9003 TODO(dh): should we flag empty tags? if len(tag) == 0 { - // TODO(dh): should we flag empty tags? } fields := strings.Split(tag, ",") for _, r := range fields[0] { if !unicode.IsLetter(r) && !unicode.IsDigit(r) && !strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", r) { - j.Errorf(field.Tag, "invalid JSON field name %q", fields[0]) + pass.Reportf(field.Tag.Pos(), "invalid JSON field name %q", fields[0]) } } var co, cs, ci int @@ -3491,31 +3548,31 @@ func checkJSONTag(j *lint.Job, field *ast.Field, tag string) { case "string": cs++ // only for string, floating point, integer and bool - T := Dereference(j.Pkg.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() + T := Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() basic, ok := T.(*types.Basic) if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { - j.Errorf(field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + pass.Reportf(field.Tag.Pos(), "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") } case "inline": ci++ default: - j.Errorf(field.Tag, "unknown JSON option %q", s) + pass.Reportf(field.Tag.Pos(), "unknown JSON option %q", s) } } if co > 1 { - j.Errorf(field.Tag, `duplicate JSON option "omitempty"`) + pass.Reportf(field.Tag.Pos(), `duplicate JSON option "omitempty"`) } if cs > 1 { - j.Errorf(field.Tag, `duplicate JSON option "string"`) + pass.Reportf(field.Tag.Pos(), `duplicate JSON option "string"`) } if ci > 1 { - j.Errorf(field.Tag, `duplicate JSON option "inline"`) + pass.Reportf(field.Tag.Pos(), `duplicate JSON option "inline"`) } } -func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { +func checkXMLTag(pass *analysis.Pass, field *ast.Field, tag string) { + //lint:ignore SA9003 TODO(dh): should we flag empty tags? if len(tag) == 0 { - // TODO(dh): should we flag empty tags? 
} fields := strings.Split(tag, ",") counts := map[string]int{} @@ -3531,15 +3588,15 @@ func checkXMLTag(j *lint.Job, field *ast.Field, tag string) { counts[s]++ case "": default: - j.Errorf(field.Tag, "unknown XML option %q", s) + pass.Reportf(field.Tag.Pos(), "unknown XML option %q", s) } } for k, v := range counts { if v > 1 { - j.Errorf(field.Tag, "duplicate XML option %q", k) + pass.Reportf(field.Tag.Pos(), "duplicate XML option %q", k) } } if len(exclusives) > 1 { - j.Errorf(field.Tag, "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) + pass.Reportf(field.Tag.Pos(), "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) } } diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index 88658dfb8..2b129b53e 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -3,32 +3,111 @@ package staticcheck import ( "testing" - "honnef.co/go/tools/lint" - "honnef.co/go/tools/lint/lintutil" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis/analysistest" ) func TestAll(t *testing.T) { - c := NewChecker() - testutil.TestAll(t, c, "") -} - -func BenchmarkStdlib(b *testing.B) { - for i := 0; i < b.N; i++ { - c := NewChecker() - _, err := lintutil.Lint([]lint.Checker{c}, []string{"std"}, nil) - if err != nil { - b.Fatal(err) - } + checks := map[string][]struct { + dir string + version string + }{ + "SA1000": {{dir: "CheckRegexps"}}, + "SA1001": {{dir: "CheckTemplate"}}, + "SA1002": {{dir: "CheckTimeParse"}}, + "SA1003": { + {dir: "CheckEncodingBinary"}, + {dir: "CheckEncodingBinary_go17", version: "1.7"}, + {dir: "CheckEncodingBinary_go18", version: "1.8"}, + }, + "SA1004": {{dir: "CheckTimeSleepConstant"}}, + "SA1005": {{dir: "CheckExec"}}, + "SA1006": {{dir: "CheckUnsafePrintf"}}, + "SA1007": {{dir: "CheckURLs"}}, + "SA1008": {{dir: "CheckCanonicalHeaderKey"}}, + "SA1010": {{dir: "checkStdlibUsageRegexpFindAll"}}, + "SA1011": {{dir: "checkStdlibUsageUTF8Cutset"}}, + "SA1012": {{dir: "checkStdlibUsageNilContext"}}, + "SA1013": {{dir: "checkStdlibUsageSeeker"}}, + "SA1014": {{dir: "CheckUnmarshalPointer"}}, + "SA1015": { + {dir: "CheckLeakyTimeTick"}, + {dir: "CheckLeakyTimeTick-main"}, + }, + "SA1016": {{dir: "CheckUntrappableSignal"}}, + "SA1017": {{dir: "CheckUnbufferedSignalChan"}}, + "SA1018": {{dir: "CheckStringsReplaceZero"}}, + "SA1019": { + {dir: "CheckDeprecated"}, + {dir: "CheckDeprecated_go14", version: "1.4"}, + {dir: "CheckDeprecated_go18", version: "1.8"}, + }, + "SA1020": {{dir: "CheckListenAddress"}}, + "SA1021": {{dir: "CheckBytesEqualIP"}}, + "SA1023": {{dir: "CheckWriterBufferModified"}}, + "SA1024": {{dir: "CheckNonUniqueCutset"}}, + "SA1025": {{dir: "CheckTimerResetReturnValue"}}, + "SA1026": {{dir: "CheckUnsupportedMarshal"}}, + "SA2000": {{dir: "CheckWaitgroupAdd"}}, + "SA2001": {{dir: "CheckEmptyCriticalSection"}}, + "SA2002": {{dir: "CheckConcurrentTesting"}}, + "SA2003": {{dir: "CheckDeferLock"}}, + "SA3000": { + {dir: "CheckTestMainExit-1"}, + {dir: "CheckTestMainExit-2"}, + {dir: "CheckTestMainExit-3"}, + {dir: "CheckTestMainExit-4"}, + {dir: "CheckTestMainExit-5"}, + }, + "SA3001": {{dir: "CheckBenchmarkN"}}, + "SA4000": {{dir: "CheckLhsRhsIdentical"}}, + "SA4001": {{dir: "CheckIneffectiveCopy"}}, + "SA4002": {{dir: "CheckDiffSizeComparison"}}, + "SA4003": {{dir: "CheckExtremeComparison"}}, + "SA4004": {{dir: "CheckIneffectiveLoop"}}, + "SA4006": {{dir: "CheckUnreadVariableValues"}}, + "SA4008": {{dir: "CheckLoopCondition"}}, + "SA4009": {{dir: "CheckArgOverwritten"}}, + "SA4010": 
{{dir: "CheckIneffectiveAppend"}}, + "SA4011": {{dir: "CheckScopedBreak"}}, + "SA4012": {{dir: "CheckNaNComparison"}}, + "SA4013": {{dir: "CheckDoubleNegation"}}, + "SA4014": {{dir: "CheckRepeatedIfElse"}}, + "SA4015": {{dir: "CheckMathInt"}}, + "SA4016": {{dir: "CheckSillyBitwiseOps"}}, + "SA4017": {{dir: "CheckPureFunctions"}}, + "SA4018": {{dir: "CheckSelfAssignment"}}, + "SA4019": {{dir: "CheckDuplicateBuildConstraints"}}, + "SA4020": {{dir: "CheckUnreachableTypeCases"}}, + "SA4021": {{dir: "CheckSingleArgAppend"}}, + "SA5000": {{dir: "CheckNilMaps"}}, + "SA5001": {{dir: "CheckEarlyDefer"}}, + "SA5002": {{dir: "CheckInfiniteEmptyLoop"}}, + "SA5003": {{dir: "CheckDeferInInfiniteLoop"}}, + "SA5004": {{dir: "CheckLoopEmptyDefault"}}, + "SA5005": {{dir: "CheckCyclicFinalizer"}}, + "SA5007": {{dir: "CheckInfiniteRecursion"}}, + "SA5008": {{dir: "CheckStructTags"}}, + "SA5009": {{dir: "CheckPrintf"}}, + "SA6000": {{dir: "CheckRegexpMatchLoop"}}, + "SA6002": {{dir: "CheckSyncPoolValue"}}, + "SA6003": {{dir: "CheckRangeStringRunes"}}, + "SA6005": {{dir: "CheckToLowerToUpperComparison"}}, + "SA9001": {{dir: "CheckDubiousDeferInChannelRangeLoop"}}, + "SA9002": {{dir: "CheckNonOctalFileMode"}}, + "SA9003": {{dir: "CheckEmptyBranch"}}, + "SA9004": {{dir: "CheckMissingEnumTypesInDeclaration"}}, + "SA9005": {{dir: "CheckNoopMarshal"}}, } -} -func BenchmarkNetHttp(b *testing.B) { - for i := 0; i < b.N; i++ { - c := NewChecker() - _, err := lintutil.Lint([]lint.Checker{c}, []string{"net/http"}, nil) - if err != nil { - b.Fatal(err) + for check, dirs := range checks { + a := Analyzers[check] + for _, dir := range dirs { + if dir.version != "" { + if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { + t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a, dir.dir) } } } diff --git a/staticcheck/rules.go b/staticcheck/rules.go index d6af573c2..0152cac1a 100644 --- a/staticcheck/rules.go +++ b/staticcheck/rules.go @@ -13,7 +13,7 @@ import ( "time" "unicode/utf8" - "honnef.co/go/tools/lint" + "golang.org/x/tools/go/analysis" . 
"honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" "honnef.co/go/tools/staticcheck/vrp" @@ -26,12 +26,11 @@ const ( ) type Call struct { - Job *lint.Job + Pass *analysis.Pass Instr ssa.CallInstruction Args []*Argument - Checker *Checker - Parent *ssa.Function + Parent *ssa.Function invalids []string } @@ -184,7 +183,7 @@ func ConvertedFromInt(v Value) bool { return true } -func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { +func validEncodingBinaryType(pass *analysis.Pass, typ types.Type) bool { typ = typ.Underlying() switch typ := typ.(type) { case *types.Basic: @@ -194,19 +193,19 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { types.Float32, types.Float64, types.Complex64, types.Complex128, types.Invalid: return true case types.Bool: - return IsGoVersion(j, 8) + return IsGoVersion(pass, 8) } return false case *types.Struct: n := typ.NumFields() for i := 0; i < n; i++ { - if !validEncodingBinaryType(j, typ.Field(i).Type()) { + if !validEncodingBinaryType(pass, typ.Field(i).Type()) { return false } } return true case *types.Array: - return validEncodingBinaryType(j, typ.Elem()) + return validEncodingBinaryType(pass, typ.Elem()) case *types.Interface: // we can't determine if it's a valid type or not return true @@ -214,7 +213,7 @@ func validEncodingBinaryType(j *lint.Job, typ types.Type) bool { return false } -func CanBinaryMarshal(j *lint.Job, v Value) bool { +func CanBinaryMarshal(pass *analysis.Pass, v Value) bool { typ := v.Value.Type().Underlying() if ttyp, ok := typ.(*types.Pointer); ok { typ = ttyp.Elem().Underlying() @@ -227,7 +226,7 @@ func CanBinaryMarshal(j *lint.Job, v Value) bool { } } - return validEncodingBinaryType(j, typ) + return validEncodingBinaryType(pass, typ) } func RepeatZeroTimes(name string, arg int) CallCheck { diff --git a/staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go b/staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go new file mode 100644 index 000000000..421b77462 --- /dev/null +++ b/staticcheck/testdata/src/CheckArgOverwritten/CheckArgOverwritten.go @@ -0,0 +1,6 @@ +package pkg + +var x = func(arg int) { // want `overwritten` + arg = 1 + println(arg) +} diff --git a/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go b/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go index 8f5fa1b29..a900cf4da 100644 --- a/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go +++ b/staticcheck/testdata/src/CheckBenchmarkN/CheckBenchmarkN.go @@ -4,6 +4,6 @@ import "testing" func foo() { var b *testing.B - b.N = 1 // MATCH /should not assign to b.N/ + b.N = 1 // want `should not assign to b\.N` _ = b } diff --git a/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go b/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go index db6ae4716..ea0833092 100644 --- a/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go +++ b/staticcheck/testdata/src/CheckBytesEqualIP/CheckBytesEqualIP.go @@ -11,7 +11,7 @@ func fn() { var b1, b2 []byte var t1, t2 T - bytes.Equal(i1, i2) // MATCH /use net.IP.Equal to compare net.IPs, not bytes.Equal/ + bytes.Equal(i1, i2) // want `use net\.IP\.Equal to compare net\.IPs, not bytes\.Equal` bytes.Equal(b1, b2) bytes.Equal(t1, t2) diff --git a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go index 7482aefa1..8f4bffe6f 100644 --- a/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go +++ 
b/staticcheck/testdata/src/CheckCanonicalHeaderKey/CheckCanonicalHeaderKey.go @@ -6,9 +6,9 @@ func fn() { var r http.Request h := http.Header{} var m map[string][]string - _ = h["foo"] // MATCH /keys in http.Header are canonicalized/ + _ = h["foo"] // want `keys in http\.Header are canonicalized` h["foo"] = nil - _ = r.Header["foo"] // MATCH /keys in http.Header are canonicalized/ + _ = r.Header["foo"] // want `keys in http\.Header are canonicalized` r.Header["foo"] = nil _ = m["foo"] } diff --git a/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go b/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go index a3b2df2e6..8d0c56479 100644 --- a/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go +++ b/staticcheck/testdata/src/CheckConcurrentTesting/CheckConcurrentTesting.go @@ -5,10 +5,10 @@ import "testing" func fn1() { var t *testing.T t.Fatal() - go func() { // MATCH /the goroutine calls T.Fatal, which must be called in the same goroutine as the test/ + go func() { // want `the goroutine calls T\.Fatal, which must be called in the same goroutine as the test` t.Fatal() }() - go fn2(t) // MATCH /the goroutine calls T.Fatal, which must be called in the same goroutine as the test/ + go fn2(t) // want `the goroutine calls T\.Fatal, which must be called in the same goroutine as the test` func() { t.Fatal() }() @@ -17,7 +17,7 @@ func fn1() { t.Fatal() } fn() - go fn() // MATCH /the goroutine calls T.Fatal, which must be called in the same goroutine as the test/ + go fn() // want `the goroutine calls T\.Fatal, which must be called in the same goroutine as the test` } func fn2(t *testing.T) { diff --git a/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go b/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go index 477202acb..2aad2e3c4 100644 --- a/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go +++ b/staticcheck/testdata/src/CheckCyclicFinalizer/CheckCyclicFinalizer.go @@ -8,9 +8,9 @@ import ( func fn() { var x *int foo := func(y *int) { fmt.Println(x) } - runtime.SetFinalizer(x, foo) + runtime.SetFinalizer(x, foo) // want `the finalizer closes over the object, preventing the finalizer from ever running \(at .+:10:9` runtime.SetFinalizer(x, nil) - runtime.SetFinalizer(x, func(_ *int) { + runtime.SetFinalizer(x, func(_ *int) { // want `the finalizer closes over the object, preventing the finalizer from ever running \(at .+:13:26` fmt.Println(x) }) @@ -20,6 +20,3 @@ func fn() { fmt.Println(y) }) } - -// MATCH:11 /the finalizer closes over the object, preventing the finalizer from ever running \(at .+:10:9/ -// MATCH:13 /the finalizer closes over the object, preventing the finalizer from ever running \(at .+:13:26/ diff --git a/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go b/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go index 18e4795e1..8bc43e984 100644 --- a/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go +++ b/staticcheck/testdata/src/CheckDeferInInfiniteLoop/CheckDeferInInfiniteLoop.go @@ -2,10 +2,10 @@ package pkg func fn() { for { - defer println() // MATCH /will never run/ + defer println() // want `will never run` } for { - defer println() // MATCH /will never run/ + defer println() // want `will never run` go func() { return }() diff --git a/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go b/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go index 
27541852c..2ec212e67 100644 --- a/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go +++ b/staticcheck/testdata/src/CheckDeferLock/CheckDeferLock.go @@ -7,7 +7,7 @@ var rw sync.RWMutex func fn1() { r.Lock() - defer r.Lock() // MATCH /deferring Lock right after having locked already; did you mean to defer Unlock/ + defer r.Lock() // want `deferring Lock right after having locked already; did you mean to defer Unlock` } func fn2() { @@ -22,7 +22,7 @@ func fn3() { func fn4() { rw.RLock() - defer rw.RLock() // MATCH /deferring RLock right after having locked already; did you mean to defer RUnlock/ + defer rw.RLock() // want `deferring RLock right after having locked already; did you mean to defer RUnlock` } func fn5() { diff --git a/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go index 2a1189328..c0ce1cef9 100644 --- a/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated/CheckDeprecated.go @@ -1,3 +1,3 @@ package pkg -import _ "CheckDeprecatedassist" // MATCH "Alas, it is deprecated." +import _ "CheckDeprecatedassist" // want `Alas, it is deprecated\.` diff --git a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go index 45035215c..bd2420707 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go @@ -8,14 +8,14 @@ import ( "syscall" ) -var _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ +var _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` func fn1(err error) { var r *http.Request - _ = r.Cancel // MATCH /Use the Context and WithContext methods/ - _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ + _ = r.Cancel // want `Use the Context and WithContext methods` + _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` _ = os.SEEK_SET - if err == http.ErrWriteAfterFlush { // MATCH /ErrWriteAfterFlush is no longer/ + if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` println() } var _ flate.ReadError @@ -28,7 +28,7 @@ func fn1(err error) { } // Deprecated: Don't use this. 
-func fn2() { +func fn2() { // want fn2:`Deprecated: Don't use this\.` _ = syscall.StringByteSlice("") anon := func(x int) { diff --git a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go index 8d91f9b4d..06754df19 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go @@ -8,27 +8,27 @@ import ( "syscall" ) -var _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ +var _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` func fn1(err error) { var r *http.Request - _ = r.Cancel // MATCH /Use the Context and WithContext methods/ - _ = syscall.StringByteSlice("") // MATCH /Use ByteSliceFromString instead/ - _ = os.SEEK_SET // MATCH /Use io.SeekStart, io.SeekCurrent, and io.SeekEnd/ - if err == http.ErrWriteAfterFlush { // MATCH /ErrWriteAfterFlush is no longer/ + _ = r.Cancel // want `Use the Context and WithContext methods` + _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` + _ = os.SEEK_SET // want `Use io\.SeekStart, io\.SeekCurrent, and io\.SeekEnd` + if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` println() } - var _ flate.ReadError // MATCH /No longer returned/ + var _ flate.ReadError // want `No longer returned` var tr *http.Transport - tr.CancelRequest(nil) // MATCH "CancelRequest is deprecated" + tr.CancelRequest(nil) // want `CancelRequest is deprecated` var conn driver.Conn - conn.Begin() // MATCH "Begin is deprecated" + conn.Begin() // want `Begin is deprecated` } // Deprecated: Don't use this. -func fn2() { +func fn2() { // want fn2:`Deprecated: Don't use this\.` _ = syscall.StringByteSlice("") anon := func(x int) { diff --git a/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go b/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go index 8029c828f..306ddb93e 100644 --- a/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go +++ b/staticcheck/testdata/src/CheckDiffSizeComparison/CheckDiffSizeComparison.go @@ -2,16 +2,16 @@ package pkg func fn1() { var s1 = "foobar" - _ = "a"[:] == s1 // MATCH /comparing strings of different sizes/ - _ = s1 == "a"[:] // MATCH /comparing strings of different sizes/ - _ = "a"[:] == s1[:2] // MATCH /comparing strings of different sizes/ - _ = "ab"[:] == s1[1:2] // MATCH /comparing strings of different sizes/ - _ = "ab"[:] == s1[0+1:2] // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "abc" // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "a"+"bc" // MATCH /comparing strings of different sizes/ - _ = "foobar"[:] == s1+"bc" // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "abc"[:] // MATCH /comparing strings of different sizes/ - _ = "a"[:] == "abc"[:2] // MATCH /comparing strings of different sizes/ + _ = "a"[:] == s1 // want `comparing strings of different sizes` + _ = s1 == "a"[:] // want `comparing strings of different sizes` + _ = "a"[:] == s1[:2] // want `comparing strings of different sizes` + _ = "ab"[:] == s1[1:2] // want `comparing strings of different sizes` + _ = "ab"[:] == s1[0+1:2] // want `comparing strings of different sizes` + _ = "a"[:] == "abc" // want `comparing strings of different sizes` + _ = "a"[:] == "a"+"bc" // want `comparing strings of different sizes` + _ = "foobar"[:] == s1+"bc" // want `comparing strings of different sizes` + _ = "a"[:] == 
"abc"[:] // want `comparing strings of different sizes` + _ = "a"[:] == "abc"[:2] // want `comparing strings of different sizes` _ = "a" == s1 // ignores _ = s1 == "a" // ignored @@ -22,7 +22,7 @@ func fn1() { _ = "abc"[:] == "abc" _ = "abc"[:] == "a"+"bc" _ = s1[:] == "foo"+"bar" - _ = "abc"[:] == "abc"[:] // MATCH /identical expressions on the left and right side/ + _ = "abc"[:] == "abc"[:] _ = "ab"[:] == "abc"[:2] } @@ -32,17 +32,17 @@ func fn2() { s1 = "1234" } - _ = s1 == "12345"[:] // MATCH /comparing strings of different sizes/ + _ = s1 == "12345"[:] // want `comparing strings of different sizes` _ = s1 == "1234"[:] _ = s1 == "123"[:] - _ = s1 == "12"[:] // MATCH /comparing strings of different sizes/ + _ = s1 == "12"[:] // want `comparing strings of different sizes` } func fn3(x string) { switch x[:1] { case "a": - case "ab": // MATCH /comparing strings of different sizes/ + case "ab": // want `comparing strings of different sizes` case "b": - case "bc": // MATCH /comparing strings of different sizes/ + case "bc": // want `comparing strings of different sizes` } } diff --git a/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go b/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go index 6a1802723..e2729cbf4 100644 --- a/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go +++ b/staticcheck/testdata/src/CheckDoubleNegation/CheckDoubleNegation.go @@ -1,11 +1,11 @@ package pkg func fn(b1, b2 bool) { - if !!b1 { // MATCH /negating a boolean twice/ + if !!b1 { // want `negating a boolean twice` println() } - if b1 && !!b2 { // MATCH /negating a boolean twice/ + if b1 && !!b2 { // want `negating a boolean twice` println() } diff --git a/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go b/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go index e12c22790..a0ed45e71 100644 --- a/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go +++ b/staticcheck/testdata/src/CheckDubiousDeferInChannelRangeLoop/CheckDubiousDeferInChannelRangeLoop.go @@ -3,6 +3,6 @@ package pkg func fn() { var ch chan int for range ch { - defer println() // MATCH /defers in this range loop/ + defer println() // want `defers in this range loop` } } diff --git a/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go b/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go index b5b317b63..33866ac74 100644 --- a/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go +++ b/staticcheck/testdata/src/CheckDuplicateBuildConstraints/CheckDuplicateBuildConstraints.go @@ -1,4 +1,4 @@ // +build one two three go1.1 // +build three one two go1.1 -package pkg // MATCH "identical build constraints" +package pkg // want `identical build constraints` diff --git a/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go b/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go index 75bec1851..cc8c3a761 100644 --- a/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go +++ b/staticcheck/testdata/src/CheckEarlyDefer/CheckEarlyDefer.go @@ -16,7 +16,7 @@ func fn3() (T, error) { func fn2() { rc, err := fn1() - defer rc.Close() // MATCH /should check returned error before deferring rc.Close/ + defer rc.Close() // want `should check returned error before deferring rc\.Close` if err != nil { println() } @@ -31,7 +31,7 @@ func fn2() { defer 
rc.Close() t, err := fn3() - defer t.rc.Close() // MATCH /should check returned error before deferring t.rc.Close/ + defer t.rc.Close() // want `should check returned error before deferring t\.rc\.Close` if err != nil { println() } diff --git a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go index f99973073..60595ec73 100644 --- a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go +++ b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch.go @@ -1,10 +1,10 @@ package pkg func fn1() { - if true { // MATCH "empty branch" + if true { // want `empty branch` } - if true { // MATCH "empty branch" - } else { // MATCH "empty branch" + if true { // want `empty branch` + } else { // want `empty branch` } if true { println() @@ -12,10 +12,10 @@ func fn1() { if true { println() - } else { // MATCH "empty branch" + } else { // want `empty branch` } - if true { // MATCH "empty branch" + if true { // want `empty branch` // TODO handle error } @@ -25,6 +25,6 @@ func fn1() { } if true { - } else if false { // MATCH "empty branch" + } else if false { // want `empty branch` } } diff --git a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go index d97496084..472ab5cb2 100644 --- a/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go +++ b/staticcheck/testdata/src/CheckEmptyBranch/CheckEmptyBranch_test.go @@ -3,7 +3,7 @@ package pkg import "testing" func TestFoo(t *testing.T) { - if true { // MATCH "empty branch" + if true { // want `empty branch` // TODO } } diff --git a/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go b/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go index 7eb8afc0d..777badf1c 100644 --- a/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go +++ b/staticcheck/testdata/src/CheckEmptyCriticalSection/CheckEmptyCriticalSection.go @@ -5,7 +5,7 @@ import "sync" func fn1() { var x sync.Mutex x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn2() { @@ -16,16 +16,16 @@ func fn2() { }{} x.m1.m2.Lock() - x.m1.m2.Unlock() // MATCH /empty critical section/ + x.m1.m2.Unlock() // want `empty critical section` } func fn3() { var x sync.RWMutex x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` x.RLock() - x.RUnlock() // MATCH /empty critical section/ + x.RUnlock() // want `empty critical section` x.Lock() defer x.Unlock() @@ -41,7 +41,7 @@ func fn4() { } x.m().Lock() - x.m().Unlock() // MATCH /empty critical section/ + x.m().Unlock() // want `empty critical section` } func fn5() { @@ -55,7 +55,7 @@ func fn5() { func fn6() { x := &sync.Mutex{} x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn7() { @@ -64,7 +64,7 @@ func fn7() { }{} x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn8() { @@ -72,7 +72,7 @@ func fn8() { x = new(sync.Mutex) x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } func fn9() { @@ -80,7 +80,7 @@ func fn9() { sync.Locker }{&sync.Mutex{}} x.Lock() - x.Unlock() // MATCH /empty critical section/ + x.Unlock() // want `empty critical section` } type T struct{} diff --git 
a/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go b/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go index 9dd3e8b44..3f2b655b2 100644 --- a/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go +++ b/staticcheck/testdata/src/CheckEncodingBinary/CheckEncodingBinary.go @@ -39,19 +39,19 @@ func fn() { var x13 []byte var x14 *[]byte var x15 T6 - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x1)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x1)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x2)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x3)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x3)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x4)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x5)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x5)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x6)) log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x7)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x8)) // MATCH /cannot be used with binary.Write/ - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x9)) // MATCH /cannot be used with binary.Write/ - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x10)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x8)) // want `cannot be used with binary\.Write` + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x9)) // want `cannot be used with binary\.Write` + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x10)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x11)) log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x13)) - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x14)) // MATCH /cannot be used with binary.Write/ + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x14)) // want `cannot be used with binary\.Write` log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x15)) log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, &x15)) } diff --git a/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go b/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go index b08e8e85a..a63ec9ef8 100644 --- a/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go +++ b/staticcheck/testdata/src/CheckEncodingBinary_go17/CheckEncodingBinary.go @@ -8,5 +8,5 @@ import ( func fn() { var x bool - log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x)) // MATCH "cannot be used with binary.Write" + log.Println(binary.Write(ioutil.Discard, binary.LittleEndian, x)) // want `cannot be used with binary\.Write` } diff --git a/staticcheck/testdata/src/CheckExec/CheckExec.go b/staticcheck/testdata/src/CheckExec/CheckExec.go index e70710ae1..ae5100239 100644 --- a/staticcheck/testdata/src/CheckExec/CheckExec.go +++ b/staticcheck/testdata/src/CheckExec/CheckExec.go @@ -4,7 +4,7 @@ import "os/exec" func fn() { exec.Command("ls") - exec.Command("ls arg1") // MATCH /first argument to exec/ + exec.Command("ls 
arg1") // want `first argument to exec` exec.Command(`C:\Program Files\this\is\insane.exe`) exec.Command("/Library/Application Support/VMware Tools/vmware-tools-daemon") } diff --git a/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go b/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go index a37521f2f..49642d1d1 100644 --- a/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go +++ b/staticcheck/testdata/src/CheckExtremeComparison/CheckExtremeComparison.go @@ -13,29 +13,29 @@ func fn() { i int ) - _ = u8 > math.MaxUint8 // MATCH "no value of type uint8 is greater than math.MaxUint8" - _ = u8 >= math.MaxUint8 // MATCH "no value of type uint8 is greater than math.MaxUint8" - _ = u8 >= 0 // MATCH "every value of type uint8 is >= 0" - _ = u8 <= math.MaxUint8 // MATCH "every value of type uint8 is <= math.MaxUint8" + _ = u8 > math.MaxUint8 // want `no value of type uint8 is greater than math\.MaxUint8` + _ = u8 >= math.MaxUint8 // want `no value of type uint8 is greater than math\.MaxUint8` + _ = u8 >= 0 // want `every value of type uint8 is >= 0` + _ = u8 <= math.MaxUint8 // want `every value of type uint8 is <= math\.MaxUint8` _ = u8 > 0 _ = u8 >= 1 _ = u8 < math.MaxUint8 _ = u16 > math.MaxUint8 - _ = u16 > math.MaxUint16 // MATCH "no value of type uint16 is greater than math.MaxUint16" + _ = u16 > math.MaxUint16 // want `no value of type uint16 is greater than math\.MaxUint16` _ = u16 <= math.MaxUint8 - _ = u16 <= math.MaxUint16 // MATCH "every value of type uint16 is <= math.MaxUint16" + _ = u16 <= math.MaxUint16 // want `every value of type uint16 is <= math\.MaxUint16` _ = u > math.MaxUint32 - _ = u > math.MaxUint64 // MATCH "no value of type uint is greater than math.MaxUint64" + _ = u > math.MaxUint64 // want `no value of type uint is greater than math\.MaxUint64` - _ = i8 > math.MaxInt8 // MATCH "no value of type int8 is greater than math.MaxInt8" + _ = i8 > math.MaxInt8 // want `no value of type int8 is greater than math\.MaxInt8` _ = i16 > math.MaxInt8 - _ = i16 > math.MaxInt16 // MATCH "no value of type int16 is greater than math.MaxInt16" + _ = i16 > math.MaxInt16 // want `no value of type int16 is greater than math\.MaxInt16` _ = i > math.MaxInt32 - _ = i > math.MaxInt64 // MATCH "no value of type int is greater than math.MaxInt64" + _ = i > math.MaxInt64 // want `no value of type int is greater than math\.MaxInt64` _ = i8 < 0 - _ = i8 <= math.MinInt8 // MATCH "no value of type int8 is less than math.MinInt8" - _ = i8 < math.MinInt8 // MATCH "no value of type int8 is less than math.MinInt8" - _ = i8 >= math.MinInt8 // MATCH "every value of type int8 is >= math.MinInt8" + _ = i8 <= math.MinInt8 // want `no value of type int8 is less than math\.MinInt8` + _ = i8 < math.MinInt8 // want `no value of type int8 is less than math\.MinInt8` + _ = i8 >= math.MinInt8 // want `every value of type int8 is >= math.MinInt8` } diff --git a/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go b/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go index 9f06fceeb..12e84cd80 100644 --- a/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go +++ b/staticcheck/testdata/src/CheckIneffectiveAppend/CheckIneffectiveAppend.go @@ -4,9 +4,8 @@ import "fmt" func fn1() { var s []int - s = append(s, 1) // MATCH /this result of append is never used/ - // MATCH:9 /this value of s is never used/ - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // 
want `this result of append is never used` + s = append(s, 1) // want `this result of append is never used` } func fn2() (named []int) { @@ -16,8 +15,7 @@ func fn2() (named []int) { func fn3() { s := make([]int, 0) - // MATCH:20 /this value of s is never used/ - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` } func fn4() []int { @@ -46,8 +44,7 @@ func fn9() { var s []int s = append(s, 1) fmt.Println(s) - // MATCH:50 /this value of s is never used/ - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` } func fn10() { @@ -59,6 +56,6 @@ func fn10() { func fn11() { var s []int for x := 0; x < 10; x++ { - s = append(s, 1) // MATCH /this result of append is never used/ + s = append(s, 1) // want `this result of append is never used` } } diff --git a/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go b/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go index b9aefa315..8057d4ea8 100644 --- a/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go +++ b/staticcheck/testdata/src/CheckIneffectiveCopy/CheckIneffectiveCopy.go @@ -6,8 +6,8 @@ func fn1(_ *T) {} func fn2() { t1 := &T{} - fn1(&*t1) // MATCH /will not copy/ - fn1(*&t1) // MATCH /will not copy/ + fn1(&*t1) // want `will not copy` + fn1(*&t1) // want `will not copy` _Cvar_something := &T{} fn1(&*_Cvar_something) diff --git a/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go b/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go index a06e05a49..3aca3a4f9 100644 --- a/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go +++ b/staticcheck/testdata/src/CheckIneffectiveLoop/CheckIneffectiveLoop.go @@ -5,7 +5,7 @@ func fn() { if true { println() } - break // MATCH /the surrounding loop is unconditionally terminated/ + break // want `the surrounding loop is unconditionally terminated` } for { if true { @@ -18,7 +18,7 @@ func fn() { if true { println() } - break // MATCH /the surrounding loop is unconditionally terminated/ + break // want `the surrounding loop is unconditionally terminated` } for range (map[int]int)(nil) { if true { @@ -46,3 +46,12 @@ func fn() { break } } + +var z = func() { + for { + if true { + println() + } + break // want `the surrounding loop is unconditionally terminated` + } +} diff --git a/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go b/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go index 72cc1c26e..2cd45a7f4 100644 --- a/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go +++ b/staticcheck/testdata/src/CheckInfiniteEmptyLoop/CheckInfiniteEmptyLoop.go @@ -3,7 +3,7 @@ package pkg func fn2() bool { return true } func fn() { - for { // MATCH /this loop will spin/ + for { // want `this loop will spin` } for fn2() { @@ -13,26 +13,21 @@ func fn() { break } - for true { // MATCH "loop condition never changes" + for true { // want `loop condition never changes` `this loop will spin` } x := true - for x { // MATCH "loop condition never changes" + for x { // want `loop condition never changes` `this loop will spin` } x = false - for x { // MATCH "loop condition never changes" + for x { // want `loop condition never changes` `this loop will spin` } for false { } false := true - for false { // MATCH "loop condition never changes" + for false { // want `loop condition never changes` `this loop will 
spin` } } - -// MATCH:16 "this loop will spin" -// MATCH:20 "this loop will spin" -// MATCH:24 "this loop will spin" -// MATCH:31 "this loop will spin" diff --git a/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go b/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go index fa2247d7a..b38c59c8a 100644 --- a/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go +++ b/staticcheck/testdata/src/CheckInfiniteRecursion/CheckInfiniteRecursion.go @@ -2,7 +2,7 @@ package pkg func fn1(x int) bool { println(x) - return fn1(x + 1) // MATCH /infinite recursive call/ + return fn1(x + 1) // want `infinite recursive call` return true } @@ -38,7 +38,7 @@ func fn4(p *int, n int) { func fn5(p *int, n int) { x := 0 - fn5(&x, n-1) // MATCH /infinite recursive call/ + fn5(&x, n-1) // want `infinite recursive call` if x != n { panic("stack is corrupted") } @@ -53,12 +53,12 @@ type T struct { } func (t T) Fn1() { - t.Fn1() // MATCH /infinite recursive call/ + t.Fn1() // want `infinite recursive call` } func (t T) Fn2() { x := T{} - x.Fn2() // MATCH /infinite recursive call/ + x.Fn2() // want `infinite recursive call` } func (t T) Fn3() { diff --git a/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go b/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go index 6d11d39d6..d3d493b5b 100644 --- a/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go +++ b/staticcheck/testdata/src/CheckLeakyTimeTick/CheckLeakyTimeTick.go @@ -9,7 +9,7 @@ func fn1() { } func fn2() { - for range time.Tick(0) { // MATCH /leaks the underlying ticker/ + for range time.Tick(0) { // want `leaks the underlying ticker` println("") if true { break @@ -18,7 +18,7 @@ func fn2() { } func fn3() { - for range time.Tick(0) { // MATCH /leaks the underlying ticker/ + for range time.Tick(0) { // want `leaks the underlying ticker` println("") if true { return diff --git a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go index be682350f..04cdeeea5 100644 --- a/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go +++ b/staticcheck/testdata/src/CheckLhsRhsIdentical/CheckLhsRhsIdentical.go @@ -3,25 +3,25 @@ package pkg type Float float64 func fn(a int, s []int, f1 float64, f2 Float) { - if 0 == 0 { // MATCH /identical expressions/ + if 0 == 0 { // want `identical expressions` println() } - if 1 == 1 { // MATCH /identical expressions/ + if 1 == 1 { // want `identical expressions` println() } - if a == a { // MATCH /identical expressions/ + if a == a { // want `identical expressions` println() } - if a != a { // MATCH /identical expressions/ + if a != a { // want `identical expressions` println() } - if s[0] == s[0] { // MATCH /identical expressions/ + if s[0] == s[0] { // want `identical expressions` println() } - if 1&1 == 1 { // MATCH /identical expressions/ + if 1&1 == 1 { // want `identical expressions` println() } - if (1 + 2 + 3) == (1 + 2 + 3) { // MATCH /identical expressions/ + if (1 + 2 + 3) == (1 + 2 + 3) { // want `identical expressions` println() } if f1 == f1 { @@ -30,7 +30,7 @@ func fn(a int, s []int, f1 float64, f2 Float) { if f1 != f1 { println() } - if f1 > f1 { // MATCH /identical expressions/ + if f1 > f1 { // want `identical expressions` println() } if f2 == f2 { diff --git a/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go b/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go index 
8df7d8733..4a1b04fe7 100644 --- a/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go +++ b/staticcheck/testdata/src/CheckListenAddress/CheckListenAddress.go @@ -4,8 +4,8 @@ import "net/http" func fn() { // Seen in actual code - http.ListenAndServe("localhost:8080/", nil) // MATCH /invalid port or service name in host:port pair/ - http.ListenAndServe("localhost", nil) // MATCH /invalid port or service name in host:port pair/ + http.ListenAndServe("localhost:8080/", nil) // want `invalid port or service name in host:port pair` + http.ListenAndServe("localhost", nil) // want `invalid port or service name in host:port pair` http.ListenAndServe("localhost:8080", nil) http.ListenAndServe(":8080", nil) http.ListenAndServe(":http", nil) diff --git a/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go b/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go index 7264a3dbc..511e02a9d 100644 --- a/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go +++ b/staticcheck/testdata/src/CheckLoopCondition/CheckLoopCondition.go @@ -2,9 +2,7 @@ package pkg func fn() { for i := 0; i < 10; i++ { - for j := 0; j < 10; i++ { // MATCH /variable in loop condition never changes/ + for j := 0; j < 10; i++ { // want `variable in loop condition never changes` } } } - -// M_ATCH:5 /j < 10 is always true for all possible values/ diff --git a/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go b/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go index 90dc05f40..9b21562bb 100644 --- a/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go +++ b/staticcheck/testdata/src/CheckLoopEmptyDefault/CheckLoopEmptyDefault.go @@ -10,7 +10,7 @@ func fn() { for { select { case <-ch: - default: // MATCH /should not have an empty default case/ + default: // want `should not have an empty default case` } } diff --git a/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go b/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go index c735f6f93..d413e8549 100644 --- a/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go +++ b/staticcheck/testdata/src/CheckMathInt/CheckMathInt.go @@ -3,6 +3,6 @@ package pkg import "math" func fn(x int) { - math.Ceil(float64(x)) // MATCH /on a converted integer is pointless/ - math.Floor(float64(x * 2)) // MATCH /on a converted integer is pointless/ + math.Ceil(float64(x)) // want `on a converted integer is pointless` + math.Floor(float64(x * 2)) // want `on a converted integer is pointless` } diff --git a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go index 41cfa2d01..923e0c43b 100644 --- a/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go +++ b/staticcheck/testdata/src/CheckMissingEnumTypesInDeclaration/CheckMissingEnumTypesInDeclaration.go @@ -10,7 +10,7 @@ const ( ) const ( - c6 int = 1 // MATCH "only the first constant in this group has an explicit type" + c6 int = 1 // want `only the first constant in this group has an explicit type` c7 = 2 c8 = 3 ) @@ -46,7 +46,7 @@ const ( c23 int = 1 c24 int = 2 - c25 string = "" // MATCH "only the first constant in this group has an explicit type" + c25 string = "" // want `only the first constant in this group has an explicit type` c26 = "" c27 = 1 @@ -56,12 +56,12 @@ const ( c30 = 2 c31 int = 2 - c32 string = "" // MATCH "only the first constant in this 
group has an explicit type" + c32 string = "" // want `only the first constant in this group has an explicit type` c33 = "" ) const ( - c34 int = 1 // MATCH "only the first constant in this group has an explicit type" + c34 int = 1 // want `only the first constant in this group has an explicit type` c35 = 2 c36 int = 2 diff --git a/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go b/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go index c6b7a2762..78d775371 100644 --- a/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go +++ b/staticcheck/testdata/src/CheckNaNComparison/CheckNaNComparison.go @@ -3,7 +3,7 @@ package pkg import "math" func fn(f float64) { - _ = f == math.NaN() // MATCH /no value is equal to NaN/ - _ = f > math.NaN() // MATCH /no value is equal to NaN/ - _ = f != math.NaN() // MATCH /no value is equal to NaN/ + _ = f == math.NaN() // want `no value is equal to NaN` + _ = f > math.NaN() // want `no value is equal to NaN` + _ = f != math.NaN() // want `no value is equal to NaN` } diff --git a/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go b/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go index 89399da7d..0fbfc088b 100644 --- a/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go +++ b/staticcheck/testdata/src/CheckNilMaps/CheckNilMaps.go @@ -2,7 +2,7 @@ package pkg func fn1() { var m map[int]int - m[1] = 1 // MATCH /assignment to nil map/ + m[1] = 1 // want `assignment to nil map` } func fn2(m map[int]int) { diff --git a/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go b/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go index 49edecaea..49cbab7d2 100644 --- a/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go +++ b/staticcheck/testdata/src/CheckNonOctalFileMode/CheckNonOctalFileMode.go @@ -3,5 +3,5 @@ package pkg import "os" func fn() { - os.OpenFile("", 0, 644) // MATCH /file mode.+/ + os.OpenFile("", 0, 644) // want `file mode.+` } diff --git a/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go b/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go index 72a38ba0c..a8ae03cd6 100644 --- a/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go +++ b/staticcheck/testdata/src/CheckNonUniqueCutset/CheckNonUniqueCutset.go @@ -7,5 +7,5 @@ func fn(s string) { _ = strings.TrimLeft(s, "a") _ = strings.TrimLeft(s, "µ") _ = strings.TrimLeft(s, "abc") - _ = strings.TrimLeft(s, "https://") // MATCH "duplicate characters" + _ = strings.TrimLeft(s, "https://") // want `duplicate characters` } diff --git a/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go b/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go index 2a7200005..9d3d0dbb6 100644 --- a/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go +++ b/staticcheck/testdata/src/CheckNoopMarshal/CheckNoopMarshal.go @@ -46,9 +46,9 @@ func fn() { // don't flag structs with no fields json.Marshal(T1{}) // no exported fields - json.Marshal(T2{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T2{}) // want `struct doesn't have any exported fields, nor custom marshaling` // pointer vs non-pointer makes no difference - json.Marshal(&T2{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(&T2{}) // want `struct doesn't have any exported fields, nor custom marshaling` // exported field json.Marshal(T3{}) 
// exported field, pointer makes no difference @@ -62,11 +62,11 @@ func fn() { // MarshalJSON json.Marshal(T7{}) // MarshalXML does not apply to JSON - json.Marshal(T8{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T8{}) // want `struct doesn't have any exported fields, nor custom marshaling` // MarshalText json.Marshal(T9{}) // embeds exported struct, but it has no fields - json.Marshal(T11{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T11{}) // want `struct doesn't have any exported fields, nor custom marshaling` // embeds type with MarshalJSON json.Marshal(T12{}) // embeds type with MarshalJSON and type isn't exported @@ -76,11 +76,11 @@ func fn() { // embedded pointer to struct with exported fields json.Marshal(T16{}) // don't recurse forever on recursive data structure - json.Marshal(T17{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Marshal(T17{}) // want `struct doesn't have any exported fields, nor custom marshaling` json.Marshal(T18{}) // MarshalJSON does not apply to JSON - xml.Marshal(T7{}) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + xml.Marshal(T7{}) // want `struct doesn't have any exported fields, nor custom marshaling` // MarshalXML xml.Marshal(T8{}) @@ -90,32 +90,34 @@ func fn() { var t8 T8 var t9 T9 // check that all other variations of methods also work - json.Unmarshal(nil, &t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + json.Unmarshal(nil, &t2) // want `struct doesn't have any exported fields, nor custom marshaling` json.Unmarshal(nil, &t3) json.Unmarshal(nil, &t9) - xml.Unmarshal(nil, &t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + xml.Unmarshal(nil, &t2) // want `struct doesn't have any exported fields, nor custom marshaling` xml.Unmarshal(nil, &t3) xml.Unmarshal(nil, &t9) - (*json.Decoder)(nil).Decode(&t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Decoder)(nil).Decode(&t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*json.Decoder)(nil).Decode(&t3) (*json.Decoder)(nil).Decode(&t9) - (*json.Encoder)(nil).Encode(t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Encoder)(nil).Encode(t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*json.Encoder)(nil).Encode(t3) (*json.Encoder)(nil).Encode(t9) - (*xml.Decoder)(nil).Decode(&t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Decoder)(nil).Decode(&t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Decoder)(nil).Decode(&t3) (*xml.Decoder)(nil).Decode(&t9) - (*xml.Encoder)(nil).Encode(t2) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Encoder)(nil).Encode(t2) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Encoder)(nil).Encode(t3) (*xml.Encoder)(nil).Encode(t9) (*json.Decoder)(nil).Decode(&t7) - (*json.Decoder)(nil).Decode(&t8) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*json.Decoder)(nil).Decode(&t8) // want `struct doesn't have any exported fields, nor custom marshaling` (*json.Encoder)(nil).Encode(t7) - (*json.Encoder)(nil).Encode(t8) // MATCH "struct doesn't have any exported fields, nor custom marshaling" - (*xml.Decoder)(nil).Decode(&t7) // MATCH "struct doesn't have any exported fields, nor custom 
marshaling" + (*json.Encoder)(nil).Encode(t8) // want `struct doesn't have any exported fields, nor custom marshaling` + (*xml.Decoder)(nil).Decode(&t7) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Decoder)(nil).Decode(&t8) - (*xml.Encoder)(nil).Encode(t7) // MATCH "struct doesn't have any exported fields, nor custom marshaling" + (*xml.Encoder)(nil).Encode(t7) // want `struct doesn't have any exported fields, nor custom marshaling` (*xml.Encoder)(nil).Encode(t8) } + +var _, _ = json.Marshal(T9{}) diff --git a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go index 1433dc1c2..07ff26a3f 100644 --- a/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go +++ b/staticcheck/testdata/src/CheckPrintf/CheckPrintf.go @@ -65,68 +65,68 @@ func fn() { fmt.Printf("%g", 1+2i) fmt.Printf("%#e %#E %#f %#F %#g %#G", 1.2, 1.2, 1.2, 1.2, 1.2, 1.2) // OK since Go 1.9 // Some bad format/argTypes - fmt.Printf("%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" - _ = fmt.Sprintf("%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" - fmt.Fprintf(os.Stdout, "%b", "hi") // MATCH "Printf format %b has arg #1 of wrong type string" - fmt.Printf("%t", c) // MATCH "Printf format %t has arg #1 of wrong type complex64" - fmt.Printf("%t", 1+2i) // MATCH "Printf format %t has arg #1 of wrong type complex128" - fmt.Printf("%c", 2.3) // MATCH "Printf format %c has arg #1 of wrong type float64" - fmt.Printf("%d", 2.3) // MATCH "Printf format %d has arg #1 of wrong type float64" - fmt.Printf("%e", "hi") // MATCH "Printf format %e has arg #1 of wrong type string" - fmt.Printf("%E", true) // MATCH "Printf format %E has arg #1 of wrong type bool" - fmt.Printf("%f", "hi") // MATCH "Printf format %f has arg #1 of wrong type string" - fmt.Printf("%F", 'x') // MATCH "Printf format %F has arg #1 of wrong type rune" - fmt.Printf("%g", "hi") // MATCH "Printf format %g has arg #1 of wrong type string" - fmt.Printf("%g", imap) // MATCH "Printf format %g has arg #1 of wrong type map[int]int" - fmt.Printf("%G", i) // MATCH "Printf format %G has arg #1 of wrong type int" - fmt.Printf("%o", x) // MATCH "Printf format %o has arg #1 of wrong type float64" - fmt.Printf("%p", 23) // MATCH "Printf format %p has arg #1 of wrong type int" - fmt.Printf("%q", x) // MATCH "Printf format %q has arg #1 of wrong type float64" - fmt.Printf("%s", b) // MATCH "Printf format %s has arg #1 of wrong type bool" - fmt.Printf("%s", byte(65)) // MATCH "Printf format %s has arg #1 of wrong type byte" - fmt.Printf("%t", 23) // MATCH "Printf format %t has arg #1 of wrong type int" - fmt.Printf("%U", x) // MATCH "Printf format %U has arg #1 of wrong type float64" - fmt.Printf("%X", 2.3) // MATCH "Printf format %X has arg #1 of wrong type float64" - fmt.Printf("%s", stringerv) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.ptrStringer" - fmt.Printf("%t", stringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.ptrStringer" - fmt.Printf("%s", embeddedStringerv) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.embeddedStringer" - fmt.Printf("%t", embeddedStringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.embeddedStringer" - fmt.Printf("%q", notstringerv) // MATCH "Printf format %q has arg #1 of wrong type CheckPrintf.notstringer" - fmt.Printf("%t", notstringerv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.notstringer" - fmt.Printf("%t", stringerarrayv) // 
MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.stringerarray" - fmt.Printf("%t", notstringerarrayv) // MATCH "Printf format %t has arg #1 of wrong type CheckPrintf.notstringerarray" - fmt.Printf("%q", notstringerarrayv) // MATCH "Printf format %q has arg #1 of wrong type CheckPrintf.notstringerarray" - fmt.Printf("%d", BoolFormatter(true)) // MATCH "Printf format %d has arg #1 of wrong type CheckPrintf.BoolFormatter" + fmt.Printf("%b", "hi") // want `Printf format %b has arg #1 of wrong type string` + _ = fmt.Sprintf("%b", "hi") // want `Printf format %b has arg #1 of wrong type string` + fmt.Fprintf(os.Stdout, "%b", "hi") // want `Printf format %b has arg #1 of wrong type string` + fmt.Printf("%t", c) // want `Printf format %t has arg #1 of wrong type complex64` + fmt.Printf("%t", 1+2i) // want `Printf format %t has arg #1 of wrong type complex128` + fmt.Printf("%c", 2.3) // want `Printf format %c has arg #1 of wrong type float64` + fmt.Printf("%d", 2.3) // want `Printf format %d has arg #1 of wrong type float64` + fmt.Printf("%e", "hi") // want `Printf format %e has arg #1 of wrong type string` + fmt.Printf("%E", true) // want `Printf format %E has arg #1 of wrong type bool` + fmt.Printf("%f", "hi") // want `Printf format %f has arg #1 of wrong type string` + fmt.Printf("%F", 'x') // want `Printf format %F has arg #1 of wrong type rune` + fmt.Printf("%g", "hi") // want `Printf format %g has arg #1 of wrong type string` + fmt.Printf("%g", imap) // want `Printf format %g has arg #1 of wrong type map\[int\]int` + fmt.Printf("%G", i) // want `Printf format %G has arg #1 of wrong type int` + fmt.Printf("%o", x) // want `Printf format %o has arg #1 of wrong type float64` + fmt.Printf("%p", 23) // want `Printf format %p has arg #1 of wrong type int` + fmt.Printf("%q", x) // want `Printf format %q has arg #1 of wrong type float64` + fmt.Printf("%s", b) // want `Printf format %s has arg #1 of wrong type bool` + fmt.Printf("%s", byte(65)) // want `Printf format %s has arg #1 of wrong type byte` + fmt.Printf("%t", 23) // want `Printf format %t has arg #1 of wrong type int` + fmt.Printf("%U", x) // want `Printf format %U has arg #1 of wrong type float64` + fmt.Printf("%X", 2.3) // want `Printf format %X has arg #1 of wrong type float64` + fmt.Printf("%s", stringerv) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.ptrStringer` + fmt.Printf("%t", stringerv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.ptrStringer` + fmt.Printf("%s", embeddedStringerv) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.embeddedStringer` + fmt.Printf("%t", embeddedStringerv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.embeddedStringer` + fmt.Printf("%q", notstringerv) // want `Printf format %q has arg #1 of wrong type CheckPrintf\.notstringer` + fmt.Printf("%t", notstringerv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.notstringer` + fmt.Printf("%t", stringerarrayv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.stringerarray` + fmt.Printf("%t", notstringerarrayv) // want `Printf format %t has arg #1 of wrong type CheckPrintf\.notstringerarray` + fmt.Printf("%q", notstringerarrayv) // want `Printf format %q has arg #1 of wrong type CheckPrintf\.notstringerarray` + fmt.Printf("%d", BoolFormatter(true)) // want `Printf format %d has arg #1 of wrong type CheckPrintf\.BoolFormatter` fmt.Printf("%z", FormatterVal(true)) // correct (the type is responsible for formatting) fmt.Printf("%d", FormatterVal(true)) 
// correct (the type is responsible for formatting) fmt.Printf("%s", nonemptyinterface) // correct (the type is responsible for formatting) - fmt.Printf("%.*s %d %6g", 3, "hi", 23, 'x') // MATCH "Printf format %6g has arg #4 of wrong type rune" - fmt.Printf("%s", "hi", 3) // MATCH "Printf call needs 1 args but has 2 args" - fmt.Printf("%"+("s"), "hi", 3) // MATCH "Printf call needs 1 args but has 2 args" + fmt.Printf("%.*s %d %6g", 3, "hi", 23, 'x') // want `Printf format %6g has arg #4 of wrong type rune` + fmt.Printf("%s", "hi", 3) // want `Printf call needs 1 args but has 2 args` + fmt.Printf("%"+("s"), "hi", 3) // want `Printf call needs 1 args but has 2 args` fmt.Printf("%s%%%d", "hi", 3) // correct fmt.Printf("%08s", "woo") // correct fmt.Printf("% 8s", "woo") // correct fmt.Printf("%.*d", 3, 3) // correct - fmt.Printf("%.*d x", 3, 3, 3, 3) // MATCH "Printf call needs 2 args but has 4 args" - fmt.Printf("%.*d x", "hi", 3) // MATCH "Printf format %.*d reads non-int arg #1 as argument of *" + fmt.Printf("%.*d x", 3, 3, 3, 3) // want `Printf call needs 2 args but has 4 args` + fmt.Printf("%.*d x", "hi", 3) // want `Printf format %\.\*d reads non-int arg #1 as argument of \*` fmt.Printf("%.*d x", i, 3) // correct - fmt.Printf("%.*d x", s, 3) // MATCH "Printf format %.*d reads non-int arg #1 as argument of *" - fmt.Printf("%*% x", 0.22) // MATCH "Printf format %*% reads non-int arg #1 as argument of *" + fmt.Printf("%.*d x", s, 3) // want `Printf format %\.\*d reads non-int arg #1 as argument of \*` + fmt.Printf("%*% x", 0.22) // want `Printf format %\*% reads non-int arg #1 as argument of \*` fmt.Printf("%q %q", multi()...) // ok fmt.Printf("%#q", `blah`) // ok const format = "%s %s\n" fmt.Printf(format, "hi", "there") - fmt.Printf(format, "hi") // MATCH "Printf format %s reads arg #2, but call has only 1 args" - fmt.Printf("%s %d %.3v %q", "str", 4) // MATCH "Printf format %.3v reads arg #3, but call has only 2 args" + fmt.Printf(format, "hi") // want `Printf format %s reads arg #2, but call has only 1 args` + fmt.Printf("%s %d %.3v %q", "str", 4) // want `Printf format %\.3v reads arg #3, but call has only 2 args` fmt.Printf("%#s", FormatterVal(true)) // correct (the type is responsible for formatting) - fmt.Printf("d%", 2) // MATCH "couldn't parse format string" + fmt.Printf("d%", 2) // want `couldn't parse format string` fmt.Printf("%d", percentDV) fmt.Printf("%d", &percentDV) - fmt.Printf("%d", notPercentDV) // MATCH "Printf format %d has arg #1 of wrong type CheckPrintf.notPercentDStruct" - fmt.Printf("%d", ¬PercentDV) // MATCH "Printf format %d has arg #1 of wrong type *CheckPrintf.notPercentDStruct" + fmt.Printf("%d", notPercentDV) // want `Printf format %d has arg #1 of wrong type CheckPrintf\.notPercentDStruct` + fmt.Printf("%d", ¬PercentDV) // want `Printf format %d has arg #1 of wrong type \*CheckPrintf\.notPercentDStruct` fmt.Printf("%p", ¬PercentDV) // Works regardless: we print it as a pointer. - fmt.Printf("%q", &percentDV) // MATCH "Printf format %q has arg #1 of wrong type *CheckPrintf.percentDStruct" + fmt.Printf("%q", &percentDV) // want `Printf format %q has arg #1 of wrong type \*CheckPrintf\.percentDStruct` fmt.Printf("%s", percentSV) fmt.Printf("%s", &percentSV) // Good argument reorderings. @@ -136,13 +136,13 @@ func fn() { fmt.Printf("%[2]*.[1]*[3]d", 2, 3, 4) fmt.Fprintf(os.Stderr, "%[2]*.[1]*[3]d", 2, 3, 4) // Use Fprintf to make sure we count arguments correctly. // Bad argument reorderings. 
- fmt.Printf("%[xd", 3) // MATCH "couldn't parse format string" - fmt.Printf("%[x]d x", 3) // MATCH "couldn't parse format string" - fmt.Printf("%[3]*s x", "hi", 2) // MATCH "Printf format %[3]*s reads arg #3, but call has only 2 args" - fmt.Printf("%[3]d x", 2) // MATCH "Printf format %[3]d reads arg #3, but call has only 1 args" - fmt.Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // MATCH "Printf format %[2]*.[1]*[3]d reads non-int arg #2 as argument of *" - fmt.Printf("%[0]s x", "arg1") // MATCH "Printf format %[0]s reads invalid arg 0; indices are 1-based" - fmt.Printf("%[0]d x", 1) // MATCH "Printf format %[0]d reads invalid arg 0; indices are 1-based" + fmt.Printf("%[xd", 3) // want `couldn't parse format string` + fmt.Printf("%[x]d x", 3) // want `couldn't parse format string` + fmt.Printf("%[3]*s x", "hi", 2) // want `Printf format %\[3\]\*s reads arg #3, but call has only 2 args` + fmt.Printf("%[3]d x", 2) // want `Printf format %\[3\]d reads arg #3, but call has only 1 args` + fmt.Printf("%[2]*.[1]*[3]d x", 2, "hi", 4) // want `Printf format %\[2\]\*\.\[1\]\*\[3\]d reads non-int arg #2 as argument of \*` + fmt.Printf("%[0]s x", "arg1") // want `Printf format %\[0\]s reads invalid arg 0; indices are 1-based` + fmt.Printf("%[0]d x", 1) // want `Printf format %\[0\]d reads invalid arg 0; indices are 1-based` // Interfaces can be used with any verb. var iface interface { @@ -150,7 +150,7 @@ func fn() { } fmt.Printf("%f", iface) // ok: fmt treats interfaces as transparent and iface may well have a float concrete type // Can print functions in many ways - fmt.Printf("%s", someFunction) // MATCH "Printf format %s has arg #1 of wrong type func()" + fmt.Printf("%s", someFunction) // want `Printf format %s has arg #1 of wrong type func\(\)` fmt.Printf("%d", someFunction) // ok: maybe someone wants to see the pointer fmt.Printf("%v", someFunction) // ok: maybe someone wants to see the pointer in decimal fmt.Printf("%p", someFunction) // ok: maybe someone wants to see the pointer @@ -163,11 +163,11 @@ func fn() { // indexed arguments fmt.Printf("%d %[3]d %d %[2]d x", 1, 2, 3, 4) // OK - fmt.Printf("%d %[0]d %d %[2]d x", 1, 2, 3, 4) // MATCH "Printf format %[0]d reads invalid arg 0; indices are 1-based" - fmt.Printf("%d %[3]d %d %[-2]d x", 1, 2, 3, 4) // MATCH "couldn't parse format string" - fmt.Printf("%d %[3]d %d %[2234234234234]d x", 1, 2, 3, 4) // MATCH "Printf format %[2234234234234]d reads arg #2234234234234, but call has only 4 args" - fmt.Printf("%d %[3]d %-10d %[2]d x", 1, 2, 3) // MATCH "Printf format %-10d reads arg #4, but call has only 3 args" - fmt.Printf("%[1][3]d x", 1, 2) // MATCH "couldn't parse format string" + fmt.Printf("%d %[0]d %d %[2]d x", 1, 2, 3, 4) // want `Printf format %\[0\]d reads invalid arg 0; indices are 1-based` + fmt.Printf("%d %[3]d %d %[-2]d x", 1, 2, 3, 4) // want `couldn't parse format string` + fmt.Printf("%d %[3]d %d %[2234234234234]d x", 1, 2, 3, 4) // want `Printf format %\[2234234234234\]d reads arg #2234234234234, but call has only 4 args` + fmt.Printf("%d %[3]d %-10d %[2]d x", 1, 2, 3) // want `Printf format %-10d reads arg #4, but call has only 3 args` + fmt.Printf("%[1][3]d x", 1, 2) // want `couldn't parse format string` fmt.Printf("%[1]d x", 1, 2) // OK fmt.Printf("%d %[3]d %d %[2]d x", 1, 2, 3, 4, 5) // OK @@ -191,11 +191,11 @@ func fn() { t1 := T1{&T2{"hi"}} fmt.Printf("%s\n", &x1) - fmt.Printf("%s\n", t1) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.T1" + fmt.Printf("%s\n", t1) // want `Printf format %s has arg #1 of wrong 
type CheckPrintf\.T1` var x2 struct{ A *int } - fmt.Printf("%p\n", x2) // MATCH "Printf format %p has arg #1 of wrong type struct{A *int}" + fmt.Printf("%p\n", x2) // want `Printf format %p has arg #1 of wrong type struct\{A \*int\}` var x3 [2]int - fmt.Printf("%p", x3) // MATCH "Printf format %p has arg #1 of wrong type [2]int" + fmt.Printf("%p", x3) // want `Printf format %p has arg #1 of wrong type \[2\]int` ue := unexportedError{nil} fmt.Printf("%s", ue) @@ -364,20 +364,20 @@ func UnexportedStringerOrError() { fmt.Printf("%s", unexportedInterface{3}) // ok; we can't see the problem us := unexportedStringer{} - fmt.Printf("%s", us) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.unexportedStringer" - fmt.Printf("%s", &us) // MATCH "Printf format %s has arg #1 of wrong type *CheckPrintf.unexportedStringer" + fmt.Printf("%s", us) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.unexportedStringer` + fmt.Printf("%s", &us) // want `Printf format %s has arg #1 of wrong type \*CheckPrintf\.unexportedStringer` usf := unexportedStringerOtherFields{ s: "foo", S: "bar", } - fmt.Printf("%s", usf) // MATCH "Printf format %s has arg #1 of wrong type CheckPrintf.unexportedStringerOtherFields" - fmt.Printf("%s", &usf) // MATCH "Printf format %s has arg #1 of wrong type *CheckPrintf.unexportedStringerOtherFields" + fmt.Printf("%s", usf) // want `Printf format %s has arg #1 of wrong type CheckPrintf\.unexportedStringerOtherFields` + fmt.Printf("%s", &usf) // want `Printf format %s has arg #1 of wrong type \*CheckPrintf\.unexportedStringerOtherFields` intSlice := []int{3, 4} - fmt.Printf("%s", intSlice) // MATCH "Printf format %s has arg #1 of wrong type []int" + fmt.Printf("%s", intSlice) // want `Printf format %s has arg #1 of wrong type \[\]int` nonStringerArray := [1]unexportedStringer{{}} - fmt.Printf("%s", nonStringerArray) // MATCH "Printf format %s has arg #1 of wrong type [1]CheckPrintf.unexportedStringer" + fmt.Printf("%s", nonStringerArray) // want `Printf format %s has arg #1 of wrong type \[1\]CheckPrintf\.unexportedStringer` fmt.Printf("%s", []stringer{3, 4}) // not an error fmt.Printf("%s", [2]stringer{3, 4}) // not an error } diff --git a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go index dc6d9ccf3..5a269e829 100644 --- a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go +++ b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go @@ -7,17 +7,17 @@ import ( ) func fn1() { - strings.Replace("", "", "", 1) // MATCH /is a pure function but its return value is ignored/ - foo(1, 2) // MATCH /is a pure function but its return value is ignored/ + strings.Replace("", "", "", 1) // want `is a pure function but its return value is ignored` + foo(1, 2) // want `is a pure function but its return value is ignored` bar(1, 2) } func fn2() { r, _ := http.NewRequest("GET", "/", nil) - r.WithContext(context.Background()) // MATCH /is a pure function but its return value is ignored/ + r.WithContext(context.Background()) // want `is a pure function but its return value is ignored` } -func foo(a, b int) int { return a + b } +func foo(a, b int) int { return a + b } // want foo:"IsPure" func bar(a, b int) int { println(a + b) return a + b diff --git a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go index a3a1efd9f..71fe2fa48 100644 --- 
a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go +++ b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions_test.go @@ -6,7 +6,7 @@ import ( ) func TestFoo(t *testing.T) { - strings.Replace("", "", "", 1) // MATCH /is a pure function but its return value is ignored/ + strings.Replace("", "", "", 1) // want `is a pure function but its return value is ignored` } func BenchmarkFoo(b *testing.B) { diff --git a/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go b/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go index 59d349ac0..b6761a187 100644 --- a/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go +++ b/staticcheck/testdata/src/CheckRangeStringRunes/CheckRangeStringRunes.go @@ -5,7 +5,7 @@ func fn(s string) { println(r) } - for _, r := range []rune(s) { // MATCH "should range over string" + for _, r := range []rune(s) { // want `should range over string` println(r) } @@ -15,7 +15,7 @@ func fn(s string) { } x := []rune(s) - for _, r := range x { // MATCH "should range over string" + for _, r := range x { // want `should range over string` println(r) } diff --git a/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go b/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go index 00ce19ec8..83b95450a 100644 --- a/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go +++ b/staticcheck/testdata/src/CheckRegexpMatchLoop/CheckRegexpMatchLoop.go @@ -8,8 +8,8 @@ func fn() { regexp.MatchReader(".", nil) for { - regexp.Match(".", nil) // MATCH /calling regexp.Match in a loop has poor performance/ - regexp.MatchString(".", "") // MATCH /calling regexp.MatchString in a loop has poor performance/ - regexp.MatchReader(".", nil) // MATCH /calling regexp.MatchReader in a loop has poor performance/ + regexp.Match(".", nil) // want `calling regexp\.Match in a loop has poor performance` + regexp.MatchString(".", "") // want `calling regexp\.MatchString in a loop has poor performance` + regexp.MatchReader(".", nil) // want `calling regexp\.MatchReader in a loop has poor performance` } } diff --git a/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go b/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go index 42cbd13e3..48f00e102 100644 --- a/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go +++ b/staticcheck/testdata/src/CheckRegexps/CheckRegexps.go @@ -8,12 +8,12 @@ import ( const c1 = `[` const c2 = `(abc)` -var re1 = regexp.MustCompile(`ab\yef`) // MATCH /error parsing regexp/ -var re2 = regexp.MustCompile(c1) // MATCH /error parsing regexp/ +var re1 = regexp.MustCompile(`ab\yef`) // want `error parsing regexp` +var re2 = regexp.MustCompile(c1) // want `error parsing regexp` var re3 = regexp.MustCompile(c2) func fn() { - _, err := regexp.Compile(`foo(`) // MATCH /error parsing regexp/ + _, err := regexp.Compile(`foo(`) // want `error parsing regexp` if err != nil { panic(err) } @@ -21,7 +21,19 @@ func fn() { log.Println("of course 'foo(' matches 'foo('") } - regexp.Match("foo(", nil) // MATCH /error parsing regexp/ - regexp.MatchReader("foo(", nil) // MATCH /error parsing regexp/ - regexp.MatchString("foo(", "") // MATCH /error parsing regexp/ + regexp.Match("foo(", nil) // want `error parsing regexp` + regexp.MatchReader("foo(", nil) // want `error parsing regexp` + regexp.MatchString("foo(", "") // want `error parsing regexp` } + +// must be a basic type to trigger SA4017 (in case of a test failure) +type T string + +func (T) Fn() {} + +// Don't get 
confused by methods named init +func (T) init() {} + +// this will become a synthetic init function, that we don't want to +// ignore +var _ = regexp.MustCompile("(") // want `error parsing regexp` diff --git a/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go b/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go index 7d0f79135..95255900b 100644 --- a/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go +++ b/staticcheck/testdata/src/CheckRepeatedIfElse/CheckRepeatedIfElse.go @@ -3,8 +3,8 @@ package pkg func fn1(b1, b2 bool) { if b1 && !b2 { } else if b1 { - } else if b1 && !b2 { // MATCH /condition occurs multiple times/ - } else if b1 { // MATCH /condition occurs multiple times/ + } else if b1 && !b2 { // want `condition occurs multiple times` + } else if b1 { // want `condition occurs multiple times` } else { println() } diff --git a/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go b/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go index d3a2eab2a..1c1c8b9b8 100644 --- a/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go +++ b/staticcheck/testdata/src/CheckScopedBreak/CheckScopedBreak.go @@ -5,16 +5,16 @@ func fn() { for { switch { case true: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` default: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } for { select { case <-ch: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } @@ -25,7 +25,7 @@ func fn() { switch { case true: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } switch { @@ -37,9 +37,9 @@ func fn() { switch { case true: if true { - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } else { - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } } @@ -66,7 +66,7 @@ label: for range ([]int)(nil) { switch { default: - break // MATCH /ineffective break statement/ + break // want `ineffective break statement` } } } diff --git a/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go b/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go index 8865e95fe..f47d973a8 100644 --- a/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go +++ b/staticcheck/testdata/src/CheckSelfAssignment/CheckSelfAssignment.go @@ -3,9 +3,9 @@ package pkg func fn(x int) { var z int var y int - x = x // MATCH "self-assignment" - y = y // MATCH "self-assignment" - y, x, z = y, x, 1 + x = x // want `self-assignment` + y = y // want `self-assignment` + y, x, z = y, x, 1 // want `self-assignment of y to y` `self-assignment of x to x` y = x _ = y _ = x @@ -15,6 +15,3 @@ func fn(x int) { println(x) }() } - -// MATCH:8 "self-assignment of y to y" -// MATCH:8 "self-assignment of x to x" diff --git a/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go b/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go index b8bea100c..2b0eacedf 100644 --- a/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go +++ b/staticcheck/testdata/src/CheckSillyBitwiseOps/CheckSillyBitwiseOps.go @@ -1,10 +1,10 @@ package pkg func fn(x int) { - println(x | 0) // MATCH "x | 0 always equals x" - println(x & 0) // MATCH "x & 0 always equals 0" - println(x ^ 0) // MATCH "x ^ 0 always equals x" - println((x << 5) | 0) // MATCH "x | 0 always equals x" + println(x 
| 0) // want `x \| 0 always equals x` + println(x & 0) // want `x & 0 always equals 0` + println(x ^ 0) // want `x \^ 0 always equals x` + println((x << 5) | 0) // want `x \| 0 always equals x` println(x | 1) println(x << 0) } diff --git a/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go b/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go index 27f9bd600..2b6f0adda 100644 --- a/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go +++ b/staticcheck/testdata/src/CheckSingleArgAppend/CheckSingleArgAppend.go @@ -3,11 +3,11 @@ package pkg //lint:file-ignore SA4010,SA4006 Not relevant to this test case func fn(arg []int) { - x := append(arg) // MATCH "x = append(y) is equivalent to x = y" + x := append(arg) // want `x = append\(y\) is equivalent to x = y` _ = x y := append(arg, 1) _ = y - arg = append(arg) // MATCH "x = append(y) is equivalent to x = y" + arg = append(arg) // want `x = append\(y\) is equivalent to x = y` arg = append(arg, 1, 2, 3) var nilly []int arg = append(arg, nilly...) diff --git a/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go b/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go index 426ccbc6a..318c4cb8d 100644 --- a/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go +++ b/staticcheck/testdata/src/CheckStringsReplaceZero/CheckStringsReplaceZero.go @@ -3,7 +3,7 @@ package pkg import "strings" func fn() { - _ = strings.Replace("", "", "", 0) // MATCH /calling strings.Replace with n == 0/ + _ = strings.Replace("", "", "", 0) // want `calling strings\.Replace with n == 0` _ = strings.Replace("", "", "", -1) _ = strings.Replace("", "", "", 1) } diff --git a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go index b8d4ddde7..06af02e12 100644 --- a/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go +++ b/staticcheck/testdata/src/CheckStructTags/CheckStructTags.go @@ -1,18 +1,18 @@ package pkg type T1 struct { - B int `foo:"" foo:""` // MATCH "duplicate struct tag" + B int `foo:"" foo:""` // want `duplicate struct tag` C int `foo:"" bar:""` D int `json:"-"` - E int `json:"\\"` // MATCH "invalid JSON field name" - F int `json:",omitempty,omitempty"` // MATCH "duplicate JSON option "omitempty"" + E int `json:"\\"` // want `invalid JSON field name` + F int `json:",omitempty,omitempty"` // want `duplicate JSON option "omitempty"` G int `json:",omitempty,string"` - H int `json:",string,omitempty,string"` // MATCH "duplicate JSON option "string"" - I int `json:",unknown"` // MATCH "unknown JSON option "unknown"" + H int `json:",string,omitempty,string"` // want `duplicate JSON option "string"` + I int `json:",unknown"` // want `unknown JSON option "unknown"` J int `json:",string"` K *int `json:",string"` - L **int `json:",string"` // MATCH "the JSON string option" - M complex128 `json:",string"` // MATCH "the JSON string option" + L **int `json:",string"` // want `the JSON string option` + M complex128 `json:",string"` // want `the JSON string option` N int `json:"some-name"` O int `json:"some-name,inline"` } @@ -25,13 +25,13 @@ type T2 struct { E int `xml:",comment"` F int `xml:",omitempty"` G int `xml:",any"` - H int `xml:",unknown"` // MATCH "unknown XML option" - I int `xml:",any,any"` // MATCH "duplicate XML option" + H int `xml:",unknown"` // want `unknown XML option` + I int `xml:",any,any"` // want `duplicate XML option` J int `xml:"a>b>c,"` - K int 
`xml:",attr,cdata"` // MATCH "mutually exclusive" + K int `xml:",attr,cdata"` // want `mutually exclusive` } type T3 struct { A int `json:",omitempty" xml:",attr"` - B int `json:",unknown" xml:",attr"` // MATCH "unknown JSON option" + B int `json:",unknown" xml:",attr"` // want `unknown JSON option` } diff --git a/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go b/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go index 548ab5ab2..44d8d20b7 100644 --- a/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go +++ b/staticcheck/testdata/src/CheckSyncPoolValue/CheckSyncPoolValue.go @@ -18,13 +18,13 @@ func fn() { s := []int{} v := sync.Pool{} - v.Put(s) // MATCH /argument should be pointer-like/ + v.Put(s) // want `argument should be pointer-like` v.Put(&s) - v.Put(T1{}) // MATCH /argument should be pointer-like/ - v.Put(T2{}) // MATCH /argument should be pointer-like/ + v.Put(T1{}) // want `argument should be pointer-like` + v.Put(T2{}) // want `argument should be pointer-like` p := &sync.Pool{} - p.Put(s) // MATCH /argument should be pointer-like/ + p.Put(s) // want `argument should be pointer-like` p.Put(&s) var i interface{} @@ -34,5 +34,5 @@ func fn() { p.Put(up) var basic int - p.Put(basic) // MATCH /argument should be pointer-like/ + p.Put(basic) // want `argument should be pointer-like` } diff --git a/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go b/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go index f38050599..f4e1cc6ff 100644 --- a/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go +++ b/staticcheck/testdata/src/CheckTemplate/CheckTemplate.go @@ -9,11 +9,11 @@ const tmpl1 = `{{.Name}} {{.LastName}` const tmpl2 = `{{fn}}` func fn() { - tt.New("").Parse(tmpl1) // MATCH /template/ + tt.New("").Parse(tmpl1) // want `template` tt.New("").Parse(tmpl2) t1 := tt.New("") t1.Parse(tmpl1) - th.New("").Parse(tmpl1) // MATCH /template/ + th.New("").Parse(tmpl1) // want `template` th.New("").Parse(tmpl2) t2 := th.New("") t2.Parse(tmpl1) diff --git a/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go b/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go index 2834f026f..3e372be5a 100644 --- a/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go +++ b/staticcheck/testdata/src/CheckTestMainExit-1/CheckTestMainExit-1.go @@ -2,6 +2,6 @@ package pkg import "testing" -func TestMain(m *testing.M) { // MATCH /should call os.Exit/ +func TestMain(m *testing.M) { // want `should call os\.Exit` m.Run() } diff --git a/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go b/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go index 7f4b0be36..b09399ecf 100644 --- a/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go +++ b/staticcheck/testdata/src/CheckTestMainExit-4/CheckTestMainExit-4.go @@ -7,7 +7,7 @@ import ( func helper() { os.Exit(1) } -func TestMain(m *testing.M) { // MATCH /should call os.Exit/ +func TestMain(m *testing.M) { // want `should call os\.Exit` // FIXME(dominikh): this is a false positive m.Run() helper() diff --git a/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go b/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go index 06bdf53ee..246aac043 100644 --- a/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go +++ b/staticcheck/testdata/src/CheckTimeParse/CheckTimeParse.go @@ -6,8 +6,8 @@ const c1 = "12345" const c2 = "2006" func fn() { - time.Parse("12345", "") // MATCH /parsing time/ - time.Parse(c1, "") // MATCH 
/parsing time/ + time.Parse("12345", "") // want `parsing time` + time.Parse(c1, "") // want `parsing time` time.Parse(c2, "") time.Parse(time.RFC3339Nano, "") time.Parse(time.Kitchen, "") diff --git a/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go b/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go index 53451f45f..4f6ed0c41 100644 --- a/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go +++ b/staticcheck/testdata/src/CheckTimeSleepConstant/CheckTimeSleepConstant.go @@ -6,8 +6,8 @@ const c1 = 1 const c2 = 200 func fn() { - time.Sleep(1) // MATCH /sleeping for 1/ - time.Sleep(42) // MATCH /sleeping for 42/ + time.Sleep(1) // want `sleeping for 1` + time.Sleep(42) // want `sleeping for 42` time.Sleep(201) time.Sleep(c1) time.Sleep(c2) diff --git a/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go b/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go index a67c5c1cf..bb4cfb44d 100644 --- a/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go +++ b/staticcheck/testdata/src/CheckTimerResetReturnValue/CheckTimerResetReturnValue.go @@ -26,7 +26,7 @@ func fn4() { func fn5() { t := time.NewTimer(time.Second) - if t.Reset(time.Second) { // MATCH "it is not possible to use Reset's return value correctly" + if t.Reset(time.Second) { // want `it is not possible to use Reset's return value correctly` <-t.C } } @@ -53,7 +53,7 @@ func fn7(x bool) { func fn8() { t := time.NewTimer(time.Second) - abc := t.Reset(time.Second) // MATCH "it is not possible to use Reset's return value correctly" + abc := t.Reset(time.Second) // want `it is not possible to use Reset's return value correctly` if abc { <-t.C } @@ -69,7 +69,7 @@ func fn9() { func fn10() { t := time.NewTimer(time.Second) - if !t.Reset(time.Second) { // MATCH "it is not possible to use Reset's return value correctly" + if !t.Reset(time.Second) { // want `it is not possible to use Reset's return value correctly` <-t.C } } diff --git a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go index 4a42ee616..263d11021 100644 --- a/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go +++ b/staticcheck/testdata/src/CheckToLowerToUpperComparison/CheckToLowerToUpperComparison.go @@ -8,24 +8,24 @@ func fn() { s2 = "bar" ) - if strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" + if strings.ToLower(s1) == strings.ToLower(s2) { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) == strings\.ToLower\(b\)` panic("") } - if strings.ToUpper(s1) == strings.ToUpper(s2) { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToUpper(a) == strings.ToUpper(b)" + if strings.ToUpper(s1) == strings.ToUpper(s2) { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToUpper\(a\) == strings\.ToUpper\(b\)` panic("") } - if strings.ToLower(s1) != strings.ToLower(s2) { // MATCH "should use !strings.EqualFold(a, b) instead of strings.ToLower(a) != strings.ToLower(b)" + if strings.ToLower(s1) != strings.ToLower(s2) { // want `should use !strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) != strings\.ToLower\(b\)` panic("") } - switch strings.ToLower(s1) == strings.ToLower(s2) { // MATCH "should use 
strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" + switch strings.ToLower(s1) == strings.ToLower(s2) { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) == strings\.ToLower\(b\)` case true, false: panic("") } - if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // MATCH "should use strings.EqualFold(a, b) instead of strings.ToLower(a) == strings.ToLower(b)" { + if strings.ToLower(s1) == strings.ToLower(s2) || s1+s2 == s2+s1 { // want `should use strings\.EqualFold\(a, b\) instead of strings\.ToLower\(a\) == strings\.ToLower\(b\)` panic("") } diff --git a/staticcheck/testdata/src/CheckURLs/CheckURLs.go b/staticcheck/testdata/src/CheckURLs/CheckURLs.go index a350359ee..0c9477296 100644 --- a/staticcheck/testdata/src/CheckURLs/CheckURLs.go +++ b/staticcheck/testdata/src/CheckURLs/CheckURLs.go @@ -4,6 +4,6 @@ import "net/url" func fn() { url.Parse("foobar") - url.Parse(":") // MATCH /is not a valid URL/ + url.Parse(":") // want `is not a valid URL` url.Parse("https://golang.org") } diff --git a/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go b/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go index 70d986a54..cb4d853ae 100644 --- a/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go +++ b/staticcheck/testdata/src/CheckUnbufferedSignalChan/CheckUnbufferedSignalChan.go @@ -8,7 +8,7 @@ import ( func fn(b bool) { c0 := make(chan os.Signal) - signal.Notify(c0, os.Interrupt) // MATCH /the channel used with signal.Notify should be buffered/ + signal.Notify(c0, os.Interrupt) // want `the channel used with signal\.Notify should be buffered` c1 := make(chan os.Signal, 1) signal.Notify(c1, os.Interrupt, syscall.SIGHUP) diff --git a/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go b/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go index b22c6cee1..862837a2d 100644 --- a/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go +++ b/staticcheck/testdata/src/CheckUnmarshalPointer/CheckUnmarshalPointer.go @@ -7,12 +7,12 @@ func fn1(i3 interface{}) { var i1 interface{} = v var i2 interface{} = &v p := &v - json.Unmarshal([]byte(`{}`), v) // MATCH /Unmarshal expects to unmarshal into a pointer/ + json.Unmarshal([]byte(`{}`), v) // want `Unmarshal expects to unmarshal into a pointer` json.Unmarshal([]byte(`{}`), &v) - json.Unmarshal([]byte(`{}`), i1) // MATCH /Unmarshal expects to unmarshal into a pointer/ + json.Unmarshal([]byte(`{}`), i1) // want `Unmarshal expects to unmarshal into a pointer` json.Unmarshal([]byte(`{}`), i2) json.Unmarshal([]byte(`{}`), i3) json.Unmarshal([]byte(`{}`), p) - json.NewDecoder(nil).Decode(v) // MATCH /Decode expects to unmarshal into a pointer/ + json.NewDecoder(nil).Decode(v) // want `Decode expects to unmarshal into a pointer` } diff --git a/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go b/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go index d08a18392..2fdddd4a7 100644 --- a/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go +++ b/staticcheck/testdata/src/CheckUnreachableTypeCases/CheckUnreachableTypeCases.go @@ -19,37 +19,37 @@ func fn1() { switch v.(type) { case io.Reader: println("io.Reader") - case io.ReadCloser: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + case io.ReadCloser: // want 
`unreachable case clause: io\.Reader will always match before io\.ReadCloser` println("io.ReadCloser") } switch v.(type) { case io.Reader: println("io.Reader") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } switch v.(type) { case io.Reader: println("io.Reader") - case io.ReadCloser: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + case io.ReadCloser: // want `unreachable case clause: io\.Reader will always match before io\.ReadCloser` println("io.ReadCloser") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } switch v.(type) { case io.Reader: println("io.Reader") - case io.ReadCloser, T: // MATCH "unreachable case clause: io.Reader will always match before io.ReadCloser" + case io.ReadCloser, T: // want `unreachable case clause: io\.Reader will always match before io\.ReadCloser` println("io.ReadCloser or T") } switch v.(type) { case io.ReadCloser, io.Reader: println("io.ReadCloser or io.Reader") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } @@ -58,21 +58,21 @@ func fn1() { println("something else") case io.Reader: println("io.Reader") - case T: // MATCH "unreachable case clause: io.Reader will always match before CheckUnreachableTypeCases.T" + case T: // want `unreachable case clause: io\.Reader will always match before CheckUnreachableTypeCases\.T` println("T") } switch err.(type) { case V: println("V") - case U: // MATCH "unreachable case clause: CheckUnreachableTypeCases.V will always match before CheckUnreachableTypeCases.U" + case U: // want `unreachable case clause: CheckUnreachableTypeCases\.V will always match before CheckUnreachableTypeCases\.U` println("U") } switch err.(type) { case U: println("U") - case V: // MATCH "unreachable case clause: CheckUnreachableTypeCases.U will always match before CheckUnreachableTypeCases.V" + case V: // want `unreachable case clause: CheckUnreachableTypeCases\.U will always match before CheckUnreachableTypeCases\.V` println("V") } } diff --git a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go index 4f588232e..c50c4689a 100644 --- a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go +++ b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues.go @@ -2,13 +2,13 @@ package pkg func fn1() { var x int - x = gen() // MATCH /this value of x is never used/ + x = gen() // want `this value of x is never used` x = gen() println(x) var y int if true { - y = gen() // MATCH /this value of y is never used/ + y = gen() // want `this value of y is never used` } y = gen() println(y) @@ -20,14 +20,11 @@ func gen() int { } func fn2() { - x, y := gen(), gen() + x, y := gen(), gen() // want `this value of x is never used` `this value of y is never used` x, y = gen(), gen() println(x, y) } -// MATCH:23 /this value of x is never used/ -// MATCH:23 /this value of y is never used/ - func fn3() { x := uint32(0) if true { @@ 
-44,25 +41,19 @@ func gen2() (int, int) { } func fn4() { - x, y := gen2() // MATCH /this value of x is never used/ + x, y := gen2() // want `this value of x is never used` println(y) - x, y = gen2() + x, y = gen2() // want `this value of x is never used` `this value of y is never used` x, y = gen2() println(x, y) } -// MATCH:49 /this value of x is never used/ -// MATCH:49 /this value of y is never used/ - func fn5(m map[string]string) { - v, ok := m[""] + v, ok := m[""] // want `this value of v is never used` `this value of ok is never used` v, ok = m[""] println(v, ok) } -// MATCH:58 /this value of v is never used/ -// MATCH:58 /this value of ok is never used/ - func fn6() { x := gen() // Do not report variables if they've been assigned to the blank identifier @@ -72,8 +63,16 @@ func fn6() { func fn7() { func() { var x int - x = gen() // MATCH /this value of x is never used/ + x = gen() // want `this value of x is never used` x = gen() println(x) }() } + +func fn() int { println(); return 0 } + +var y = func() { + v := fn() // want `never used` + v = fn() + println(v) +} diff --git a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go index c85a1a01a..9884718fb 100644 --- a/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go +++ b/staticcheck/testdata/src/CheckUnreadVariableValues/CheckUnreadVariableValues_test.go @@ -2,10 +2,8 @@ package pkg import "testing" -func fn() int { println(); return 0 } - func TestFoo(t *testing.T) { - x := fn() // MATCH "never used" + x := fn() // want `never used` x = fn() println(x) } diff --git a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go index 48cad7933..33a336de6 100644 --- a/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go +++ b/staticcheck/testdata/src/CheckUnsafePrintf/CheckUnsafePrintf.go @@ -8,12 +8,12 @@ import ( func fn(s string) { fn2 := func() string { return "" } - fmt.Printf(fn2()) // MATCH /should use print-style function/ - _ = fmt.Sprintf(fn2()) // MATCH /should use print-style function/ - log.Printf(fn2()) // MATCH /should use print-style function/ - fmt.Printf(s) // MATCH /should use print-style function/ + fmt.Printf(fn2()) // want `should use print-style function` + _ = fmt.Sprintf(fn2()) // want `should use print-style function` + log.Printf(fn2()) // want `should use print-style function` + fmt.Printf(s) // want `should use print-style function` fmt.Printf(s, "") - fmt.Fprintf(os.Stdout, s) // MATCH /should use print-style function/ + fmt.Fprintf(os.Stdout, s) // want `should use print-style function` fmt.Fprintf(os.Stdout, s, "") fmt.Printf(fn2(), "") diff --git a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go index e02b90fc8..63b1aec8d 100644 --- a/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go +++ b/staticcheck/testdata/src/CheckUnsupportedMarshal/CheckUnsupportedMarshal.go @@ -56,29 +56,29 @@ func fn() { var t8 T8 json.Marshal(t1) json.Marshal(t2) - json.Marshal(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + json.Marshal(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` json.Marshal(t4) - json.Marshal(t5) // MATCH "trying to marshal chan or func value, field 
CheckUnsupportedMarshal.T5.B" + json.Marshal(t5) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T5\.B` json.Marshal(t6) (*json.Encoder)(nil).Encode(t1) (*json.Encoder)(nil).Encode(t2) - (*json.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + (*json.Encoder)(nil).Encode(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` (*json.Encoder)(nil).Encode(t4) - (*json.Encoder)(nil).Encode(t5) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T5.B" + (*json.Encoder)(nil).Encode(t5) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T5\.B` (*json.Encoder)(nil).Encode(t6) xml.Marshal(t1) xml.Marshal(t2) - xml.Marshal(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + xml.Marshal(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` xml.Marshal(t4) xml.Marshal(t5) - xml.Marshal(t6) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T6.B" + xml.Marshal(t6) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T6\.B` (*xml.Encoder)(nil).Encode(t1) (*xml.Encoder)(nil).Encode(t2) - (*xml.Encoder)(nil).Encode(t3) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T3.C" + (*xml.Encoder)(nil).Encode(t3) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T3\.C` (*xml.Encoder)(nil).Encode(t4) (*xml.Encoder)(nil).Encode(t5) - (*xml.Encoder)(nil).Encode(t6) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T6.B" + (*xml.Encoder)(nil).Encode(t6) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T6\.B` - json.Marshal(t8) // MATCH "trying to marshal chan or func value, field CheckUnsupportedMarshal.T8.T7.T3.C" + json.Marshal(t8) // want `trying to marshal chan or func value, field CheckUnsupportedMarshal\.T8\.T7\.T3\.C` } diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go index 0e64d898d..3e0a64dfe 100644 --- a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go +++ b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go @@ -9,14 +9,14 @@ import ( func fn() { c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) - signal.Ignore(os.Signal(syscall.SIGKILL)) // MATCH /cannot be trapped/ - signal.Ignore(os.Kill) // MATCH /cannot be trapped/ - signal.Notify(c, os.Kill) // MATCH /cannot be trapped/ - signal.Reset(os.Kill) // MATCH /cannot be trapped/ - signal.Ignore(syscall.SIGKILL) // MATCH /cannot be trapped/ - signal.Notify(c, syscall.SIGKILL) // MATCH /cannot be trapped/ - signal.Reset(syscall.SIGKILL) // MATCH /cannot be trapped/ - signal.Ignore(syscall.SIGSTOP) // MATCH /cannot be trapped/ - signal.Notify(c, syscall.SIGSTOP) // MATCH /cannot be trapped/ - signal.Reset(syscall.SIGSTOP) // MATCH /cannot be trapped/ + signal.Ignore(os.Signal(syscall.SIGKILL)) // want `cannot be trapped` + signal.Ignore(os.Kill) // want `cannot be trapped` + signal.Notify(c, os.Kill) // want `cannot be trapped` + signal.Reset(os.Kill) // want `cannot be trapped` + signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` + signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` + signal.Reset(syscall.SIGKILL) // want `cannot be trapped` + 
signal.Ignore(syscall.SIGSTOP) // want `cannot be trapped` + signal.Notify(c, syscall.SIGSTOP) // want `cannot be trapped` + signal.Reset(syscall.SIGSTOP) // want `cannot be trapped` } diff --git a/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go b/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go index c5769fbd8..3688c2c32 100644 --- a/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go +++ b/staticcheck/testdata/src/CheckWaitgroupAdd/CheckWaitgroupAdd.go @@ -12,7 +12,7 @@ func fn() { }() go func() { - wg.Add(1) // MATCH "should call wg.Add(1) before starting" + wg.Add(1) // want `should call wg\.Add\(1\) before starting` wg.Done() }() diff --git a/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go b/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go index 6305286a6..dc316708a 100644 --- a/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go +++ b/staticcheck/testdata/src/CheckWriterBufferModified/CheckWriterBufferModified.go @@ -6,13 +6,13 @@ type T3 struct{} type T4 struct{} func (T1) Write(b []byte) (int, error) { - b = append(b, '\n') // MATCH /io.Writer.Write must not modify the provided buffer/ + b = append(b, '\n') // want `io\.Writer\.Write must not modify the provided buffer` _ = b return 0, nil } func (T2) Write(b []byte) (int, error) { - b[0] = 0 // MATCH /io.Writer.Write must not modify the provided buffer/ + b[0] = 0 // want `io\.Writer\.Write must not modify the provided buffer` return 0, nil } diff --git a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go index 594049525..7e2af8f92 100644 --- a/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go +++ b/staticcheck/testdata/src/checkStdlibUsageNilContext/checkStdlibUsageNilContext.go @@ -6,7 +6,7 @@ func fn1(ctx context.Context) {} func fn2(x string, ctx context.Context) {} func fn3() { - fn1(nil) // MATCH /do not pass a nil Context/ + fn1(nil) // want `do not pass a nil Context` fn1(context.TODO()) fn2("", nil) } diff --git a/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go b/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go index 71990e5eb..86056e577 100644 --- a/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go +++ b/staticcheck/testdata/src/checkStdlibUsageRegexpFindAll/checkStdlibUsageRegexpFindAll.go @@ -4,5 +4,5 @@ import "regexp" func fn() { var r *regexp.Regexp - _ = r.FindAll(nil, 0) //MATCH /calling a FindAll method with n == 0 will return no results/ + _ = r.FindAll(nil, 0) //want `calling a FindAll method with n == 0 will return no results` } diff --git a/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go b/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go index 53c4f28af..8ebd909e5 100644 --- a/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go +++ b/staticcheck/testdata/src/checkStdlibUsageSeeker/checkStdlibUsageSeeker.go @@ -7,6 +7,6 @@ func fn() { var s io.Seeker s.Seek(0, 0) s.Seek(0, io.SeekStart) - s.Seek(io.SeekStart, 0) // MATCH /the first argument of io.Seeker is the offset/ + s.Seek(io.SeekStart, 0) // want `the first argument of io\.Seeker is the offset` s.Seek(SeekStart, 0) } diff --git 
a/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go b/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go index d1ee3edd9..7caf5a7c7 100644 --- a/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go +++ b/staticcheck/testdata/src/checkStdlibUsageUTF8Cutset/checkStdlibUsageUTF8Cutset.go @@ -3,7 +3,7 @@ package pkg import "strings" func fn() { - println(strings.Trim("\x80test\xff", "\xff")) // MATCH /is not a valid UTF-8 encoded string/ + println(strings.Trim("\x80test\xff", "\xff")) // want `is not a valid UTF-8 encoded string` println(strings.Trim("foo", "bar")) s := "\xff" diff --git a/staticcheck/testdata/src/function-literals/function-literals.go b/staticcheck/testdata/src/function-literals/function-literals.go deleted file mode 100644 index bfd0e1917..000000000 --- a/staticcheck/testdata/src/function-literals/function-literals.go +++ /dev/null @@ -1,23 +0,0 @@ -package pkg - -func fn() int { println(); return 0 } - -var x = func(arg int) { // MATCH "overwritten" - arg = 1 - println(arg) -} - -var y = func() { - v := fn() // MATCH "never used" - v = fn() - println(v) -} - -var z = func() { - for { - if true { - println() - } - break // MATCH "the surrounding loop is unconditionally terminated" - } -} diff --git a/staticcheck/testdata/src/synthetic/synthetic.go b/staticcheck/testdata/src/synthetic/synthetic.go deleted file mode 100644 index 02c9e8053..000000000 --- a/staticcheck/testdata/src/synthetic/synthetic.go +++ /dev/null @@ -1,15 +0,0 @@ -package pkg - -import "regexp" - -// must be a basic type to trigger SA4017 (in case of a test failure) -type T string - -func (T) Fn() {} - -// Don't get confused by methods named init -func (T) init() {} - -// this will become a synthetic init function, that we don't want to -// ignore -var _ = regexp.MustCompile("(") // MATCH /error parsing regexp/ diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go new file mode 100644 index 000000000..5a36ec77c --- /dev/null +++ b/stylecheck/analysis.go @@ -0,0 +1,111 @@ +package stylecheck + +import ( + "flag" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/lint" + "honnef.co/go/tools/lint/lintutil" +) + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(lintutil.NewVersionFlag(), "go", "Target Go version") + return *fs +} + +var Analyzers = map[string]*analysis.Analyzer{ + "ST1000": { + Name: "ST1000", + Run: CheckPackageComment, + Doc: docST1000, + Requires: []*analysis.Analyzer{}, + Flags: newFlagSet(), + }, + "ST1001": { + Name: "ST1001", + Run: CheckDotImports, + Doc: docST1001, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1003": { + Name: "ST1003", + Run: CheckNames, + Doc: docST1003, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1005": { + Name: "ST1005", + Run: CheckErrorStrings, + Doc: docST1005, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1006": { + Name: "ST1006", + Run: CheckReceiverNames, + Doc: docST1006, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1008": { + Name: "ST1008", + Run: CheckErrorReturn, + Doc: docST1008, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1011": { + 
Name: "ST1011", + Run: CheckTimeNames, + Doc: docST1011, + Flags: newFlagSet(), + }, + "ST1012": { + Name: "ST1012", + Run: CheckErrorVarNames, + Doc: docST1012, + Requires: []*analysis.Analyzer{config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1013": { + Name: "ST1013", + Run: CheckHTTPStatusCodes, + Doc: docST1013, + Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer, config.Analyzer}, + Flags: newFlagSet(), + }, + "ST1015": { + Name: "ST1015", + Run: CheckDefaultCaseOrder, + Doc: docST1015, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "ST1016": { + Name: "ST1016", + Run: CheckReceiverNamesIdentical, + Doc: docST1016, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Flags: newFlagSet(), + }, + "ST1017": { + Name: "ST1017", + Run: CheckYodaConditions, + Doc: docST1017, + Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Flags: newFlagSet(), + }, + "ST1018": { + Name: "ST1018", + Run: CheckInvisibleCharacters, + Doc: docST1018, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), + }, +} diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 120d97f03..35ddc15b9 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -11,49 +11,18 @@ import ( "unicode" "unicode/utf8" - "honnef.co/go/tools/lint" + "honnef.co/go/tools/config" + "honnef.co/go/tools/internal/passes/buildssa" . "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) -type Checker struct { - CheckGenerated bool -} - -func NewChecker() *Checker { - return &Checker{} -} - -func (*Checker) Name() string { return "stylecheck" } -func (*Checker) Prefix() string { return "ST" } -func (c *Checker) Init(prog *lint.Program) {} - -func (c *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "ST1000", FilterGenerated: false, Fn: c.CheckPackageComment, Doc: docST1000}, - {ID: "ST1001", FilterGenerated: true, Fn: c.CheckDotImports, Doc: docST1001}, - // {ID: "ST1002", FilterGenerated: true, Fn: c.CheckBlankImports, Doc: docST1002}, - {ID: "ST1003", FilterGenerated: true, Fn: c.CheckNames, Doc: docST1003}, - // {ID: "ST1004", FilterGenerated: false, Fn: nil, , Doc: docST1004}, - {ID: "ST1005", FilterGenerated: false, Fn: c.CheckErrorStrings, Doc: docST1005}, - {ID: "ST1006", FilterGenerated: false, Fn: c.CheckReceiverNames, Doc: docST1006}, - // {ID: "ST1007", FilterGenerated: true, Fn: c.CheckIncDec, Doc: docST1007}, - {ID: "ST1008", FilterGenerated: false, Fn: c.CheckErrorReturn, Doc: docST1008}, - // {ID: "ST1009", FilterGenerated: false, Fn: c.CheckUnexportedReturn, Doc: docST1009}, - // {ID: "ST1010", FilterGenerated: false, Fn: c.CheckContextFirstArg, Doc: docST1010}, - {ID: "ST1011", FilterGenerated: false, Fn: c.CheckTimeNames, Doc: docST1011}, - {ID: "ST1012", FilterGenerated: false, Fn: c.CheckErrorVarNames, Doc: docST1012}, - {ID: "ST1013", FilterGenerated: true, Fn: c.CheckHTTPStatusCodes, Doc: docST1013}, - {ID: "ST1015", FilterGenerated: true, Fn: c.CheckDefaultCaseOrder, Doc: docST1015}, - {ID: "ST1016", FilterGenerated: false, Fn: c.CheckReceiverNamesIdentical, Doc: docST1016}, - {ID: "ST1017", FilterGenerated: true, Fn: c.CheckYodaConditions, Doc: docST1017}, - {ID: "ST1018", FilterGenerated: false, Fn: c.CheckInvisibleCharacters, Doc: docST1018}, 
- } -} - -func (c *Checker) CheckPackageComment(j *lint.Job) { +func CheckPackageComment(pass *analysis.Pass) (interface{}, error) { // - At least one file in a non-main package should have a package comment // // - The comment should be of the form @@ -62,57 +31,59 @@ func (c *Checker) CheckPackageComment(j *lint.Job) { // which case they get appended. But that doesn't happen a lot in // the real world. - if j.Pkg.Name == "main" { - return + if pass.Pkg.Name() == "main" { + return nil, nil } hasDocs := false - for _, f := range j.Pkg.Syntax { - if IsInTest(j, f) { + for _, f := range pass.Files { + if IsInTest(pass, f) { continue } if f.Doc != nil && len(f.Doc.List) > 0 { hasDocs = true prefix := "Package " + f.Name.Name + " " if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { - j.Errorf(f.Doc, `package comment should be of the form "%s..."`, prefix) + pass.Reportf(f.Doc.Pos(), `package comment should be of the form "%s..."`, prefix) } f.Doc.Text() } } if !hasDocs { - for _, f := range j.Pkg.Syntax { - if IsInTest(j, f) { + for _, f := range pass.Files { + if IsInTest(pass, f) { continue } - j.Errorf(f, "at least one file in a package should have a package comment") + pass.Reportf(f.Pos(), "at least one file in a package should have a package comment") } } + return nil, nil } -func (c *Checker) CheckDotImports(j *lint.Job) { - for _, f := range j.Pkg.Syntax { +func CheckDotImports(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { imports: for _, imp := range f.Imports { path := imp.Path.Value path = path[1 : len(path)-1] - for _, w := range j.Pkg.Config.DotImportWhitelist { + for _, w := range config.For(pass).DotImportWhitelist { if w == path { continue imports } } - if imp.Name != nil && imp.Name.Name == "." && !IsInTest(j, f) { - j.Errorf(imp, "should not use dot imports") + if imp.Name != nil && imp.Name.Name == "." && !IsInTest(pass, f) { + ReportfFG(pass, imp.Pos(), "should not use dot imports") } } } + return nil, nil } -func (c *Checker) CheckBlankImports(j *lint.Job) { - fset := j.Pkg.Fset - for _, f := range j.Pkg.Syntax { - if IsInMain(j, f) || IsInTest(j, f) { +func CheckBlankImports(pass *analysis.Pass) (interface{}, error) { + fset := pass.Fset + for _, f := range pass.Files { + if IsInMain(pass, f) || IsInTest(pass, f) { continue } @@ -161,13 +132,14 @@ func (c *Checker) CheckBlankImports(j *lint.Job) { } if imp.Doc == nil && imp.Comment == nil && !skip[imp] { - j.Errorf(imp, "a blank import should be only in a main or test package, or have a comment justifying it") + pass.Reportf(imp.Pos(), "a blank import should be only in a main or test package, or have a comment justifying it") } } } + return nil, nil } -func (c *Checker) CheckIncDec(j *lint.Job) { +func CheckIncDec(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this can be noisy for function bodies that look like this: // x += 3 // ... 
@@ -192,14 +164,15 @@ func (c *Checker) CheckIncDec(j *lint.Job) { suffix = "--" } - j.Errorf(assign, "should replace %s with %s%s", Render(j, assign), Render(j, assign.Lhs[0]), suffix) + pass.Reportf(assign.Pos(), "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix) } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckErrorReturn(j *lint.Job) { +func CheckErrorReturn(pass *analysis.Pass) (interface{}, error) { fnLoop: - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { sig := fn.Type().(*types.Signature) rets := sig.Results() if rets == nil || rets.Len() < 2 { @@ -213,21 +186,22 @@ fnLoop: } for i := rets.Len() - 2; i >= 0; i-- { if rets.At(i).Type() == types.Universe.Lookup("error").Type() { - j.Errorf(rets.At(i), "error should be returned as the last argument") + pass.Reportf(rets.At(i).Pos(), "error should be returned as the last argument") continue fnLoop } } } + return nil, nil } // CheckUnexportedReturn checks that exported functions on exported // types do not return unexported types. -func (c *Checker) CheckUnexportedReturn(j *lint.Job) { - for _, fn := range j.Pkg.InitialFunctions { +func CheckUnexportedReturn(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if fn.Synthetic != "" || fn.Parent() != nil { continue } - if !ast.IsExported(fn.Name()) || IsInMain(j, fn) || IsInTest(j, fn) { + if !ast.IsExported(fn.Name()) || IsInMain(pass, fn) || IsInTest(pass, fn) { continue } sig := fn.Type().(*types.Signature) @@ -239,14 +213,16 @@ func (c *Checker) CheckUnexportedReturn(j *lint.Job) { if named, ok := DereferenceR(res.At(i).Type()).(*types.Named); ok && !ast.IsExported(named.Obj().Name()) && named != types.Universe.Lookup("error").Type() { - j.Errorf(fn, "should not return unexported type") + pass.Reportf(fn.Pos(), "should not return unexported type") } } } + return nil, nil } -func (c *Checker) CheckReceiverNames(j *lint.Job) { - for _, m := range j.Pkg.SSA.Members { +func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) { + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + for _, m := range ssapkg.Members { if T, ok := m.Object().(*types.TypeName); ok && !T.IsAlias() { ms := typeutil.IntuitiveMethodSet(T.Type(), nil) for _, sel := range ms { @@ -257,18 +233,20 @@ func (c *Checker) CheckReceiverNames(j *lint.Job) { continue } if recv.Name() == "self" || recv.Name() == "this" { - j.Errorf(recv, `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + pass.Reportf(recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) } if recv.Name() == "_" { - j.Errorf(recv, "receiver name should not be an underscore, omit the name if it is unused") + pass.Reportf(recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused") } } } } + return nil, nil } -func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { - for _, m := range j.Pkg.SSA.Members { +func CheckReceiverNamesIdentical(pass *analysis.Pass) (interface{}, error) { + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + for _, m := range ssapkg.Members { names := map[string]int{} var firstFn *types.Func @@ 
-296,16 +274,17 @@ func (c *Checker) CheckReceiverNamesIdentical(j *lint.Job) { seen = append(seen, fmt.Sprintf("%dx %q", count, name)) } - j.Errorf(firstFn, "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) + pass.Reportf(firstFn.Pos(), "methods on the same type should have the same receiver name (seen %s)", strings.Join(seen, ", ")) } } + return nil, nil } -func (c *Checker) CheckContextFirstArg(j *lint.Job) { +func CheckContextFirstArg(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this check doesn't apply to test helpers. Example from the stdlib: // func helperCommandContext(t *testing.T, ctx context.Context, s ...string) (cmd *exec.Cmd) { fnLoop: - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if fn.Synthetic != "" || fn.Parent() != nil { continue } @@ -319,28 +298,29 @@ fnLoop: for i := 1; i < params.Len(); i++ { param := params.At(i) if types.TypeString(param.Type(), nil) == "context.Context" { - j.Errorf(param, "context.Context should be the first argument of a function") + pass.Reportf(param.Pos(), "context.Context should be the first argument of a function") continue fnLoop } } } + return nil, nil } -func (c *Checker) CheckErrorStrings(j *lint.Job) { +func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) { objNames := map[*ssa.Package]map[string]bool{} - ssapkg := j.Pkg.SSA + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg objNames[ssapkg] = map[string]bool{} for _, m := range ssapkg.Members { if typ, ok := m.(*ssa.Type); ok { objNames[ssapkg][typ.Name()] = true } } - for _, fn := range j.Pkg.InitialFunctions { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { objNames[fn.Package()][fn.Name()] = true } - for _, fn := range j.Pkg.InitialFunctions { - if IsInTest(j, fn) { + for _, fn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + if IsInTest(pass, fn) { // We don't care about malformed error messages in tests; // they're usually for direct human consumption, not part // of an API @@ -368,7 +348,7 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) { } switch s[len(s)-1] { case '.', ':', '!', '\n': - j.Errorf(call, "error strings should not end with punctuation or a newline") + pass.Reportf(call.Pos(), "error strings should not end with punctuation or a newline") } idx := strings.IndexByte(s, ' ') if idx == -1 { @@ -402,13 +382,14 @@ func (c *Checker) CheckErrorStrings(j *lint.Job) { // // It could still be a proper noun, though. 
- j.Errorf(call, "error strings should not be capitalized") + pass.Reportf(call.Pos(), "error strings should not be capitalized") } } } + return nil, nil } -func (c *Checker) CheckTimeNames(j *lint.Job) { +func CheckTimeNames(pass *analysis.Pass) (interface{}, error) { suffixes := []string{ "Sec", "Secs", "Seconds", "Msec", "Msecs", @@ -423,31 +404,32 @@ func (c *Checker) CheckTimeNames(j *lint.Job) { for _, name := range names { for _, suffix := range suffixes { if strings.HasSuffix(name.Name, suffix) { - j.Errorf(name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) + pass.Reportf(name.Pos(), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) break } } } } - for _, f := range j.Pkg.Syntax { + for _, f := range pass.Files { ast.Inspect(f, func(node ast.Node) bool { switch node := node.(type) { case *ast.ValueSpec: - T := j.Pkg.TypesInfo.TypeOf(node.Type) + T := pass.TypesInfo.TypeOf(node.Type) fn(T, node.Names) case *ast.FieldList: for _, field := range node.List { - T := j.Pkg.TypesInfo.TypeOf(field.Type) + T := pass.TypesInfo.TypeOf(field.Type) fn(T, field.Names) } } return true }) } + return nil, nil } -func (c *Checker) CheckErrorVarNames(j *lint.Job) { - for _, f := range j.Pkg.Syntax { +func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { for _, decl := range f.Decls { gen, ok := decl.(*ast.GenDecl) if !ok || gen.Tok != token.VAR { @@ -461,7 +443,7 @@ func (c *Checker) CheckErrorVarNames(j *lint.Job) { for i, name := range spec.Names { val := spec.Values[i] - if !IsCallToAST(j, val, "errors.New") && !IsCallToAST(j, val, "fmt.Errorf") { + if !IsCallToAST(pass, val, "errors.New") && !IsCallToAST(pass, val, "fmt.Errorf") { continue } @@ -470,12 +452,13 @@ func (c *Checker) CheckErrorVarNames(j *lint.Job) { prefix = "Err" } if !strings.HasPrefix(name.Name, prefix) { - j.Errorf(name, "error var %s should have name of the form %sFoo", name.Name, prefix) + pass.Reportf(name.Pos(), "error var %s should have name of the form %sFoo", name.Name, prefix) } } } } } + return nil, nil } var httpStatusCodes = map[int]string{ @@ -540,19 +523,22 @@ var httpStatusCodes = map[int]string{ 511: "StatusNetworkAuthenticationRequired", } -func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { +func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) { whitelist := map[string]bool{} - for _, code := range j.Pkg.Config.HTTPStatusCodeWhitelist { + for _, code := range config.For(pass).HTTPStatusCodeWhitelist { whitelist[code] = true } fn := func(node ast.Node) bool { + if node == nil { + return true + } call, ok := node.(*ast.CallExpr) if !ok { return true } var arg int - switch CallNameAST(j, call) { + switch CallNameAST(pass, call) { case "net/http.Error": arg = 2 case "net/http.Redirect": @@ -580,29 +566,32 @@ func (c *Checker) CheckHTTPStatusCodes(j *lint.Job) { if !ok { return true } - j.Errorf(lit, "should use constant http.%s instead of numeric literal %d", s, n) + ReportfFG(pass, lit.Pos(), "should use constant http.%s instead of numeric literal %d", s, n) return true } - for _, f := range j.Pkg.Syntax { + // OPT(dh): replace with inspector + for _, f := range pass.Files { ast.Inspect(f, fn) } + return nil, nil } -func (c *Checker) CheckDefaultCaseOrder(j *lint.Job) { +func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { stmt := node.(*ast.SwitchStmt) list := stmt.Body.List for i, c := range list { if c.(*ast.CaseClause).List == nil && i 
!= 0 && i != len(list)-1 { - j.Errorf(c, "default case should be first or last in switch statement") + ReportfFG(pass, c.Pos(), "default case should be first or last in switch statement") break } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SwitchStmt)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckYodaConditions(j *lint.Job) { +func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { cond := node.(*ast.BinaryExpr) if cond.Op != token.EQL && cond.Op != token.NEQ { @@ -615,12 +604,13 @@ func (c *Checker) CheckYodaConditions(j *lint.Job) { // Don't flag lit == lit conditions, just in case return } - j.Errorf(cond, "don't use Yoda conditions") + ReportfFG(pass, cond.Pos(), "don't use Yoda conditions") } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) + return nil, nil } -func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { +func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { lit := node.(*ast.BasicLit) if lit.Kind != token.STRING { @@ -628,11 +618,12 @@ func (c *Checker) CheckInvisibleCharacters(j *lint.Job) { } for _, r := range lit.Value { if unicode.Is(unicode.Cf, r) { - j.Errorf(lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) + pass.Reportf(lit.Pos(), "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' { - j.Errorf(lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) + pass.Reportf(lit.Pos(), "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) } } } - j.Pkg.Inspector.Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BasicLit)(nil)}, fn) + return nil, nil } diff --git a/stylecheck/lint_test.go b/stylecheck/lint_test.go index 548fadd14..7b3dd3cc6 100644 --- a/stylecheck/lint_test.go +++ b/stylecheck/lint_test.go @@ -3,10 +3,44 @@ package stylecheck import ( "testing" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis/analysistest" ) func TestAll(t *testing.T) { - c := NewChecker() - testutil.TestAll(t, c, "") + checks := map[string][]struct { + dir string + version string + }{ + "ST1000": { + {dir: "CheckPackageComment-1"}, + {dir: "CheckPackageComment-2"}, + }, + "ST1001": {{dir: "CheckDotImports"}}, + "ST1003": { + {dir: "CheckNames"}, + {dir: "CheckNames_generated"}, + }, + "ST1005": {{dir: "CheckErrorStrings"}}, + "ST1006": {{dir: "CheckReceiverNames"}}, + "ST1008": {{dir: "CheckErrorReturn"}}, + "ST1011": {{dir: "CheckTimeNames"}}, + "ST1012": {{dir: "CheckErrorVarNames"}}, + "ST1013": {{dir: "CheckHTTPStatusCodes"}}, + "ST1015": {{dir: "CheckDefaultCaseOrder"}}, + "ST1016": {{dir: "CheckReceiverNamesIdentical"}}, + "ST1017": {{dir: "CheckYodaConditions"}}, + "ST1018": {{dir: "CheckInvisibleCharacters"}}, + } + + for check, dirs := range checks { + a := Analyzers[check] + for _, dir := range dirs { + if dir.version != "" { + if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { + t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a, 
dir.dir) + } + } } diff --git a/stylecheck/names.go b/stylecheck/names.go index 1c0718fdd..160f9d7ff 100644 --- a/stylecheck/names.go +++ b/stylecheck/names.go @@ -9,7 +9,8 @@ import ( "strings" "unicode" - "honnef.co/go/tools/lint" + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/config" . "honnef.co/go/tools/lint/lintdsl" ) @@ -21,7 +22,7 @@ var knownNameExceptions = map[string]bool{ "kWh": true, } -func (c *Checker) CheckNames(j *lint.Job) { +func CheckNames(pass *analysis.Pass) (interface{}, error) { // A large part of this function is copied from // github.com/golang/lint, Copyright (c) 2013 The Go Authors, // licensed under the BSD 3-clause license. @@ -45,7 +46,7 @@ func (c *Checker) CheckNames(j *lint.Job) { // Handle two common styles from other languages that don't belong in Go. if len(id.Name) >= 5 && allCaps(id.Name) && strings.Contains(id.Name, "_") { - j.Errorf(id, "should not use ALL_CAPS in Go names; use CamelCase instead") + ReportfFG(pass, id.Pos(), "should not use ALL_CAPS in Go names; use CamelCase instead") return } @@ -55,10 +56,10 @@ func (c *Checker) CheckNames(j *lint.Job) { } if len(id.Name) > 2 && strings.Contains(id.Name[1:len(id.Name)-1], "_") { - j.Errorf(id, "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should) + ReportfFG(pass, id.Pos(), "should not use underscores in Go names; %s %s should be %s", thing, id.Name, should) return } - j.Errorf(id, "%s %s should be %s", thing, id.Name, should) + ReportfFG(pass, id.Pos(), "%s %s should be %s", thing, id.Name, should) } checkList := func(fl *ast.FieldList, thing string, initialisms map[string]bool) { if fl == nil { @@ -71,17 +72,18 @@ func (c *Checker) CheckNames(j *lint.Job) { } } - initialisms := make(map[string]bool, len(j.Pkg.Config.Initialisms)) - for _, word := range j.Pkg.Config.Initialisms { + il := config.For(pass).Initialisms + initialisms := make(map[string]bool, len(il)) + for _, word := range il { initialisms[word] = true } - for _, f := range j.Pkg.Syntax { + for _, f := range pass.Files { // Package names need slightly different handling than other names. if !strings.HasSuffix(f.Name.Name, "_test") && strings.Contains(f.Name.Name, "_") { - j.Errorf(f, "should not use underscores in package names") + ReportfFG(pass, f.Pos(), "should not use underscores in package names") } if strings.IndexFunc(f.Name.Name, unicode.IsUpper) != -1 { - j.Errorf(f, "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) + ReportfFG(pass, f.Pos(), "should not use MixedCaps in package name; %s should be %s", f.Name.Name, strings.ToLower(f.Name.Name)) } ast.Inspect(f, func(node ast.Node) bool { @@ -104,7 +106,7 @@ func (c *Checker) CheckNames(j *lint.Job) { return true } - if IsInTest(j, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + if IsInTest(pass, v) && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { return true } @@ -173,6 +175,7 @@ func (c *Checker) CheckNames(j *lint.Job) { return true }) } + return nil, nil } // lintName returns a different name if it should be different. 
diff --git a/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go b/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go index 975b0684b..a96ab60e2 100644 --- a/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go +++ b/stylecheck/testdata/src/CheckContextFirstArg.disabled/CheckContextFirstArg.go @@ -8,5 +8,5 @@ type T int func fn1(int) {} func fn2(context.Context, int) {} func fn3(context.Context, int, context.Context) {} -func fn4(int, context.Context) {} // MATCH "context.Context should be the first argument of a function" -func (T) FN(int, context.Context) {} // MATCH "context.Context should be the first argument of a function" +func fn4(int, context.Context) {} // want `context\.Context should be the first argument of a function` +func (T) FN(int, context.Context) {} // want `context\.Context should be the first argument of a function` diff --git a/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go b/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go index 88294e3e5..28c32eb18 100644 --- a/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go +++ b/stylecheck/testdata/src/CheckDefaultCaseOrder/CheckDefaultCaseOrder.go @@ -30,7 +30,7 @@ func fn(x int) { switch x { case 1: - default: // MATCH "default case should be first or last in switch statement" + default: // want `default case should be first or last in switch statement` case 2: } } diff --git a/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go b/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go index d353b6608..246afe6d5 100644 --- a/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go +++ b/stylecheck/testdata/src/CheckDotImports/CheckDotImports.go @@ -1,6 +1,6 @@ // Package pkg ... package pkg -import . "fmt" // MATCH "should not use dot imports" +import . "fmt" // want `should not use dot imports` var _ = Println diff --git a/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go b/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go index b353bf63b..feed85e58 100644 --- a/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go +++ b/stylecheck/testdata/src/CheckErrorReturn/CheckErrorReturn.go @@ -1,8 +1,8 @@ // Package pkg ... 
package pkg -func fn1() (error, int) { return nil, 0 } // MATCH "error should be returned as the last argument" -func fn2() (a, b error, c int) { return nil, nil, 0 } // MATCH "error should be returned as the last argument" +func fn1() (error, int) { return nil, 0 } // want `error should be returned as the last argument` +func fn2() (a, b error, c int) { return nil, nil, 0 } // want `error should be returned as the last argument` func fn3() (a int, b, c error) { return 0, nil, nil } func fn4() (error, error) { return nil, nil } func fn5() int { return 0 } diff --git a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go index 0de45215b..2659fa126 100644 --- a/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go +++ b/stylecheck/testdata/src/CheckErrorStrings/CheckErrorStrings.go @@ -5,11 +5,11 @@ import "errors" func fn() { errors.New("a perfectly fine error") - errors.New("Not a great error") // MATCH "error strings should not be capitalized" - errors.New("also not a great error.") // MATCH "error strings should not end with punctuation or a newline" + errors.New("Not a great error") // want `error strings should not be capitalized` + errors.New("also not a great error.") // want `error strings should not end with punctuation or a newline` errors.New("URL is okay") errors.New("SomeFunc is okay") - errors.New("URL is okay, but the period is not.") // MATCH "error strings should not end with punctuation or a newline" + errors.New("URL is okay, but the period is not.") // want `error strings should not end with punctuation or a newline` errors.New("T must not be nil") } diff --git a/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go b/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go index fcae261ae..5652ef9a5 100644 --- a/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go +++ b/stylecheck/testdata/src/CheckErrorVarNames/CheckErrorVarNames.go @@ -7,19 +7,16 @@ import ( ) var ( - foo = errors.New("") // MATCH "error var foo should have name of the form errFoo" + foo = errors.New("") // want `error var foo should have name of the form errFoo` errBar = errors.New("") - qux, fisk, errAnother = errors.New(""), errors.New(""), errors.New("") - abc = fmt.Errorf("") // MATCH "error var abc should have name of the form errFoo" + qux, fisk, errAnother = errors.New(""), errors.New(""), errors.New("") // want `error var qux should have name of the form errFoo` `error var fisk should have name of the form errFoo` + abc = fmt.Errorf("") // want `error var abc should have name of the form errFoo` errAbc = fmt.Errorf("") ) -var wrong = errors.New("") // MATCH "error var wrong should have name of the form errFoo" +var wrong = errors.New("") // want `error var wrong should have name of the form errFoo` var result = fn() func fn() error { return nil } - -// MATCH:12 "error var qux should have name of the form errFoo" -// MATCH:12 "error var fisk should have name of the form errFoo" diff --git a/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go b/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go index 17eac85ca..602be22bb 100644 --- a/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go +++ b/stylecheck/testdata/src/CheckHTTPStatusCodes/CheckHTTPStatusCodes.go @@ -5,10 +5,10 @@ import "net/http" func fn() { // Check all the supported functions - http.Error(nil, "", 506) // MATCH "http.StatusVariantAlsoNegotiates" - 
http.Redirect(nil, nil, "", 506) // MATCH "http.StatusVariantAlsoNegotiates" - http.StatusText(506) // MATCH "http.StatusVariantAlsoNegotiates" - http.RedirectHandler("", 506) // MATCH "http.StatusVariantAlsoNegotiates" + http.Error(nil, "", 506) // want `http\.StatusVariantAlsoNegotiates` + http.Redirect(nil, nil, "", 506) // want `http\.StatusVariantAlsoNegotiates` + http.StatusText(506) // want `http\.StatusVariantAlsoNegotiates` + http.RedirectHandler("", 506) // want `http\.StatusVariantAlsoNegotiates` // Don't flag literals with no known constant http.StatusText(600) diff --git a/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go b/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go index 7755a9944..c8aa51d10 100644 --- a/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go +++ b/stylecheck/testdata/src/CheckIncDec.disabled/CheckIncDec.go @@ -5,8 +5,8 @@ func fn() { var x int x-- x++ - x += 1 // MATCH "should replace x += 1 with x++" - x -= 1 // MATCH "should replace x -= 1 with x--" + x += 1 // want `should replace x \+= 1 with x\+\+` + x -= 1 // want `should replace x -= 1 with x--` x /= 1 x += 2 x -= 2 diff --git a/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go index 152505f69..bc6a7446c 100644 --- a/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go +++ b/stylecheck/testdata/src/CheckInvisibleCharacters/CheckInvisibleCharacters.go @@ -2,14 +2,11 @@ package pkg var ( - a = "" // MATCH "Unicode control character U+0007" - b = "" + a = "" // want `Unicode control character U\+0007` + b = "" // want `Unicode control character U\+0007` `Unicode control character U\+001A` c = "Test test" d = `T est` - e = `Zero​Width` // MATCH "Unicode format character U+200B" + e = `Zero​Width` // want `Unicode format character U\+200B` f = "\u200b" ) - -// MATCH:6 "Unicode control character U+0007" -// MATCH:6 "Unicode control character U+001A" diff --git a/stylecheck/testdata/src/CheckNames/CheckNames.go b/stylecheck/testdata/src/CheckNames/CheckNames.go index 9f06d9361..a61b5d4a0 100644 --- a/stylecheck/testdata/src/CheckNames/CheckNames.go +++ b/stylecheck/testdata/src/CheckNames/CheckNames.go @@ -1,29 +1,29 @@ // Package pkg_foo ... 
-package pkg_foo // MATCH "should not use underscores in package names" +package pkg_foo // want `should not use underscores in package names` var range_ int var _abcdef int var abcdef_ int -var abc_def int // MATCH "should not use underscores in Go names; var abc_def should be abcDef" -var abc_def_ int // MATCH "should not use underscores in Go names; var abc_def_ should be abcDef_" +var abc_def int // want `should not use underscores in Go names; var abc_def should be abcDef` +var abc_def_ int // want `should not use underscores in Go names; var abc_def_ should be abcDef_` -func fn_1() {} // MATCH "func fn_1 should be fn1" +func fn_1() {} // want `func fn_1 should be fn1` func fn2() {} -func fn_Id() {} // MATCH "func fn_Id should be fnID" -func fnId() {} // MATCH "func fnId should be fnID" +func fn_Id() {} // want `func fn_Id should be fnID` +func fnId() {} // want `func fnId should be fnID` -var FOO_BAR int // MATCH "should not use ALL_CAPS in Go names; use CamelCase instead" -var Foo_BAR int // MATCH "var Foo_BAR should be FooBAR" -var foo_bar int // MATCH "foo_bar should be fooBar" +var FOO_BAR int // want `should not use ALL_CAPS in Go names; use CamelCase instead` +var Foo_BAR int // want `var Foo_BAR should be FooBAR` +var foo_bar int // want `foo_bar should be fooBar` var kFoobar int // not a check we inherited from golint. more false positives than true ones. func fn(x []int) { var ( - a_b = 1 // MATCH "var a_b should be aB" - c_d int // MATCH "var c_d should be cD" + a_b = 1 // want `var a_b should be aB` + c_d int // want `var c_d should be cD` ) a_b += 2 - for e_f := range x { // MATCH "range var e_f should be eF" + for e_f := range x { // want `range var e_f should be eF` _ = e_f } @@ -35,16 +35,16 @@ func fn(x []int) { func fn_3() {} //export not actually the export keyword -func fn_4() {} // MATCH "func fn_4 should be fn4" +func fn_4() {} // want `func fn_4 should be fn4` //export -func fn_5() {} // MATCH "func fn_5 should be fn5" +func fn_5() {} // want `func fn_5 should be fn5` // export fn_6 -func fn_6() {} // MATCH "func fn_6 should be fn6" +func fn_6() {} // want `func fn_6 should be fn6` //export fn_8 -func fn_7() {} // MATCH "func fn_7 should be fn7" +func fn_7() {} // want `func fn_7 should be fn7` //go:linkname fn_8 time.Now func fn_8() {} diff --git a/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go b/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go index 52f77f4ec..2862e374d 100644 --- a/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go +++ b/stylecheck/testdata/src/CheckPackageComment-1/CheckPackageComment-1.go @@ -1 +1 @@ -package pkg // MATCH "at least one file in a package should have a package comment" +package pkg // want `at least one file in a package should have a package comment` diff --git a/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go b/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go index 1ee009306..21499368e 100644 --- a/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go +++ b/stylecheck/testdata/src/CheckPackageComment-2/CheckPackageComment-2.go @@ -1,4 +1,2 @@ -// This package is great +// This package is great // want `package comment should be of the form` package pkg - -// MATCH:1 "package comment should be of the form" diff --git a/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go b/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go index e6bb2c48d..ead410cec 100644 --- 
a/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go +++ b/stylecheck/testdata/src/CheckReceiverNames/CheckReceiverNames.go @@ -3,9 +3,9 @@ package pkg type T1 int -func (x T1) Fn1() {} // MATCH "methods on the same type should have the same receiver name" +func (x T1) Fn1() {} func (y T1) Fn2() {} func (x T1) Fn3() {} func (T1) Fn4() {} -func (_ T1) Fn5() {} // MATCH "receiver name should not be an underscore, omit the name if it is unused" -func (self T1) Fn6() {} // MATCH "receiver name should be a reflection of its identity" +func (_ T1) Fn5() {} // want `receiver name should not be an underscore, omit the name if it is unused` +func (self T1) Fn6() {} // want `receiver name should be a reflection of its identity` diff --git a/stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go b/stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go new file mode 100644 index 000000000..7447a8107 --- /dev/null +++ b/stylecheck/testdata/src/CheckReceiverNamesIdentical/CheckReceiverNames.go @@ -0,0 +1,11 @@ +// Package pkg ... +package pkg + +type T1 int + +func (x T1) Fn1() {} // want `methods on the same type should have the same receiver name` +func (y T1) Fn2() {} +func (x T1) Fn3() {} +func (T1) Fn4() {} +func (_ T1) Fn5() {} +func (self T1) Fn6() {} diff --git a/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go b/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go index 9049b4b3c..be68e4574 100644 --- a/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go +++ b/stylecheck/testdata/src/CheckTimeNames/CheckTimeNames.go @@ -6,12 +6,12 @@ import "time" type T1 struct { aMS int B time.Duration - BMillis time.Duration // MATCH "don't use unit-specific suffix" + BMillis time.Duration // want `don't use unit-specific suffix` } -func fn1(a, b, cMS time.Duration) { // MATCH "don't use unit-specific suffix" +func fn1(a, b, cMS time.Duration) { // want `don't use unit-specific suffix` var x time.Duration - var xMS time.Duration // MATCH "don't use unit-specific suffix" - var y, yMS time.Duration // MATCH "don't use unit-specific suffix" + var xMS time.Duration // want `don't use unit-specific suffix` + var y, yMS time.Duration // want `don't use unit-specific suffix` _, _, _, _ = x, xMS, y, yMS } diff --git a/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go index 273794656..94e3c6c8b 100644 --- a/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go +++ b/stylecheck/testdata/src/CheckYodaConditions/CheckYodaConditions.go @@ -2,15 +2,15 @@ package pkg func fn(x string, y int) { - if "" == x { // MATCH "Yoda" + if "" == x { // want `Yoda` } - if 0 == y { // MATCH "Yoda" + if 0 == y { // want `Yoda` } if 0 > y { } if "" == "" { } - if "" == "" || 0 == y { // MATCH "Yoda" + if "" == "" || 0 == y { // want `Yoda` } } diff --git a/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go b/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go index 87224cd33..87f93861c 100644 --- a/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go +++ b/stylecheck/testdata/src/_CheckBlankImports.disabled/CheckBlankImports-2.go @@ -1,15 +1,15 @@ // Package pkg ... 
package pkg -import _ "fmt" +import _ "fmt" // want `blank import` -import _ "fmt" +import _ "fmt" // want `blank import` import _ "fmt" import _ "fmt" -import _ "fmt" +import _ "fmt" // want `blank import` import "strings" -import _ "fmt" +import _ "fmt" // want `blank import` // This is fine import _ "fmt" @@ -22,17 +22,17 @@ import _ "fmt" // This is fine import _ "fmt" import "bytes" -import _ "fmt" +import _ "fmt" // want `blank import` import _ "fmt" // This is fine // This is not fine import ( - _ "fmt" + _ "fmt" // want `blank import` ) import ( - _ "fmt" + _ "fmt" // want `blank import` "strconv" // This is fine _ "fmt" @@ -41,11 +41,3 @@ import ( var _ = strings.NewReader var _ = bytes.NewBuffer var _ = strconv.IntSize - -// MATCH:4 "blank import" -// MATCH:6 "blank import" -// MATCH:10 "blank import" -// MATCH:12 "blank import" -// MATCH:25 "blank import" -// MATCH:31 "blank import" -// MATCH:35 "blank import" diff --git a/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go b/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go index 802015fde..0a49b888f 100644 --- a/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go +++ b/stylecheck/testdata/src/_CheckUnexportedReturn.disabled/CheckUnexportedReturn.go @@ -10,12 +10,12 @@ func fn1() string { return "" } func Fn2() error { return nil } func fn3() error { return nil } func fn5() t1 { return 0 } -func Fn6() t1 { return 0 } // MATCH "should not return unexported type" -func Fn7() *t1 { return nil } // MATCH "should not return unexported type" +func Fn6() t1 { return 0 } // want `should not return unexported type` +func Fn7() *t1 { return nil } // want `should not return unexported type` func Fn8() T2 { return 0 } func (Recv) fn9() t1 { return 0 } -func (Recv) Fn10() t1 { return 0 } // MATCH "should not return unexported type" +func (Recv) Fn10() t1 { return 0 } // want `should not return unexported type` func (Recv) Fn11() T2 { return 0 } func (recv) fn9() t1 { return 0 } diff --git a/unused/testdata/src/alias/alias.go b/unused/testdata/src/alias/alias.go index f181c173a..911501e59 100644 --- a/unused/testdata/src/alias/alias.go +++ b/unused/testdata/src/alias/alias.go @@ -1,11 +1,11 @@ package main type t1 struct{} -type t2 struct{} // MATCH "t2 is unused" +type t2 struct{} // want `t2` type t3 struct{} type alias1 = t1 -type alias2 = t2 // MATCH "alias2 is unused" +type alias2 = t2 // want `alias2` type alias3 = t3 type alias4 = int diff --git a/unused/testdata/src/blank/blank.go b/unused/testdata/src/blank/blank.go index b43ab1078..ee707b626 100644 --- a/unused/testdata/src/blank/blank.go +++ b/unused/testdata/src/blank/blank.go @@ -2,7 +2,7 @@ package pkg import _ "fmt" -type t1 struct{} // MATCH /t1 is unused/ +type t1 struct{} // want `t1` type t2 struct { _ int } @@ -12,7 +12,7 @@ type t5 struct{} var _ = t2{} -func fn1() { // MATCH /fn1 is unused/ +func fn1() { // want `fn1` _ = t1{} var _ = t1{} } diff --git a/unused/testdata/src/cgo/cgo.go b/unused/testdata/src/cgo/cgo.go index 6b484f820..4b852d173 100644 --- a/unused/testdata/src/cgo/cgo.go +++ b/unused/testdata/src/cgo/cgo.go @@ -3,4 +3,4 @@ package pkg //go:cgo_export_dynamic func foo() {} -func bar() {} // MATCH /bar is unused/ +func bar() {} // want `bar` diff --git a/unused/testdata/src/consts/consts.go b/unused/testdata/src/consts/consts.go index 8c1076cbd..1cab7ddde 100644 --- a/unused/testdata/src/consts/consts.go +++ b/unused/testdata/src/consts/consts.go @@ -12,9 +12,9 @@ 
const ( c7 c8 - c9 // MATCH "c9 is unused" - c10 // MATCH "c10 is unused" - c11 // MATCH "c11 is unused" + c9 // want `c9` + c10 // want `c10` + c11 // want `c11` ) var _ = []int{c3: 1} @@ -31,5 +31,5 @@ func init() { } func Fn() { - const X = 1 // MATCH "X is unused" + const X = 1 // want `X` } diff --git a/unused/testdata/src/conversion/conversion.go b/unused/testdata/src/conversion/conversion.go index afeb1f7a1..0821c67da 100644 --- a/unused/testdata/src/conversion/conversion.go +++ b/unused/testdata/src/conversion/conversion.go @@ -17,12 +17,12 @@ type t2 struct { type t3 struct { a int - b int // MATCH /b is unused/ + b int // want `b` } type t4 struct { a int - b int // MATCH /b is unused/ + b int // want `b` } type t5 struct { diff --git a/unused/testdata/src/cyclic/cyclic.go b/unused/testdata/src/cyclic/cyclic.go index 8601c24f2..b9dfc952d 100644 --- a/unused/testdata/src/cyclic/cyclic.go +++ b/unused/testdata/src/cyclic/cyclic.go @@ -1,9 +1,9 @@ package pkg -func a() { // MATCH /a is unused/ +func a() { // want `a` b() } -func b() { // MATCH /b is unused/ +func b() { // want `b` a() } diff --git a/unused/testdata/src/embedding/embedding.go b/unused/testdata/src/embedding/embedding.go index b45b3fc72..03fb8dd2f 100644 --- a/unused/testdata/src/embedding/embedding.go +++ b/unused/testdata/src/embedding/embedding.go @@ -27,8 +27,8 @@ type I2 interface { type t3 struct{} type t4 struct { - x int // MATCH /x is unused/ - y int // MATCH /y is unused/ + x int // want `x` + y int // want `y` t3 } diff --git a/unused/testdata/src/fields/fields.go b/unused/testdata/src/fields/fields.go index feb5ea022..401acf4f1 100644 --- a/unused/testdata/src/fields/fields.go +++ b/unused/testdata/src/fields/fields.go @@ -23,9 +23,8 @@ type a1 [1]t14 type t15 struct{ f151 int } type a2 [1]t15 type t16 struct{ f161 int } -type t17 struct{ f171, f172 int } // MATCH /t17 is unused/ -// MATCH:28 /f183 is unused/ -type t18 struct{ f181, f182, f183 int } // MATCH /f182 is unused/ +type t17 struct{ f171, f172 int } // want `t17` +type t18 struct{ f181, f182, f183 int } // want `f182` `f183` type t19 struct{ f191 int } type m2 map[string]t19 @@ -33,7 +32,7 @@ type m2 map[string]t19 type t20 struct{ f201 int } type m3 map[string]t20 -type t21 struct{ f211, f212 int } // MATCH /f211 is unused/ +type t21 struct{ f211, f212 int } // want `f211` func foo() { _ = t10{1} @@ -58,7 +57,7 @@ func foo() { _ = a1{{1}} _ = a2{0: {1}} _ = map[[1]t16]int{{{1}}: 1} - y := struct{ x int }{} // MATCH /x is unused/ + y := struct{ x int }{} // want `x` _ = y _ = t18{f181: 1} _ = []m2{{"a": {1}}} diff --git a/unused/testdata/src/functions/functions.go b/unused/testdata/src/functions/functions.go index 8434e8587..cb74a895f 100644 --- a/unused/testdata/src/functions/functions.go +++ b/unused/testdata/src/functions/functions.go @@ -11,18 +11,18 @@ func main() { _ = st() } -type t1 struct{} // MATCH /t1 is unused/ +type t1 struct{} // want `t1` type t2 struct{} type t3 struct{} -func fn1() t1 { return t1{} } // MATCH /fn1 is unused/ +func fn1() t1 { return t1{} } // want `fn1` func fn2() (x t2) { return } func fn3() *t3 { return nil } func fn4() { const x = 1 - const y = 2 // MATCH /y is unused/ - type foo int // MATCH /foo is unused/ + const y = 2 // want `y` + type foo int // want `foo` type bar int _ = x diff --git a/unused/testdata/src/generated1/generated1.go b/unused/testdata/src/generated1/generated1.go deleted file mode 100644 index 1a8ca55f6..000000000 --- a/unused/testdata/src/generated1/generated1.go +++ /dev/null @@ -1,5 +0,0 
@@ -// Code generated by a clever monkey; DO NOT EDIT. - -package pkg - -type t struct{} diff --git a/unused/testdata/src/generated2/generated2.go b/unused/testdata/src/generated2/generated2.go deleted file mode 100644 index 17d736ee1..000000000 --- a/unused/testdata/src/generated2/generated2.go +++ /dev/null @@ -1,5 +0,0 @@ -// Code generated by a bunch of monkeys with typewriters and RSI, DO NOT EDIT. - -package pkg - -type t struct{} diff --git a/unused/testdata/src/interfaces/interfaces.go b/unused/testdata/src/interfaces/interfaces.go index cb507a07e..59b1be73e 100644 --- a/unused/testdata/src/interfaces/interfaces.go +++ b/unused/testdata/src/interfaces/interfaces.go @@ -7,7 +7,7 @@ type I interface { type t struct{} func (t) fn1() {} -func (t) fn2() {} // MATCH /fn2 is unused/ +func (t) fn2() {} // want `fn2` func init() { _ = t{} diff --git a/unused/testdata/src/linkname/linkname.go b/unused/testdata/src/linkname/linkname.go index 1c43c35da..1423a2148 100644 --- a/unused/testdata/src/linkname/linkname.go +++ b/unused/testdata/src/linkname/linkname.go @@ -12,7 +12,7 @@ func foo() {} var bar int var ( - baz int // MATCH "baz is unused" + baz int // want `baz` //go:linkname qux other3 qux int ) diff --git a/unused/testdata/src/main/main.go b/unused/testdata/src/main/main.go index ab000fc79..ae5c913ae 100644 --- a/unused/testdata/src/main/main.go +++ b/unused/testdata/src/main/main.go @@ -1,13 +1,13 @@ package main func Fn1() {} -func Fn2() {} // MATCH /Fn2 is unused/ +func Fn2() {} // want `Fn2` -const X = 1 // MATCH /X is unused/ +const X = 1 // want `X` -var Y = 2 // MATCH /Y is unused/ +var Y = 2 // want `Y` -type Z struct{} // MATCH /Z is unused/ +type Z struct{} // want `Z` func main() { Fn1() diff --git a/unused/testdata/src/methods/methods.go b/unused/testdata/src/methods/methods.go index 17673addd..0eaf6ee7f 100644 --- a/unused/testdata/src/methods/methods.go +++ b/unused/testdata/src/methods/methods.go @@ -6,7 +6,7 @@ type t3 struct{} func (t1) Foo() {} func (t3) Foo() {} -func (t3) foo() {} // MATCH /foo is unused/ +func (t3) foo() {} // want `foo` func init() { _ = t1{} diff --git a/unused/testdata/src/nested/nested.go b/unused/testdata/src/nested/nested.go index 518172830..7e108a28c 100644 --- a/unused/testdata/src/nested/nested.go +++ b/unused/testdata/src/nested/nested.go @@ -1,10 +1,10 @@ package pkg -type t struct{} // MATCH /t is unused/ +type t struct{} // want `t` func (t) fragment() {} -func fn() bool { // MATCH /fn is unused/ +func fn() bool { // want `fn` var v interface{} = t{} switch obj := v.(type) { case interface { diff --git a/unused/testdata/src/nocopy-main/nocopy-main.go b/unused/testdata/src/nocopy-main/nocopy-main.go index 4fefb5071..369a5d503 100644 --- a/unused/testdata/src/nocopy-main/nocopy-main.go +++ b/unused/testdata/src/nocopy-main/nocopy-main.go @@ -2,8 +2,8 @@ package main type myNoCopy1 struct{} type myNoCopy2 struct{} -type locker struct{} // MATCH "locker is unused" -type someStruct struct{ x int } // MATCH "someStruct is unused" +type locker struct{} // want `locker` +type someStruct struct{ x int } // want `someStruct` func (myNoCopy1) Lock() {} func (recv myNoCopy2) Lock() {} @@ -14,9 +14,9 @@ func (someStruct) Lock() {} type T struct { noCopy1 myNoCopy1 noCopy2 myNoCopy2 - field1 someStruct // MATCH "field1 is unused" - field2 locker // MATCH "field2 is unused" - field3 int // MATCH "field3 is unused" + field1 someStruct // want `field1` + field2 locker // want `field2` + field3 int // want `field3` } func main() { diff --git 
a/unused/testdata/src/nocopy/nocopy.go b/unused/testdata/src/nocopy/nocopy.go index 156edf50c..98e46d4eb 100644 --- a/unused/testdata/src/nocopy/nocopy.go +++ b/unused/testdata/src/nocopy/nocopy.go @@ -2,8 +2,8 @@ package bar type myNoCopy1 struct{} type myNoCopy2 struct{} -type locker struct{} // MATCH "locker is unused" -type someStruct struct{ x int } // MATCH "someStruct is unused" +type locker struct{} // want `locker` +type someStruct struct{ x int } // want `someStruct` func (myNoCopy1) Lock() {} func (recv myNoCopy2) Lock() {} @@ -14,7 +14,7 @@ func (someStruct) Lock() {} type T struct { noCopy1 myNoCopy1 noCopy2 myNoCopy2 - field1 someStruct // MATCH "field1 is unused" - field2 locker // MATCH "field2 is unused" - field3 int // MATCH "field3 is unused" + field1 someStruct // want `field1` + field2 locker // want `field2` + field3 int // want `field3` } diff --git a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go index 648b7d6f0..fb577f97c 100644 --- a/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go +++ b/unused/testdata/src/pointer-type-embedding/pointer-type-embedding.go @@ -6,7 +6,7 @@ func init() { } type T0 struct { - m int // MATCH /m is unused/ + m int // want `m` n int } diff --git a/unused/testdata/src/quiet/quiet.go b/unused/testdata/src/quiet/quiet.go index dbdf47d1e..42cca0dfd 100644 --- a/unused/testdata/src/quiet/quiet.go +++ b/unused/testdata/src/quiet/quiet.go @@ -1,21 +1,21 @@ package pkg -type iface interface { // MATCH "type iface is unused" +type iface interface { // want `iface` foo() } -type t1 struct{} // MATCH "type t1 is unused" +type t1 struct{} // want `t1` func (t1) foo() {} type t2 struct{} -func (t t2) bar(arg int) (ret int) { return 0 } // MATCH "func t2.bar is unused" +func (t t2) bar(arg int) (ret int) { return 0 } // want `bar` func init() { _ = t2{} } -type t3 struct { // MATCH "type t3 is unused" +type t3 struct { // want `t3` a int b int } diff --git a/unused/testdata/src/unused_type/unused_type.go b/unused/testdata/src/unused_type/unused_type.go index eabfce4b5..0881ffe61 100644 --- a/unused/testdata/src/unused_type/unused_type.go +++ b/unused/testdata/src/unused_type/unused_type.go @@ -1,6 +1,6 @@ package pkg -type t1 struct{} // MATCH /t1 is unused/ +type t1 struct{} // want `t1` func (t1) Fn() {} @@ -12,6 +12,6 @@ func init() { (*t2).Fn(nil) } -type t3 struct{} // MATCH /t3 is unused/ +type t3 struct{} // want `t3` func (t3) fn() diff --git a/unused/unused.go b/unused/unused.go index f69bddae8..fdecc743a 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -9,7 +9,9 @@ import ( "strings" "sync" + "golang.org/x/tools/go/analysis" "honnef.co/go/tools/go/types/typeutil" + "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" @@ -131,28 +133,6 @@ func assert(b bool) { } } -type Checker struct { - WholeProgram bool - Debug io.Writer - - interfaces []*types.Interface - initialPackages []*lint.Pkg - scopes map[*types.Scope]*ssa.Function - - seenMu sync.Mutex - seen map[token.Position]struct{} - out []types.Object -} - -func (*Checker) Name() string { return "unused" } -func (*Checker) Prefix() string { return "U" } - -func (l *Checker) Checks() []lint.Check { - return []lint.Check{ - {ID: "U1000", FilterGenerated: true, Fn: l.Lint}, - } -} - func typString(obj types.Object) string { switch obj := obj.(type) { case *types.Func: @@ -413,77 +393,135 @@ var 
runtimeFuncs = map[string]bool{ "write": true, } -func (c *Checker) Init(prog *lint.Program) { - for _, pkg := range prog.AllPackages { - c.interfaces = append(c.interfaces, interfacesFromExportData(pkg.Types)...) +type pkg struct { + Fset *token.FileSet + Files []*ast.File + Pkg *types.Package + TypesInfo *types.Info + TypesSizes types.Sizes + SSA *ssa.Package + SrcFuncs []*ssa.Function +} + +type Checker struct { + mu sync.Mutex + + WholeProgram bool + Debug io.Writer + + initialPackages map[*types.Package]struct{} + allPackages map[*types.Package]struct{} + + seenMu sync.Mutex + seen map[token.Position]struct{} + + graph *Graph + out []types.Object + fset *token.FileSet +} + +func NewChecker() *Checker { + c := &Checker{ + seen: map[token.Position]struct{}{}, + initialPackages: map[*types.Package]struct{}{}, } - c.initialPackages = prog.InitialPackages - c.seen = map[token.Position]struct{}{} - c.scopes = map[*types.Scope]*ssa.Function{} - for _, pkg := range prog.InitialPackages { - for _, fn := range pkg.InitialFunctions { - if fn.Object() != nil { - scope := fn.Object().(*types.Func).Scope() - c.scopes[scope] = fn - } + return c +} + +func (c *Checker) Analyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "U1000", + Doc: "Unused code", + Run: c.Run, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + } +} + +func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { + c.mu.Lock() + defer c.mu.Unlock() + + var visit func(pkg *types.Package) + visit = func(pkg *types.Package) { + if _, ok := c.allPackages[pkg]; ok { + return + } + c.allPackages[pkg] = struct{}{} + for _, imp := range pkg.Imports() { + visit(imp) } } - // This is a hack to work in the confines of "one package per - // job". We do all the actual work in the Init function, and only - // report results in the actual checker function. - var out []types.Object + c.fset = pass.Fset + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + pkg := &pkg{ + Fset: pass.Fset, + Files: pass.Files, + Pkg: pass.Pkg, + TypesInfo: pass.TypesInfo, + TypesSizes: pass.TypesSizes, + SSA: ssapkg.Pkg, + SrcFuncs: ssapkg.SrcFuncs, + } + + c.initialPackages[pkg.Pkg] = struct{}{} + if c.WholeProgram { // (e1) all packages share a single graph - out = c.processPkgs(prog.InitialPackages...) - } else { - var wg sync.WaitGroup - var mu sync.Mutex - for _, pkg := range prog.InitialPackages { - pkg := pkg - wg.Add(1) - go func() { - res := c.processPkgs(pkg) - mu.Lock() - out = append(out, res...) - mu.Unlock() - wg.Done() - }() + if c.graph == nil { + c.graph = NewGraph() + c.graph.wholeProgram = true } - wg.Wait() + c.processPkg(pkg) + } else { + c.graph = NewGraph() + c.graph.wholeProgram = false + + c.processPkg(pkg) + c.out = append(c.out, c.results()...) 
} - out2 := make([]types.Object, 0, len(out)) - for _, v := range out { - if _, ok := c.seen[prog.Fset().Position(v.Pos())]; !ok { - out2 = append(out2, v) + + return nil, nil +} + +func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Problem { + name := obj.Name() + if sig, ok := obj.Type().(*types.Signature); ok && sig.Recv() != nil { + switch sig.Recv().Type().(type) { + case *types.Named, *types.Pointer: + typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) + if len(typ) > 0 && typ[0] == '*' { + name = fmt.Sprintf("(%s).%s", typ, obj.Name()) + } else if len(typ) > 0 { + name = fmt.Sprintf("%s.%s", typ, obj.Name()) + } } } - c.out = out2 + + return lint.Problem{ + Pos: lint.DisplayPosition(fset, obj.Pos()), + Message: fmt.Sprintf("%s %s is unused", typString(obj), name), + Check: "U1000", + } } -func (c *Checker) Lint(j *lint.Job) { - // The actual work is being done in Init. We only report existing - // results here. - unused := c.out - for _, u := range unused { - if u.Pkg() != j.Pkg.Types { +func (c *Checker) Result() []types.Object { + if c.WholeProgram { + c.out = c.results() + } + + out2 := make([]types.Object, 0, len(c.out)) + for _, v := range c.out { + if _, ok := c.initialPackages[v.Pkg()]; !ok { continue } - name := u.Name() - if sig, ok := u.Type().(*types.Signature); ok && sig.Recv() != nil { - switch sig.Recv().Type().(type) { - case *types.Named, *types.Pointer: - typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) - if len(typ) > 0 && typ[0] == '*' { - name = fmt.Sprintf("(%s).%s", typ, u.Name()) - } else if len(typ) > 0 { - name = fmt.Sprintf("%s.%s", typ, u.Name()) - } - } + position := c.fset.PositionFor(v.Pos(), false) + if _, ok := c.seen[position]; !ok { + out2 = append(out2, v) } - j.Errorf(u, "%s %s is unused", typString(u), name) } + return out2 } func (c *Checker) debugf(f string, v ...interface{}) { @@ -537,27 +575,15 @@ func (graph *Graph) quieten(node *Node) { } } -func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { - graph := NewGraph() - graph.wholeProgram = c.WholeProgram - graph.scopes = c.scopes - graph.initialPackages = c.initialPackages - +func (c *Checker) results() []types.Object { var out []types.Object - for _, pkg := range pkgs { - if pkg.PkgPath == "unsafe" { - continue - } - graph.entry(pkg) - } - if c.WholeProgram { var ifaces []*types.Interface var notIfaces []types.Type // implement as many interfaces as possible - graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: ifaces = append(ifaces, t) @@ -568,17 +594,18 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { } }) - // OPT(dh): this is not terribly efficient - ifaces = append(ifaces, c.interfaces...) + for pkg := range c.allPackages { + ifaces = append(ifaces, interfacesFromExportData(pkg)...) 
+ } // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { - ms := graph.msCache.MethodSet(t) + ms := c.graph.msCache.MethodSet(t) for _, iface := range ifaces { - if sels, ok := graph.implements(t, iface, ms); ok { + if sels, ok := c.graph.implements(t, iface, ms); ok { for _, sel := range sels { - graph.useMethod(t, sel, t, "implements") + c.graph.useMethod(t, sel, t, "implements") } } } @@ -600,27 +627,27 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { } c.debugf("digraph{\n") - debugNode(graph.Root) - for _, node := range graph.Nodes { + debugNode(c.graph.Root) + for _, node := range c.graph.Nodes { debugNode(node) } - graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { debugNode(value.(*Node)) }) c.debugf("}\n") } - graph.color(graph.Root) + c.graph.color(c.graph.Root) // if a node is unused, don't report any of the node's // children as unused. for example, if a function is unused, // don't flag its receiver. if a named type is unused, don't // flag its methods. - for _, node := range graph.Nodes { - graph.quieten(node) + for _, node := range c.graph.Nodes { + c.graph.quieten(node) } - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - graph.quieten(value.(*Node)) + c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + c.graph.quieten(value.(*Node)) }) report := func(node *Node) { @@ -634,8 +661,9 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { } if pos != 0 { + position := c.fset.PositionFor(pos, false) c.seenMu.Lock() - c.seen[pkgs[0].Fset.Position(pos)] = struct{}{} + c.seen[position] = struct{}{} c.seenMu.Unlock() } return @@ -645,39 +673,6 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { return } - type packager1 interface { - Pkg() *types.Package - } - type packager2 interface { - Package() *ssa.Package - } - - // do not report objects from packages we aren't checking. - checkPkg: - switch obj := node.obj.(type) { - case packager1: - for _, pkg := range pkgs { - if pkg.Types == obj.Pkg() { - break checkPkg - } - } - c.debugf("n%d [color=yellow];\n", node.id) - return - case packager2: - // This happens to filter $bound and $thunk, which - // should be fine, since we wouldn't want to report - // them, anyway. Remember that this filtering is only - // for the output, it doesn't affect the reachability - // of nodes in the graph. 
- for _, pkg := range pkgs { - if pkg.SSA == obj.Package() { - break checkPkg - } - } - c.debugf("n%d [color=yellow];\n", node.id) - return - } - c.debugf("n%d [color=red];\n", node.id) switch obj := node.obj.(type) { case *types.Var: @@ -703,20 +698,26 @@ func (c *Checker) processPkgs(pkgs ...*lint.Pkg) []types.Object { c.debugf("n%d [color=gray];\n", node.id) } } - for _, node := range graph.Nodes { + for _, node := range c.graph.Nodes { report(node) } - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { report(value.(*Node)) }) return out } +func (c *Checker) processPkg(pkg *pkg) { + if pkg.Pkg.Path() == "unsafe" { + return + } + c.graph.entry(pkg) +} + type Graph struct { pkg *ssa.Package msCache typeutil.MethodSetCache - scopes map[*types.Scope]*ssa.Function wholeProgram bool @@ -728,8 +729,6 @@ type Graph struct { seenTypes typeutil.Map seenFns map[*ssa.Function]struct{} - - initialPackages []*lint.Pkg } func NewGraph() *Graph { @@ -877,12 +876,7 @@ func isIrrelevant(obj interface{}) bool { func (g *Graph) isInterestingPackage(pkg *types.Package) bool { if g.wholeProgram { - for _, opkg := range g.initialPackages { - if opkg.Types == pkg { - return true - } - } - return false + return true } return pkg == g.pkg.Pkg } @@ -968,11 +962,19 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { return true } -func (g *Graph) entry(pkg *lint.Pkg) { +func (g *Graph) entry(pkg *pkg) { // TODO rename Entry g.pkg = pkg.SSA - for _, f := range pkg.Syntax { + scopes := map[*types.Scope]*ssa.Function{} + for _, fn := range pkg.SrcFuncs { + if fn.Object() != nil { + scope := fn.Object().(*types.Func).Scope() + scopes[scope] = fn + } + } + + for _, f := range pkg.Files { for _, cg := range f.Comments { for _, c := range cg.List { if strings.HasPrefix(c.Text, "//go:linkname ") { @@ -1006,7 +1008,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { surroundingFunc := func(obj types.Object) *ssa.Function { scope := obj.Parent() for scope != nil { - if fn := g.scopes[scope]; fn != nil { + if fn := scopes[scope]; fn != nil { return fn } scope = scope.Parent() @@ -1037,7 +1039,7 @@ func (g *Graph) entry(pkg *lint.Pkg) { // Find constants being used inside functions, find sinks in tests handledConsts := map[*ast.Ident]struct{}{} - for _, fn := range pkg.InitialFunctions { + for _, fn := range pkg.SrcFuncs { g.see(fn) node := fn.Syntax() if node == nil { @@ -1094,82 +1096,83 @@ func (g *Graph) entry(pkg *lint.Pkg) { } var fn *ssa.Function - pkg.Inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)}, func(n ast.Node) { - switch n := n.(type) { - case *ast.FuncDecl: - fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) - if fn != nil { - g.see(fn) - } - case *ast.GenDecl: - switch n.Tok { - case token.CONST: - groups := lintdsl.GroupSpecs(pkg.Fset, n.Specs) - for _, specs := range groups { - if len(specs) > 1 { - cg := &ConstGroup{} - g.see(cg) - for _, spec := range specs { - for _, name := range spec.(*ast.ValueSpec).Names { - obj := pkg.TypesInfo.ObjectOf(name) - // (10.1) const groups - g.seeAndUse(obj, cg, "const group") - g.use(cg, obj, "const group") + for _, f := range pkg.Files { + ast.Inspect(f, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.FuncDecl: + fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) + if fn != nil { + g.see(fn) + } + case *ast.GenDecl: + switch n.Tok { + case token.CONST: + groups := 
lintdsl.GroupSpecs(pkg.Fset, n.Specs) + for _, specs := range groups { + if len(specs) > 1 { + cg := &ConstGroup{} + g.see(cg) + for _, spec := range specs { + for _, name := range spec.(*ast.ValueSpec).Names { + obj := pkg.TypesInfo.ObjectOf(name) + // (10.1) const groups + g.seeAndUse(obj, cg, "const group") + g.use(cg, obj, "const group") + } } } } - } - case token.VAR: - for _, spec := range n.Specs { - v := spec.(*ast.ValueSpec) - for _, name := range v.Names { - T := pkg.TypesInfo.TypeOf(name) - if fn != nil { - g.seeAndUse(T, fn, "var decl") - } else { - g.seeAndUse(T, nil, "var decl") + case token.VAR: + for _, spec := range n.Specs { + v := spec.(*ast.ValueSpec) + for _, name := range v.Names { + T := pkg.TypesInfo.TypeOf(name) + if fn != nil { + g.seeAndUse(T, fn, "var decl") + } else { + g.seeAndUse(T, nil, "var decl") + } + g.typ(T) } - g.typ(T) } - } - case token.TYPE: - for _, spec := range n.Specs { - // go/types doesn't provide a way to go from a - // types.Named to the named type it was based on - // (the t1 in type t2 t1). Therefore we walk the - // AST and process GenDecls. - // - // (2.2) named types use the type they're based on - v := spec.(*ast.TypeSpec) - T := pkg.TypesInfo.TypeOf(v.Type) - obj := pkg.TypesInfo.ObjectOf(v.Name) - g.see(obj) - g.see(T) - g.use(T, obj, "type") - g.typ(obj.Type()) - g.typ(T) - - if v.Assign != 0 { - aliasFor := obj.(*types.TypeName).Type() - // (2.3) named types use all their aliases. we can't easily track uses of aliases - if isIrrelevant(aliasFor) { - // We do not track the type this is an - // alias for (for example builtins), so - // just mark the alias used. - // - // FIXME(dh): what about aliases declared inside functions? - g.use(obj, nil, "alias") - } else { - g.see(aliasFor) - g.seeAndUse(obj, aliasFor, "alias") + case token.TYPE: + for _, spec := range n.Specs { + // go/types doesn't provide a way to go from a + // types.Named to the named type it was based on + // (the t1 in type t2 t1). Therefore we walk the + // AST and process GenDecls. + // + // (2.2) named types use the type they're based on + v := spec.(*ast.TypeSpec) + T := pkg.TypesInfo.TypeOf(v.Type) + obj := pkg.TypesInfo.ObjectOf(v.Name) + g.see(obj) + g.see(T) + g.use(T, obj, "type") + g.typ(obj.Type()) + g.typ(T) + + if v.Assign != 0 { + aliasFor := obj.(*types.TypeName).Type() + // (2.3) named types use all their aliases. we can't easily track uses of aliases + if isIrrelevant(aliasFor) { + // We do not track the type this is an + // alias for (for example builtins), so + // just mark the alias used. + // + // FIXME(dh): what about aliases declared inside functions? + g.use(obj, nil, "alias") + } else { + g.see(aliasFor) + g.seeAndUse(obj, aliasFor, "alias") + } } } } } - default: - panic(fmt.Sprintf("unreachable: %T", n)) - } - }) + return true + }) + } for _, m := range g.pkg.Members { switch m := m.(type) { diff --git a/unused/unused_test.go b/unused/unused_test.go index 73c9c0cf1..5a023ddfe 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -1,18 +1,198 @@ package unused -// Copyright (c) 2013 The Go Authors. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found at -// https://developers.google.com/open-source/licenses/bsd. 
- import ( + "fmt" + "go/parser" + "go/token" + "go/types" + "os" + "sort" + "strconv" + "strings" "testing" + "text/scanner" - "honnef.co/go/tools/lint/testutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/analysistest" + "golang.org/x/tools/go/packages" + "honnef.co/go/tools/lint" ) +// parseExpectations parses the content of a "// want ..." comment +// and returns the expectations, a mixture of diagnostics ("rx") and +// facts (name:"rx"). +func parseExpectations(text string) ([]string, error) { + var scanErr string + sc := new(scanner.Scanner).Init(strings.NewReader(text)) + sc.Error = func(s *scanner.Scanner, msg string) { + scanErr = msg // e.g. bad string escape + } + sc.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanRawStrings + + scanRegexp := func(tok rune) (string, error) { + if tok != scanner.String && tok != scanner.RawString { + return "", fmt.Errorf("got %s, want regular expression", + scanner.TokenString(tok)) + } + pattern, _ := strconv.Unquote(sc.TokenText()) // can't fail + return pattern, nil + } + + var expects []string + for { + tok := sc.Scan() + switch tok { + case scanner.String, scanner.RawString: + rx, err := scanRegexp(tok) + if err != nil { + return nil, err + } + expects = append(expects, rx) + + case scanner.EOF: + if scanErr != "" { + return nil, fmt.Errorf("%s", scanErr) + } + return expects, nil + + default: + return nil, fmt.Errorf("unexpected %s", scanner.TokenString(tok)) + } + } + + return expects, nil +} + +func check(t *testing.T, fset *token.FileSet, diagnostics []types.Object) { + type key struct { + file string + line int + } + + files := map[string]struct{}{} + for _, d := range diagnostics { + files[fset.Position(d.Pos()).Filename] = struct{}{} + } + + want := make(map[key][]string) + + // processComment parses expectations out of comments. + processComment := func(filename string, linenum int, text string) { + text = strings.TrimSpace(text) + + // Any comment starting with "want" is treated + // as an expectation, even without following whitespace. + if rest := strings.TrimPrefix(text, "want"); rest != text { + expects, err := parseExpectations(rest) + if err != nil { + t.Errorf("%s:%d: in 'want' comment: %s", filename, linenum, err) + return + } + if expects != nil { + want[key{filename, linenum}] = expects + } + } + } + + // Extract 'want' comments from Go files. + fset2 := token.NewFileSet() + for f := range files { + af, err := parser.ParseFile(fset2, f, nil, parser.ParseComments) + if err != nil { + t.Fatal(err) + } + for _, cgroup := range af.Comments { + for _, c := range cgroup.List { + + text := strings.TrimPrefix(c.Text, "//") + if text == c.Text { + continue // not a //-comment + } + + // Hack: treat a comment of the form "//...// want..." + // as if it starts at 'want'. + // This allows us to add comments on comments, + // as required when testing the buildtag analyzer. + if i := strings.Index(text, "// want"); i >= 0 { + text = text[i+len("// "):] + } + + // It's tempting to compute the filename + // once outside the loop, but it's + // incorrect because it can change due + // to //line directives. + posn := fset2.Position(c.Pos()) + processComment(posn.Filename, posn.Line, text) + } + } + } + + checkMessage := func(posn token.Position, name, message string) { + k := key{posn.Filename, posn.Line} + expects := want[k] + var unmatched []string + for i, exp := range expects { + if exp == message { + // matched: remove the expectation.
+ expects[i] = expects[len(expects)-1] + expects = expects[:len(expects)-1] + want[k] = expects + return + } + unmatched = append(unmatched, fmt.Sprintf("%q", exp)) + } + if unmatched == nil { + t.Errorf("%v: unexpected: %v", posn, message) + } else { + t.Errorf("%v: %q does not match pattern %s", + posn, message, strings.Join(unmatched, " or ")) + } + } + + // Check the diagnostics match expectations. + for _, f := range diagnostics { + posn := fset.Position(f.Pos()) + checkMessage(posn, "", f.Name()) + } + + // Reject surplus expectations. + // + // Sometimes an Analyzer reports two similar diagnostics on a + // line with only one expectation. The reader may be confused by + // the error message. + // TODO(adonovan): print a better error: + // "got 2 diagnostics here; each one needs its own expectation". + var surplus []string + for key, expects := range want { + for _, exp := range expects { + err := fmt.Sprintf("%s:%d: no diagnostic was reported matching %q", key.file, key.line, exp) + surplus = append(surplus, err) + } + } + sort.Strings(surplus) + for _, err := range surplus { + t.Errorf("%s", err) + } +} + func TestAll(t *testing.T) { - c := &Checker{} - testutil.TestAll(t, c, "") + c := NewChecker() + r, err := lint.NewRunner() + if err != nil { + t.Fatal(err) + } + + dir := analysistest.TestData() + cfg := &packages.Config{ + Dir: dir, + Tests: true, + Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"), + } + pkgs, err := r.Run(cfg, []string{"./..."}, []*analysis.Analyzer{c.Analyzer()}) + if err != nil { + t.Fatal(err) + } + + res := c.Result() + check(t, pkgs[0].Fset, res) } From c654f830d424d16d36a8b835ac7915f116f4439a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 26 Apr 2019 23:58:55 +0200 Subject: [PATCH 129/254] staticcheck/vrp: short-circuit widening in presence of too many constants --- staticcheck/vrp/vrp.go | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/staticcheck/vrp/vrp.go b/staticcheck/vrp/vrp.go index a0011ed08..3c138e512 100644 --- a/staticcheck/vrp/vrp.go +++ b/staticcheck/vrp/vrp.go @@ -722,16 +722,22 @@ func (g *Graph) widen(c Constraint, consts []Z) bool { } nlc := NInfinity nuc := PInfinity - for _, co := range consts { - if co.Cmp(ni.Lower) <= 0 { - nlc = co - break + + // Don't get stuck widening for an absurd amount of time due + // to an excess number of constants, as may be present in + // table-based scanners. + if len(consts) < 1000 { + for _, co := range consts { + if co.Cmp(ni.Lower) <= 0 { + nlc = co + break + } } - } - for _, co := range consts { - if co.Cmp(ni.Upper) >= 0 { - nuc = co - break + for _, co := range consts { + if co.Cmp(ni.Upper) >= 0 { + nuc = co + break + } } } From 08d9dfb857eaeea627ad646f150095c8ceb80add Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 05:08:40 +0200 Subject: [PATCH 130/254] Support unused's whole program mode Unused required some changes to have a correctly working whole program mode. We had to let go of using *ssa.Functions to identify functions, as we cannot persist them across packages anymore. Instead, we switched to strings for deduplication and finding the "owning" types.Object of functions, even those that do not have a types.Object recorded in the SSA graph. To enable whole program mode, staticcheck accepts the new -unused.whole-program flag. An alternative approach we tried was to run two instances of unused, one in normal mode and one in exported mode. 
However, the cost of whole program mode is significant and we don't want to subject all users to it. When unused runs in whole program mode, its check name changes to U1001. This is so that linter directives and configuration files can address both modes separately. --- cmd/staticcheck/staticcheck.go | 8 +- unused/unused.go | 151 ++++++++++++++++++--------------- 2 files changed, 90 insertions(+), 69 deletions(-) diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 24a369e24..6400eb383 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -15,6 +15,7 @@ import ( func main() { fs := lintutil.FlagSet("staticcheck") + wholeProgram := fs.Bool("unused.whole-program", false, "Run unused in whole program mode") fs.Parse(os.Args[1:]) var cs []*analysis.Analyzer @@ -28,7 +29,10 @@ func main() { cs = append(cs, v) } - cums := []lint.CumulativeChecker{unused.NewChecker()} - + u := unused.NewChecker() + if *wholeProgram { + u.WholeProgram = true + } + cums := []lint.CumulativeChecker{u} lintutil.ProcessFlagSet(cs, cums, fs) } diff --git a/unused/unused.go b/unused/unused.go index fdecc743a..7b3433115 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -430,8 +430,12 @@ func NewChecker() *Checker { } func (c *Checker) Analyzer() *analysis.Analyzer { + name := "U1000" + if c.WholeProgram { + name = "U1001" + } return &analysis.Analyzer{ - Name: "U1000", + Name: name, Doc: "Unused code", Run: c.Run, Requires: []*analysis.Analyzer{buildssa.Analyzer}, @@ -474,6 +478,8 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { c.graph.wholeProgram = true } c.processPkg(pkg) + c.graph.seenFns = map[string]struct{}{} + c.graph.pkg = nil } else { c.graph = NewGraph() c.graph.wholeProgram = false @@ -499,10 +505,14 @@ func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Prob } } + checkName := "U1000" + if c.WholeProgram { + checkName = "U1001" + } return lint.Problem{ Pos: lint.DisplayPosition(fset, obj.Pos()), Message: fmt.Sprintf("%s %s is unused", typString(obj), name), - Check: "U1000", + Check: checkName, } } @@ -535,7 +545,7 @@ func (graph *Graph) quieten(node *Node) { return } switch obj := node.obj.(type) { - case *ssa.Function: + case *types.Func: sig := obj.Type().(*types.Signature) if sig.Recv() != nil { if node, ok := graph.nodeMaybe(sig.Recv()); ok { @@ -554,7 +564,7 @@ func (graph *Graph) quieten(node *Node) { } case *types.Named: for i := 0; i < obj.NumMethods(); i++ { - m := graph.pkg.Prog.FuncValue(obj.Method(i)) + m := obj.Method(i) if node, ok := graph.nodeMaybe(m); ok { node.quiet = true } @@ -656,8 +666,6 @@ func (c *Checker) results() []types.Object { switch obj := node.obj.(type) { case types.Object: pos = obj.Pos() - case *ssa.Function: - pos = obj.Pos() } if pos != 0 { @@ -684,16 +692,6 @@ func (c *Checker) results() []types.Object { if obj.Name() != "_" { out = append(out, obj) } - case *ssa.Function: - if obj == nil { - // TODO(dh): how does this happen? 
- return - } - if obj.Object() == nil { - // Closures - return - } - out = append(out, obj.Object()) default: c.debugf("n%d [color=gray];\n", node.id) } @@ -728,13 +726,13 @@ type Graph struct { Nodes map[interface{}]*Node seenTypes typeutil.Map - seenFns map[*ssa.Function]struct{} + seenFns map[string]struct{} } func NewGraph() *Graph { g := &Graph{ Nodes: map[interface{}]*Node{}, - seenFns: map[*ssa.Function]struct{}{}, + seenFns: map[string]struct{}{}, } g.Root = g.newNode(nil) return g @@ -766,7 +764,7 @@ type Node struct { quiet bool } -func (g *Graph) nodeMaybe(obj interface{}) (*Node, bool) { +func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { if t, ok := obj.(types.Type); ok { if v := g.TypeNodes.At(t); v != nil { return v.(*Node), true @@ -903,12 +901,6 @@ func (g *Graph) use(used, by interface{}, reason string) { } assert(used != nil) - if _, ok := used.(*types.Func); ok { - assert(g.pkg.Prog.FuncValue(used.(*types.Func)) == nil) - } - if _, ok := by.(*types.Func); ok { - assert(g.pkg.Prog.FuncValue(by.(*types.Func)) == nil) - } if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { if !g.isInterestingPackage(obj.Pkg()) { return @@ -940,12 +932,17 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { // object isn't exported, the question is moot return false } + path := g.pkg.Prog.Fset.Position(obj.Pos()).Filename if g.wholeProgram { + // Example functions without "Output:" comments aren't being + // run and thus don't show up in the graph. + if strings.HasSuffix(path, "_test.go") && strings.HasPrefix(obj.Name(), "Example") { + return true + } // whole program mode tracks exported identifiers accurately return false } - path := g.pkg.Prog.Fset.Position(obj.Pos()).Filename if g.pkg.Pkg.Name() == "main" && !strings.HasSuffix(path, "_test.go") { // exported identifiers in package main can't be imported. // However, test functions can be called, and xtest packages @@ -953,6 +950,8 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { return false } + // TODO(dh): the following comment is no longer true + // // at one point we only considered exported identifiers in // *_test.go files if they were Benchmark, Example or Test // functions. 
However, this doesn't work when we look at one @@ -987,12 +986,12 @@ func (g *Graph) entry(pkg *pkg) { fields := strings.Fields(c.Text) if len(fields) == 3 { if m, ok := pkg.SSA.Members[fields[1]]; ok { - var obj interface{} + var obj types.Object switch m := m.(type) { case *ssa.Global: obj = m.Object() case *ssa.Function: - obj = m + obj = m.Object() default: panic(fmt.Sprintf("unhandled type: %T", m)) } @@ -1040,7 +1039,9 @@ func (g *Graph) entry(pkg *pkg) { // Find constants being used inside functions, find sinks in tests handledConsts := map[*ast.Ident]struct{}{} for _, fn := range pkg.SrcFuncs { - g.see(fn) + if fn.Object() != nil { + g.see(fn.Object()) + } node := fn.Syntax() if node == nil { continue @@ -1054,7 +1055,7 @@ func (g *Graph) entry(pkg *pkg) { } switch obj := obj.(type) { case *types.Const: - g.seeAndUse(obj, fn, "used constant") + g.seeAndUse(obj, owningObject(fn), "used constant") } case *ast.AssignStmt: for _, expr := range node.Lhs { @@ -1074,7 +1075,7 @@ func (g *Graph) entry(pkg *pkg) { // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) // (9.7) variable _reads_ use variables, writes do not, except in tests - g.seeAndUse(obj, fn, "test sink") + g.seeAndUse(obj, owningObject(fn), "test sink") } } } @@ -1095,15 +1096,13 @@ func (g *Graph) entry(pkg *pkg) { g.seeAndUse(obj, nil, "used constant") } - var fn *ssa.Function + var fn *types.Func for _, f := range pkg.Files { ast.Inspect(f, func(n ast.Node) bool { switch n := n.(type) { case *ast.FuncDecl: - fn = pkg.SSA.Prog.FuncValue(pkg.TypesInfo.ObjectOf(n.Name).(*types.Func)) - if fn != nil { - g.see(fn) - } + fn = pkg.TypesInfo.ObjectOf(n.Name).(*types.Func) + g.see(fn) case *ast.GenDecl: switch n.Tok { case token.CONST: @@ -1187,23 +1186,29 @@ func (g *Graph) entry(pkg *pkg) { } } case *ssa.Function: - g.see(m) + mObj := owningObject(m) + if mObj != nil { + g.see(mObj) + } if m.Name() == "init" { // (1.5) packages use init functions - g.use(m, nil, "init function") + // + // This is handled implicitly. The generated init + // function has no object, thus everything in it will + // be owned by the package. } // This branch catches top-level functions, not methods. 
if m.Object() != nil && g.trackExportedIdentifier(m.Object()) { // (1.2) packages use exported functions (unless in package main) - g.use(m, nil, "exported top-level function") + g.use(mObj, nil, "exported top-level function") } if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { // (1.7) packages use the main function iff in the main package - g.use(m, nil, "main function") + g.use(mObj, nil, "main function") } if g.pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { // (9.8) runtime functions that may be called from user code via the compiler - g.use(m, nil, "runtime function") + g.use(mObj, nil, "runtime function") } if m.Syntax() != nil { doc := m.Syntax().(*ast.FuncDecl).Doc @@ -1211,7 +1216,7 @@ func (g *Graph) entry(pkg *pkg) { for _, cmt := range doc.List { if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { // (1.6) packages use functions exported to cgo - g.use(m, nil, "cgo exported") + g.use(mObj, nil, "cgo exported") } } } @@ -1278,35 +1283,45 @@ func (g *Graph) useMethod(t types.Type, sel *types.Selection, by interface{}, re for _, idx := range path[:len(path)-1] { next := base.Field(idx) // (6.3) structs use embedded fields that help implement interfaces + g.see(base) g.seeAndUse(next, base, "provides method") base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) } } - if fn := g.pkg.Prog.FuncValue(obj.(*types.Func)); fn != nil { - // actual function - g.seeAndUse(fn, by, reason) - } else { - // interface method - g.seeAndUse(obj, by, reason) + g.seeAndUse(obj, by, reason) +} + +func owningObject(fn *ssa.Function) types.Object { + if fn.Object() != nil { + return fn.Object() } + if fn.Parent() != nil { + return owningObject(fn.Parent()) + } + return nil } func (g *Graph) function(fn *ssa.Function) { if fn.Package() != nil && fn.Package() != g.pkg { return } - if _, ok := g.seenFns[fn]; ok { + + name := fn.RelString(nil) + if _, ok := g.seenFns[name]; ok { return } - g.seenFns[fn] = struct{}{} + g.seenFns[name] = struct{}{} // (4.1) functions use all their arguments, return parameters and receivers - g.seeAndUse(fn.Signature, fn, "function signature") + g.seeAndUse(fn.Signature, owningObject(fn), "function signature") g.signature(fn.Signature) g.instructions(fn) for _, anon := range fn.AnonFuncs { // (4.2) functions use anonymous functions defined beneath them - g.seeAndUse(anon, fn, "anonymous function") + // + // This fact is expressed implicitly. Anonymous functions have + // no types.Object, so their owner is the surrounding + // function. g.function(anon) } } @@ -1399,15 +1414,14 @@ func (g *Graph) typ(t types.Type) { g.seeAndUse(t, t.Obj(), "named type") for i := 0; i < t.NumMethods(); i++ { - meth := g.pkg.Prog.FuncValue(t.Method(i)) - g.see(meth) + g.see(t.Method(i)) // don't use trackExportedIdentifier here, we care about // all exported methods, even in package main or in tests. 
- if meth.Object() != nil && meth.Object().Exported() && !g.wholeProgram { + if t.Method(i).Exported() && !g.wholeProgram { // (2.1) named types use exported methods - g.use(meth, t, "exported method") + g.use(t.Method(i), t, "exported method") } - g.function(meth) + g.function(g.pkg.Prog.FuncValue(t.Method(i))) } g.typ(t.Underlying()) @@ -1484,6 +1498,7 @@ func (g *Graph) signature(sig *types.Signature) { } func (g *Graph) instructions(fn *ssa.Function) { + fnObj := owningObject(fn) for _, b := range fn.Blocks { for _, instr := range b.Instrs { ops := instr.Operands(nil) @@ -1502,16 +1517,18 @@ func (g *Graph) instructions(fn *ssa.Function) { // (4.5) functions use functions they call // (9.5) instructions use their operands // (4.4) functions use functions they return. we assume that someone else will call the returned function - g.seeAndUse(v, fn, "instruction operand") + if owningObject(v) != nil { + g.seeAndUse(owningObject(v), fnObj, "instruction operand") + } g.function(v) case *ssa.Const: // (9.6) instructions use their operands' types - g.seeAndUse(v.Type(), fn, "constant's type") + g.seeAndUse(v.Type(), fnObj, "constant's type") g.typ(v.Type()) case *ssa.Global: if v.Object() != nil { // (9.5) instructions use their operands - g.seeAndUse(v.Object(), fn, "instruction operand") + g.seeAndUse(v.Object(), fnObj, "instruction operand") } } }) @@ -1522,7 +1539,7 @@ func (g *Graph) instructions(fn *ssa.Function) { // (4.8) instructions use their types // (9.4) conversions use the type they convert to - g.seeAndUse(v.Type(), fn, "instruction") + g.seeAndUse(v.Type(), fnObj, "instruction") g.typ(v.Type()) } } @@ -1531,12 +1548,12 @@ func (g *Graph) instructions(fn *ssa.Function) { st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access - g.seeAndUse(field, fn, "field access") + g.seeAndUse(field, fnObj, "field access") case *ssa.FieldAddr: st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access - g.seeAndUse(field, fn, "field access") + g.seeAndUse(field, fnObj, "field access") case *ssa.Store: // nothing to do, handled generically by operands case *ssa.Call: @@ -1565,7 +1582,7 @@ func (g *Graph) instructions(fn *ssa.Function) { ms := g.msCache.MethodSet(vv.Type()) for i := 0; i < ms.Len(); i++ { if ms.At(i).Obj().Exported() { - g.useMethod(vv.Type(), ms.At(i), fn, "net/rpc.Register") + g.useMethod(vv.Type(), ms.At(i), fnObj, "net/rpc.Register") } } }) @@ -1575,7 +1592,7 @@ func (g *Graph) instructions(fn *ssa.Function) { } } else { // (4.5) functions use functions/interface methods they call - g.seeAndUse(c.Method, fn, "interface call") + g.seeAndUse(c.Method, fnObj, "interface call") } case *ssa.Return: // nothing to do, handled generically by operands @@ -1615,7 +1632,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - g.seeAndUse(st.Field(i), fn, "unsafe conversion") + g.seeAndUse(st.Field(i), fnObj, "unsafe conversion") } } } @@ -1626,7 +1643,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. 
- g.seeAndUse(st.Field(i), fn, "unsafe conversion") + g.seeAndUse(st.Field(i), fnObj, "unsafe conversion") } } } From 159cb4b24ef1513b9f75e1fa1354c9c52740632c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 06:28:03 +0200 Subject: [PATCH 131/254] lint: don't crash if config couldn't be loaded --- lint/runner.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lint/runner.go b/lint/runner.go index 332c805e9..58a6f12b5 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -532,7 +532,9 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { for _, ac := range acs { pkg.problems = append(pkg.problems, ac.problems...) } - pkg.cfg = pkg.results[config.Analyzer].v.(*config.Config) + if pkg.results[config.Analyzer].v != nil { + pkg.cfg = pkg.results[config.Analyzer].v.(*config.Config) + } pkg.gen = pkg.results[IsGeneratedAnalyzer].v.(map[string]bool) // In a previous version of the code, we would throw away all type From 686a886fd626f54f2a305a35deae7cf718400f78 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 06:28:30 +0200 Subject: [PATCH 132/254] lint: improve error handling Make errors more expressive but at the same time less noisy. Deduplicate errors for failed dependency (we have >100 analyzes, we don't need >100 reports that the same dependency failed), don't print errors for failed imports if they're in the set of initial packages. Closes gh-428 --- config/config.go | 6 ++++- lint/runner.go | 61 ++++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 59 insertions(+), 8 deletions(-) diff --git a/config/config.go b/config/config.go index 4ac006b9a..3ffbcb2dc 100644 --- a/config/config.go +++ b/config/config.go @@ -1,6 +1,7 @@ package config import ( + "fmt" "os" "path/filepath" "reflect" @@ -21,7 +22,10 @@ var Analyzer = &analysis.Analyzer{ path := pass.Fset.PositionFor(pass.Files[0].Pos(), true).Filename dir := filepath.Dir(path) cfg, err := Load(dir) - return &cfg, err + if err != nil { + return nil, fmt.Errorf("error loading staticcheck.conf: %s", err) + } + return &cfg, nil }, RunDespiteErrors: true, ResultType: reflect.TypeOf((*Config)(nil)), diff --git a/lint/runner.go b/lint/runner.go index 58a6f12b5..f32f63969 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -209,6 +209,25 @@ func (r *Runner) loadCachedFacts(a *analysis.Analyzer, pkg *Package) ([]Fact, bo return facts, true } +type dependencyError struct { + dep string + err error +} + +func (err dependencyError) nested() dependencyError { + if o, ok := err.err.(dependencyError); ok { + return o.nested() + } + return err +} + +func (err dependencyError) Error() string { + if o, ok := err.err.(dependencyError); ok { + return o.Error() + } + return fmt.Sprintf("error running dependency %s: %s", err.dep, err.err) +} + func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { if !ac.pkg.fromSource { panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg)) @@ -230,7 +249,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter ret, err := r.runAnalysis(acReq) if err != nil { // We couldn't run a dependency, no point in going on - return nil, err + return nil, dependencyError{req.Name, err} } pass.ResultOf[req] = ret @@ -442,6 +461,16 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { return nil } +type analysisError struct { + analyzer *analysis.Analyzer + pkg *Package + err error +} + +func (err analysisError) Error() string { 
+ return fmt.Sprintf("error running analyzer %s on %s: %s", err.analyzer, err.pkg, err.err) +} + func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { r.builtMu.Lock() res := r.built[pkg] @@ -472,11 +501,15 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { for _, imp := range pkg.Imports { r.processPkg(imp, analyzers) if len(imp.errs) > 0 { - var s string - for _, err := range imp.errs { - s += "\n\t" + err.Error() + if imp.initial { + pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s", imp, pkg)) + } else { + var s string + for _, err := range imp.errs { + s += "\n\t" + err.Error() + } + pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s: %s", imp, pkg, s)) } - pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s: %s", imp, pkg, s)) return } } @@ -511,18 +544,32 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { // facts will have been loaded from source. if pkg.initial || len(a.FactTypes) > 0 { if _, err := r.runAnalysis(ac); err != nil { - errs[i] = fmt.Errorf("error running analyzer %s on %s: %s", a, pkg, err) + errs[i] = analysisError{a, pkg, err} return } } }() } wg.Wait() + + depErrors := map[dependencyError]int{} for _, err := range errs { - if err != nil { + switch err := err.(type) { + case analysisError: + switch err := err.err.(type) { + case dependencyError: + depErrors[err.nested()]++ + default: + pkg.errs = append(pkg.errs, err) + } + default: pkg.errs = append(pkg.errs, err) } } + for err, count := range depErrors { + pkg.errs = append(pkg.errs, + fmt.Errorf("could not run %s@%s, preventing %d analyzers from running: %s", err.dep, pkg, count, err.err)) + } // We can't process ignores at this point because `unused` needs // to see more than one package to make its decision. From 625b3338fe9a35c5ce3add6f71fe72a2d9d00f69 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 10:05:06 +0200 Subject: [PATCH 133/254] simple, staticcheck: improve check docs --- simple/doc.go | 249 ++++++++++++++++++++++++++------------------- staticcheck/doc.go | 57 ++++++++++- 2 files changed, 197 insertions(+), 109 deletions(-) diff --git a/simple/doc.go b/simple/doc.go index b84b59d8b..5db94f267 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -2,19 +2,20 @@ package simple var docS1000 = `Use plain channel send or receive -Select statements with a single case can be replaced with a simple send or receive. +Select statements with a single case can be replaced with a simple +send or receive. Before: -select { -case x := <-ch: - fmt.Println(x) -} + select { + case x := <-ch: + fmt.Println(x) + } After: -x := <-ch -fmt.Println(x) + x := <-ch + fmt.Println(x) Available since 2017.1 @@ -26,13 +27,13 @@ Use copy() for copying elements from one slice to another. 
Before: -for i, x := range src { - dst[i] = x -} + for i, x := range src { + dst[i] = x + } After: -copy(dst, src) + copy(dst, src) Available since 2017.1 @@ -42,11 +43,11 @@ var docS1002 = `Omit comparison with boolean constant Before: -if x == true {} + if x == true {} After: -if x {} + if x {} Available since 2017.1 @@ -56,11 +57,11 @@ var docS1003 = `Replace with strings.Contains Before: -if strings.Index(x, y) != -1 {} + if strings.Index(x, y) != -1 {} After: -if strings.Contains(x, y) {} + if strings.Contains(x, y) {} Available since 2017.1 @@ -70,11 +71,11 @@ var docS1004 = `Replace with bytes.Equal Before: -if bytes.Compare(x, y) == 0 {} + if bytes.Compare(x, y) == 0 {} After: -if bytes.Equal(x, y) {} + if bytes.Equal(x, y) {} Available since 2017.1 @@ -86,15 +87,15 @@ In many cases, assigning to the blank identifier is unnecessary. Before: -for _ = range s {} -x, _ = someMap[key] -_ = <-ch + for _ = range s {} + x, _ = someMap[key] + _ = <-ch After: -for range s{} -x = someMap[key] -<-ch + for range s{} + x = someMap[key] + <-ch Available since 2017.1 @@ -110,17 +111,20 @@ Available since var docS1007 = `Simplify regular expression by using raw string literal -Raw string literals use ` + "`" + ` instead of " and do not support any escape sequences. This means that the backslash (\) can be used freely, without the need of escaping. +Raw string literals use ` + "`" + ` instead of " and do not support +any escape sequences. This means that the backslash (\) can be used +freely, without the need of escaping. -Since regular expressions have their own escape sequences, raw strings can improve their readability. +Since regular expressions have their own escape sequences, raw strings +can improve their readability. Before: -regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z") + regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z") After: -regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `) + regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `) Available since 2017.1 @@ -130,14 +134,14 @@ var docS1008 = `Simplify returning boolean expression Before: -if { - return true -} -return false + if { + return true + } + return false After: -return + return Available since 2017.1 @@ -145,15 +149,17 @@ Available since var docS1009 = `Omit redundant nil check on slices -The len function is defined for all slices, even nil ones, which have a length of zero. It is not necessary to check if a slice is not nil before checking that its length is not zero. +The len function is defined for all slices, even nil ones, which have +a length of zero. It is not necessary to check if a slice is not nil +before checking that its length is not zero. Before: -if x != nil && len(x) != 0 {} + if x != nil && len(x) != 0 {} After: -if len(x) != 0 {} + if len(x) != 0 {} Available since 2017.1 @@ -161,7 +167,8 @@ Available since var docS1010 = `Omit default slice index -When slicing, the second index defaults to the length of the value, making s[n:len(s)] and s[n:] equivalent. +When slicing, the second index defaults to the length of the value, +making s[n:len(s)] and s[n:] equivalent. Available since 2017.1 @@ -171,13 +178,13 @@ var docS1011 = `Use a single append to concatenate two slices Before: -for _, e := range y { - x = append(x, e) -} + for _, e := range y { + x = append(x, e) + } After: -x = append(x, y...) + x = append(x, y...) 
Available since 2017.1 @@ -185,15 +192,16 @@ Available since var docS1012 = `Replace with time.Since(x) -The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read. +The time.Since helper has the same effect as using time.Now().Sub(x) +but is easier to read. Before: -time.Now().Sub(x) + time.Now().Sub(x) After: -time.Since(x) + time.Since(x) Available since 2017.1 @@ -201,20 +209,24 @@ Available since var docS1016 = `Use a type conversion -Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field individually. +Two struct types with identical fields can be converted between each +other. In older versions of Go, the fields had to have identical +struct tags. Since Go 1.8, however, struct tags are ignored during +conversions. It is thus not necessary to manually copy every field +individually. Before: -var x T1 -y := T2{ - Field1: x.Field1, - Field2: x.Field2, -} + var x T1 + y := T2{ + Field1: x.Field1, + Field2: x.Field2, + } After: -var x T1 -y := T2(x) + var x T1 + y := T2(x) Available since 2017.1 @@ -222,17 +234,21 @@ Available since var docS1017 = `Replace with strings.TrimPrefix -Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix reduces complexity, and avoids common bugs, such as off-by-one mistakes. +Instead of using strings.HasPrefix and manual slicing, use the +strings.TrimPrefix function. If the string doesn't start with the +prefix, the original string will be returned. Using strings.TrimPrefix +reduces complexity, and avoids common bugs, such as off-by-one +mistakes. Before: -if strings.HasPrefix(str, prefix) { - str = str[len(prefix):] -} + if strings.HasPrefix(str, prefix) { + str = str[len(prefix):] + } After: -str = strings.TrimPrefix(str, prefix) + str = strings.TrimPrefix(str, prefix) Available since 2017.1 @@ -240,17 +256,19 @@ Available since var docS1018 = `Replace with copy() -copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice. +copy() permits using the same source and destination slice, even with +overlapping ranges. This makes it ideal for sliding elements in a +slice. Before: -for i := 0; i < n; i++ { - bs[i] = bs[offset+i] -} + for i := 0; i < n; i++ { + bs[i] = bs[offset+i] + } After: -copy(bs[:n], bs[offset:]) + copy(bs[:n], bs[offset:]) Available since 2017.1 @@ -258,7 +276,9 @@ Available since var docS1019 = `Simplify make call -The make function has default values for the length and capacity arguments. For channels and maps, the length defaults to zero. Additionally, for slices the capacity defaults to the length. +The make function has default values for the length and capacity +arguments. For channels and maps, the length defaults to zero. +Additionally, for slices the capacity defaults to the length. 
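As an illustration only (these exact snippets are not in the patch; they merely restate the defaults described above), the kind of call S1019 would simplify:

    make(chan int, 0)       // equivalent to make(chan int)
    make(map[string]int, 0) // equivalent to make(map[string]int)
    make([]int, 5, 5)       // equivalent to make([]int, 5)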
Available since 2017.1 @@ -268,11 +288,11 @@ var docS1020 = `Omit redundant nil check in type assertion Before: -if _, ok := i.(T); ok && i != nil {} + if _, ok := i.(T); ok && i != nil {} After: -if _, ok := i.(T); ok {} + if _, ok := i.(T); ok {} Available since 2017.1 @@ -282,21 +302,24 @@ var docS1021 = `Merge variable declaration and assignment Before: -var x uint -x = 1 + var x uint + x = 1 After: -var x uint = 1 + var x uint = 1 Available since 2017.1 ` var docS1023 = `Omit redundant control flow -Functions that have no return value do not need a return statement as the final statement of the function. +Functions that have no return value do not need a return statement as +the final statement of the function. -Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final statement in a case block. +Switches in Go do not have automatic fallthrough, unlike languages +like C. It is not necessary to have a break statement as the final +statement in a case block. Available since 2017.1 @@ -304,15 +327,16 @@ Available since var docS1024 = `Replace with time.Until(x) -The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read. +The time.Until helper has the same effect as using x.Sub(time.Now()) +but is easier to read. Before: -x.Sub(time.Now()) + x.Sub(time.Now()) After: -time.Until(x) + time.Until(x) Available since 2017.1 @@ -320,30 +344,33 @@ Available since var docS1025 = `Don't use fmt.Sprintf("%s", x) unnecessarily -In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used directly. +In many instances, there are easier and more efficient ways of getting +a value's string representation. Whenever a value's underlying type is +a string already, or the type has a String method, they should be used +directly. Given the following shared definitions -type T1 string -type T2 int + type T1 string + type T2 int -func (T2) String() string { return "Hello, world" } + func (T2) String() string { return "Hello, world" } -var x string -var y T1 -var z T2 + var x string + var y T1 + var z T2 we can simplify the following -fmt.Sprintf("%s", x) -fmt.Sprintf("%s", y) -fmt.Sprintf("%s", z) + fmt.Sprintf("%s", x) + fmt.Sprintf("%s", y) + fmt.Sprintf("%s", z) to -x -string(y) -z.String() + x + string(y) + z.String() Available since 2017.1 @@ -353,11 +380,11 @@ var docS1028 = `replace with fmt.Errorf Before: -errors.New(fmt.Sprintf(...)) + errors.New(fmt.Sprintf(...)) After: -fmt.Errorf(...) + fmt.Errorf(...) Available since 2017.1 @@ -365,15 +392,19 @@ Available since var docS1029 = `Range over the string -Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new slice, the size of which depends on the length of the string. +Ranging over a string will yield byte offsets and runes. If the offset +isn't used, this is functionally equivalent to converting the string +to a slice of runes and ranging over that. Ranging directly over the +string will be more performant, however, as it avoids allocating a new +slice, the size of which depends on the length of the string. 
Before: -for _, r := range []rune(s) {} + for _, r := range []rune(s) {} After: -for _, r := range s {} + for _, r := range s {} Available since 2017.1 @@ -381,7 +412,9 @@ Available since var docS1030 = `Use bytes.Buffer.String or bytes.Buffer.Bytes -bytes.Buffer has both a String and a Bytes method. It is never necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply use the other method. +bytes.Buffer has both a String and a Bytes method. It is never +necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply +use the other method. Available since 2017.1 @@ -389,21 +422,23 @@ Available since var docS1031 = `Omit redundant nil check around loop -You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop unnecessary. +You can use range on nil slices and maps, the loop will simply never +execute. This makes an additional nil check around the loop +unnecessary. Before: -if s != nil { - for _, x := range s { - ... - } -} + if s != nil { + for _, x := range s { + ... + } + } After: -for _, x := range s { - ... -} + for _, x := range s { + ... + } Available since 2017.1 @@ -411,15 +446,17 @@ Available since var docS1032 = `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x) -The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)). +The sort.Ints, sort.Float64s and sort.Strings functions are easier to +read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) +and sort.Sort(sort.StringSlice(x)). Before: -sort.Sort(sort.StringSlice(x)) + sort.Sort(sort.StringSlice(x)) After: -sort.Strings(x) + sort.Strings(x) Available since 2019.1 @@ -427,12 +464,14 @@ Available since var docS1033 = `Unnecessary guard around call to delete +Calling delete on a nil map is a no-op. + Available since: - Unreleased + Unreleased ` var docS1034 = `Use result of type assertion to simplify cases Available since: - Unreleased + Unreleased ` diff --git a/staticcheck/doc.go b/staticcheck/doc.go index e0153e210..c5776d2ff 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -109,6 +109,27 @@ Available since var docSA1008 = `Non-canonical key in http.Header map +Keys in http.Header maps are canonical, meaning they follow a specific +combination of uppercase and lowercase letters. Methods such as +http.Header.Add and http.Header.Del convert inputs into this canonical +form before manipulating the map. + +When manipulating http.Header maps directly, as opposed to using the +provided methods, care should be taken to stick to canonical form in +order to avoid inconsistencies. The following piece of code +demonstrates one such inconsistency: + + h := http.Header{} + h["etag"] = []string{"1234"} + h.Add("etag", "5678") + fmt.Println(h) + + // Output: + // map[Etag:[5678] etag:[1234]] + +The easiest way of obtaining the canonical form of a key is to use +http.CanonicalHeaderKey. + Available since 2017.1 ` @@ -218,13 +239,24 @@ Available since 2017.1 ` -var docSA1024 = `A string cutset contains duplicate characters, suggesting TrimPrefix or TrimSuffix should be used instead of TrimLeft or TrimRight +var docSA1024 = `A string cutset contains duplicate characters + +The strings.TrimLeft and strings.TrimRight functions take cutsets, not +prefixes. A cutset is treated as a set of characters to remove from a +string. 
For example, + + strings.TrimLeft("42133word", "1234")) + +will result in the string "word" – any characters that are 1, 2, 3 or +4 are cut from the left of the string. + +In order to remove one string from another, use strings.TrimPrefix instead. Available since 2017.1 ` -var docSA1025 = `It is not possible to use Reset's return value correctly +var docSA1025 = `It is not possible to use (*time.Timer).Reset's return value correctly Available since 2019.1 @@ -258,6 +290,23 @@ Available since var docSA2001 = `Empty critical section, did you mean to defer the unlock? +Empty critical sections of the kind + + mu.Lock() + mu.Unlock() + +are very often a typo, and the following was intended instead: + + mu.Lock() + defer mu.Unlock() + +Do note that sometimes empty critical sections can be useful, as a +form of signaling to wait on another goroutine. Many times, there are +simpler ways of achieving the same effect. When that isn't the case, +the code should be amply commented to avoid confusion. Combining such +comments with a //lint:ignore directive can be used to suppress this +rare false positive. + Available since 2017.1 ` @@ -570,13 +619,13 @@ Available since var docSA5008 = `Invalid struct tag Available since - Unreleased + Unreleased ` var docSA5009 = `Invalid Printf call Available since - Unreleased + Unreleased ` var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile From e463bee4c9092b7f4d80cf97b0364e7c2f2201ef Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 10:27:37 +0200 Subject: [PATCH 134/254] lint: trim the cache --- lint/runner.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lint/runner.go b/lint/runner.go index f32f63969..3904bbbdf 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -310,6 +310,8 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy return nil, err } + defer r.cache.Trim() + m := map[*packages.Package]*Package{} packages.Visit(loaded, nil, func(l *packages.Package) { m[l] = &Package{ From 9ab68137a2a2ef74252ad7c21eea8fca8bedde10 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 10:31:01 +0200 Subject: [PATCH 135/254] lint: don't crash because of nil errors Fixes a regression introduced in 7e48d76e72a953e942c2ca6accec8259e3e3f371 --- lint/runner.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lint/runner.go b/lint/runner.go index 3904bbbdf..c3c4a3ab0 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -556,6 +556,9 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { depErrors := map[dependencyError]int{} for _, err := range errs { + if err == nil { + continue + } switch err := err.(type) { case analysisError: switch err := err.err.(type) { From 22f090dc18d8c66f7fd20c27c73c538257f68cb5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 11:47:22 +0200 Subject: [PATCH 136/254] Minor documentation tweaks --- simple/doc.go | 4 ++-- staticcheck/doc.go | 13 ++++++------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/simple/doc.go b/simple/doc.go index 5db94f267..09d3d4e34 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -466,12 +466,12 @@ var docS1033 = `Unnecessary guard around call to delete Calling delete on a nil map is a no-op. 
-Available since: +Available since Unreleased ` var docS1034 = `Use result of type assertion to simplify cases -Available since: +Available since Unreleased ` diff --git a/staticcheck/doc.go b/staticcheck/doc.go index c5776d2ff..17682a795 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -21,7 +21,7 @@ Available since var docSA1003 = `Unsupported argument to functions in encoding/binary The encoding/binary package can only serialize types with known sizes. -This precludes the use of the 'int' and 'uint' types, as their sizes +This precludes the use of the int and uint types, as their sizes differ on different architectures. Furthermore, it doesn't support serializing maps, channels, strings, or functions. @@ -43,7 +43,7 @@ large durations. These can be combined with arithmetic to express arbitrary durations, for example '5 * time.Second' for 5 seconds. If you truly meant to sleep for a tiny amount of time, use -'n * time.Nanosecond" to signal to staticcheck that you did mean to sleep +'n * time.Nanosecond' to signal to staticcheck that you did mean to sleep for some amount of nanoseconds. Available since @@ -86,11 +86,11 @@ enter a string such as and you printed it with -fmt.Printf(s) + fmt.Printf(s) it would lead to the following output: - Interest rate: 5%!(NOVERB). + Interest rate: 5%!(NOVERB). Similarly, forming the first parameter via string concatenation with user input should be avoided for the same reason. When printing user @@ -193,7 +193,6 @@ avoid missing signals, the channel should be buffered and of the appropriate size. For a channel used for notification of just one signal value, a buffer of size 1 is sufficient. - Available since 2017.1 ` @@ -734,7 +733,7 @@ Available since Unreleased ` -var docSA9001 = `Defers in 'for range' loops may not run when you expect them to +var docSA9001 = `Defers in range loops may not run when you expect them to Available since 2017.1 @@ -790,7 +789,7 @@ Wrong type in variable declarations The most obvious issue with such incorrect enumerations expresses itself as a compile error: -package pkg + package pkg const ( EnumFirst uint8 = 1 From f38cff83b434f244ef71f3b7582e2da3f0247fd4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 23:55:37 +0200 Subject: [PATCH 137/254] unused: actually keep track of packages --- unused/unused.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/unused/unused.go b/unused/unused.go index 7b3433115..454c78e53 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -424,6 +424,7 @@ func NewChecker() *Checker { c := &Checker{ seen: map[token.Position]struct{}{}, initialPackages: map[*types.Package]struct{}{}, + allPackages: map[*types.Package]struct{}{}, } return c @@ -456,6 +457,7 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { visit(imp) } } + visit(pass.Pkg) c.fset = pass.Fset ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) From ff79159081df1cd46f4058a3f4f2bf4285e68ba6 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 27 Apr 2019 23:58:46 +0200 Subject: [PATCH 138/254] unused: don't crash in whole-program mode if no packages were analyzed --- unused/unused.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/unused/unused.go b/unused/unused.go index 454c78e53..279621614 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -591,6 +591,10 @@ func (c *Checker) results() []types.Object { var out []types.Object if c.WholeProgram { + if c.graph == nil { + // We never analyzed any packages + return nil + } var ifaces []*types.Interface var 
notIfaces []types.Type From 8ac70338216ce311653321f572d5ad13934fa0de Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 1 May 2019 08:56:58 +0200 Subject: [PATCH 139/254] lint: isolate facts of different analyses --- lint/runner.go | 354 ++++++++++++++++++++++++++++++++++--------------- 1 file changed, 249 insertions(+), 105 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index c3c4a3ab0..6ceab9e94 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -32,13 +32,16 @@ type Package struct { hash string resultsMu sync.Mutex - results map[*analysis.Analyzer]*result + results []*result cfg *config.Config gen map[string]bool problems []Problem ignores []Ignore errs []error + + facts []map[types.Object][]analysis.Fact + pkgFacts [][]analysis.Fact } type result struct { @@ -55,20 +58,72 @@ type Runner struct { ld loader.Loader cache *cache.Cache - factsMu sync.RWMutex + builtMu sync.Mutex + built map[*Package]*buildResult + + analyzerIDs analyzerIDs +} + +type analyzerIDs struct { + m map[*analysis.Analyzer]int +} + +func (ids analyzerIDs) get(a *analysis.Analyzer) int { + n, ok := ids.m[a] + if !ok { + panic(fmt.Sprintf("no ID for analyzer %s", a)) + } + return n +} + +type Fact struct { + Path string + Fact analysis.Fact +} + +type newFact struct { + obj types.Object + fact analysis.Fact +} + +type analysisAction struct { + analyzer *analysis.Analyzer + pkg *Package + newFacts []newFact + problems []Problem + facts map[types.Object][]analysis.Fact pkgFacts map[*types.Package][]analysis.Fact +} - builtMu sync.Mutex - built map[*Package]*buildResult +func (ac *analysisAction) allObjectFacts() []analysis.ObjectFact { + out := make([]analysis.ObjectFact, 0, len(ac.facts)) + for obj, facts := range ac.facts { + for _, fact := range facts { + out = append(out, analysis.ObjectFact{ + Object: obj, + Fact: fact, + }) + } + } + return out +} + +func (ac *analysisAction) allPackageFacts() []analysis.PackageFact { + out := make([]analysis.PackageFact, 0, len(ac.pkgFacts)) + for pkg, facts := range ac.pkgFacts { + for _, fact := range facts { + out = append(out, analysis.PackageFact{ + Package: pkg, + Fact: fact, + }) + } + } + return out } -func (r *Runner) importObjectFact(obj types.Object, fact analysis.Fact) bool { - r.factsMu.RLock() - defer r.factsMu.RUnlock() - // OPT(dh): consider looking for the fact in the analysisAction - // first, to avoid lock contention - for _, f := range r.facts[obj] { +func (ac *analysisAction) importObjectFact(obj types.Object, fact analysis.Fact) bool { + for _, f := range ac.facts[obj] { if reflect.TypeOf(f) == reflect.TypeOf(fact) { reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) return true @@ -77,10 +132,8 @@ func (r *Runner) importObjectFact(obj types.Object, fact analysis.Fact) bool { return false } -func (r *Runner) importPackageFact(pkg *types.Package, fact analysis.Fact) bool { - r.factsMu.RLock() - defer r.factsMu.RUnlock() - for _, f := range r.pkgFacts[pkg] { +func (ac *analysisAction) importPackageFact(pkg *types.Package, fact analysis.Fact) bool { + for _, f := range ac.pkgFacts[pkg] { if reflect.TypeOf(f) == reflect.TypeOf(fact) { reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) return true @@ -89,33 +142,14 @@ func (r *Runner) importPackageFact(pkg *types.Package, fact analysis.Fact) bool return false } -func (r *Runner) exportObjectFact(ac *analysisAction, obj types.Object, fact analysis.Fact) { - r.factsMu.Lock() - r.facts[obj] = append(r.facts[obj], fact) - r.factsMu.Unlock() - path, err := objectpath.For(obj) - if 
err == nil { - ac.newFacts = append(ac.newFacts, Fact{string(path), fact}) - } -} - -func (r *Runner) exportPackageFact(ac *analysisAction, fact analysis.Fact) { - r.factsMu.Lock() - r.pkgFacts[ac.pkg.Types] = append(r.pkgFacts[ac.pkg.Types], fact) - r.factsMu.Unlock() - ac.newFacts = append(ac.newFacts, Fact{"", fact}) -} - -type Fact struct { - Path string - Fact analysis.Fact +func (ac *analysisAction) exportObjectFact(obj types.Object, fact analysis.Fact) { + ac.facts[obj] = append(ac.facts[obj], fact) + ac.newFacts = append(ac.newFacts, newFact{obj, fact}) } -type analysisAction struct { - analyzer *analysis.Analyzer - pkg *Package - newFacts []Fact - problems []Problem +func (ac *analysisAction) exportPackageFact(fact analysis.Fact) { + ac.pkgFacts[ac.pkg.Types] = append(ac.pkgFacts[ac.pkg.Types], fact) + ac.newFacts = append(ac.newFacts, newFact{nil, fact}) } func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { @@ -129,7 +163,7 @@ func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { ac.pkg.resultsMu.Lock() - res := ac.pkg.results[ac.analyzer] + res := ac.pkg.results[r.analyzerIDs.get(ac.analyzer)] if res != nil { ac.pkg.resultsMu.Unlock() <-res.ready @@ -138,7 +172,7 @@ func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { res = &result{ ready: make(chan struct{}), } - ac.pkg.results[ac.analyzer] = res + ac.pkg.results[r.analyzerIDs.get(ac.analyzer)] = res ac.pkg.resultsMu.Unlock() defer func() { @@ -163,17 +197,15 @@ func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { TypesInfo: ac.pkg.TypesInfo, TypesSizes: ac.pkg.TypesSizes, ResultOf: map[*analysis.Analyzer]interface{}{}, - ImportObjectFact: r.importObjectFact, - ImportPackageFact: r.importPackageFact, - ExportObjectFact: func(obj types.Object, fact analysis.Fact) { - r.exportObjectFact(ac, obj, fact) - }, - ExportPackageFact: func(fact analysis.Fact) { - r.exportPackageFact(ac, fact) - }, + ImportObjectFact: ac.importObjectFact, + ImportPackageFact: ac.importPackageFact, + ExportObjectFact: ac.exportObjectFact, + ExportPackageFact: ac.exportPackageFact, Report: func(d analysis.Diagnostic) { ac.report(pass, d) }, + AllObjectFacts: ac.allObjectFacts, + AllPackageFacts: ac.allPackageFacts, } if !ac.pkg.initial { @@ -228,6 +260,36 @@ func (err dependencyError) Error() string { return fmt.Sprintf("error running dependency %s: %s", err.dep, err.err) } +func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysisAction { + ac := &analysisAction{ + analyzer: a, + pkg: pkg, + facts: map[types.Object][]analysis.Fact{}, + pkgFacts: map[*types.Package][]analysis.Fact{}, + } + + seen := map[*Package]struct{}{} + var dfs func(*Package) + // OPT(dh): ideally, we'd merge facts when creating the Packages + dfs = func(pkg *Package) { + if _, ok := seen[pkg]; ok { + return + } + seen[pkg] = struct{}{} + for obj, facts := range pkg.facts[r.analyzerIDs.get(a)] { + ac.facts[obj] = facts + } + // XXX copy + ac.pkgFacts[pkg.Types] = pkg.pkgFacts[r.analyzerIDs.get(a)] + for _, imp := range pkg.Imports { + dfs(imp) + } + } + dfs(pkg) + + return ac +} + func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { if !ac.pkg.fromSource { panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg)) @@ -245,7 +307,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter req = 
append(req, IsGeneratedAnalyzer, config.Analyzer) } for _, req := range req { - acReq := &analysisAction{analyzer: req, pkg: ac.pkg} + acReq := r.makeAnalysisAction(req, ac.pkg) ret, err := r.runAnalysis(acReq) if err != nil { // We couldn't run a dependency, no point in going on @@ -261,10 +323,32 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter return nil, err } + for _, fact := range ac.newFacts { + if fact.obj == nil { + id := r.analyzerIDs.get(ac.analyzer) + ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact.fact) + } else { + m := ac.pkg.facts[r.analyzerIDs.get(ac.analyzer)] + m[fact.obj] = append(m[fact.obj], fact.fact) + } + } + // Persist facts to cache if len(ac.analyzer.FactTypes) > 0 { + facts := make([]Fact, 0, len(ac.newFacts)) + for _, fact := range ac.newFacts { + if fact.obj == nil { + facts = append(facts, Fact{"", fact.fact}) + } else { + path, err := objectpath.For(fact.obj) + if err != nil { + continue + } + facts = append(facts, Fact{string(path), fact.fact}) + } + } buf := &bytes.Buffer{} - if err := gob.NewEncoder(buf).Encode(ac.newFacts); err != nil { + if err := gob.NewEncoder(buf).Encode(facts); err != nil { return nil, err } aID, err := passActionID(ac.pkg, ac.analyzer) @@ -287,18 +371,32 @@ func NewRunner() (*Runner, error) { } return &Runner{ - cache: cache, - facts: map[types.Object][]analysis.Fact{}, - pkgFacts: map[*types.Package][]analysis.Fact{}, - built: map[*Package]*buildResult{}, + cache: cache, + built: map[*Package]*buildResult{}, }, nil } func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer) ([]*Package, error) { - for _, a := range analyzers { + r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}} + id := 0 + seen := map[*analysis.Analyzer]struct{}{} + var dfs func(a *analysis.Analyzer) + dfs = func(a *analysis.Analyzer) { + if _, ok := seen[a]; ok { + return + } + seen[a] = struct{}{} + r.analyzerIDs.m[a] = id + id++ for _, f := range a.FactTypes { gob.Register(f) } + for _, req := range a.Requires { + dfs(req) + } + } + for _, a := range analyzers { + dfs(a) } var dcfg packages.Config @@ -315,9 +413,14 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy m := map[*packages.Package]*Package{} packages.Visit(loaded, nil, func(l *packages.Package) { m[l] = &Package{ - Package: l, - Imports: map[string]*Package{}, - results: map[*analysis.Analyzer]*result{}, + Package: l, + Imports: map[string]*Package{}, + results: make([]*result, len(r.analyzerIDs.m)), + facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)), + pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)), + } + for i := range m[l].facts { + m[l].facts[i] = map[types.Object][]analysis.Fact{} } for _, err := range l.Errors { m[l].errs = append(m[l].errs, err) @@ -385,27 +488,14 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { if pkg.Types != nil { panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package)) } - // Load type information - if pkg.initial { - // Load package from source - pkg.fromSource = true - return r.ld.LoadFromSource(pkg.Package) - } - var allFacts []Fact - failed := false for _, a := range analyzers { - if len(a.FactTypes) > 0 { - facts, ok := r.loadCachedFacts(a, pkg) - if !ok { - failed = true - break - } - allFacts = append(allFacts, facts...) 
- } + pkg.facts[r.analyzerIDs.get(a)] = map[types.Object][]analysis.Fact{} } - if failed { + // Load type information + if pkg.initial { + // Load package from source pkg.fromSource = true return r.ld.LoadFromSource(pkg.Package) } @@ -420,6 +510,9 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { // get the compile errors. If loading from source succeeds // we discard the result, anyway. Otherwise we'll fail // when trying to reload from export data later. + // + // FIXME(dh): we no longer reload from export data, so + // theoretically we should be able to continue pkg.fromSource = true if err := r.ld.LoadFromSource(pkg.Package); err != nil { return err @@ -433,33 +526,61 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { return fmt.Errorf("could not load export data: %s", err) } - for _, f := range allFacts { - if f.Path == "" { - // This is a package fact - r.factsMu.Lock() - r.pkgFacts[pkg.Types] = append(r.pkgFacts[pkg.Types], f.Fact) - r.factsMu.Unlock() - continue + failed := false + seen := map[*analysis.Analyzer]struct{}{} + var dfs func(*analysis.Analyzer) + dfs = func(a *analysis.Analyzer) { + if _, ok := seen[a]; ok { + return } - obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) - if err != nil { - // Be lenient about these errors. For example, when - // analysing io/ioutil from source, we may get a fact - // for methods on the devNull type, and objectpath - // will happily create a path for them. However, when - // we later load io/ioutil from export data, the path - // no longer resolves. - // - // If an exported type embeds the unexported type, - // then (part of) the unexported type will become part - // of the type information and our path will resolve - // again. - continue + seen[a] = struct{}{} + + if len(a.FactTypes) > 0 { + facts, ok := r.loadCachedFacts(a, pkg) + if !ok { + failed = true + return + } + + for _, f := range facts { + if f.Path == "" { + // This is a package fact + pkg.pkgFacts[r.analyzerIDs.get(a)] = append(pkg.pkgFacts[r.analyzerIDs.get(a)], f.Fact) + continue + } + obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) + if err != nil { + // Be lenient about these errors. For example, when + // analysing io/ioutil from source, we may get a fact + // for methods on the devNull type, and objectpath + // will happily create a path for them. However, when + // we later load io/ioutil from export data, the path + // no longer resolves. + // + // If an exported type embeds the unexported type, + // then (part of) the unexported type will become part + // of the type information and our path will resolve + // again. + continue + } + pkg.facts[r.analyzerIDs.get(a)][obj] = append(pkg.facts[r.analyzerIDs.get(a)][obj], f.Fact) + } } - r.factsMu.Lock() - r.facts[obj] = append(r.facts[obj], f.Fact) - r.factsMu.Unlock() + + for _, req := range a.Requires { + dfs(req) + } + } + for _, a := range analyzers { + dfs(a) } + + if failed { + pkg.fromSource = true + // XXX we added facts to the maps, we need to get rid of those + return r.ld.LoadFromSource(pkg.Package) + } + return nil } @@ -538,13 +659,13 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { for i, a := range analyzers { i := i a := a - ac := &analysisAction{analyzer: a, pkg: pkg} + ac := r.makeAnalysisAction(a, pkg) acs = append(acs, ac) go func() { defer wg.Done() // Only initial packages and packages with missing // facts will have been loaded from source. 
- if pkg.initial || len(a.FactTypes) > 0 { + if pkg.initial || hasFacts(a) { if _, err := r.runAnalysis(ac); err != nil { errs[i] = analysisError{a, pkg, err} return @@ -584,10 +705,10 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { for _, ac := range acs { pkg.problems = append(pkg.problems, ac.problems...) } - if pkg.results[config.Analyzer].v != nil { - pkg.cfg = pkg.results[config.Analyzer].v.(*config.Config) + if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil { + pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config) } - pkg.gen = pkg.results[IsGeneratedAnalyzer].v.(map[string]bool) + pkg.gen = pkg.results[r.analyzerIDs.get(IsGeneratedAnalyzer)].v.(map[string]bool) // In a previous version of the code, we would throw away all type // information and reload it from export data. That was @@ -597,6 +718,29 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { // from processPkg. } +func hasFacts(a *analysis.Analyzer) bool { + ret := false + seen := map[*analysis.Analyzer]struct{}{} + var dfs func(*analysis.Analyzer) + dfs = func(a *analysis.Analyzer) { + if _, ok := seen[a]; ok { + return + } + seen[a] = struct{}{} + if len(a.FactTypes) > 0 { + ret = true + } + for _, req := range a.Requires { + if ret { + break + } + dfs(req) + } + } + dfs(a) + return ret +} + func parseDirective(s string) (cmd string, args []string) { if !strings.HasPrefix(s, "//lint:") { return "", nil From e0a8563c560e67e5449c3d1609a3c13a01af96c9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 1 May 2019 12:03:49 +0200 Subject: [PATCH 140/254] lint: analyse dependencies for transitive facts --- lint/runner.go | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 6ceab9e94..eff2d4649 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -527,13 +527,13 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { } failed := false - seen := map[*analysis.Analyzer]struct{}{} + seen := make([]bool, len(r.analyzerIDs.m)) var dfs func(*analysis.Analyzer) dfs = func(a *analysis.Analyzer) { - if _, ok := seen[a]; ok { + if seen[r.analyzerIDs.get(a)] { return } - seen[a] = struct{}{} + seen[r.analyzerIDs.get(a)] = true if len(a.FactTypes) > 0 { facts, ok := r.loadCachedFacts(a, pkg) @@ -665,7 +665,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { defer wg.Done() // Only initial packages and packages with missing // facts will have been loaded from source. - if pkg.initial || hasFacts(a) { + if pkg.initial || r.hasFacts(a) { if _, err := r.runAnalysis(ac); err != nil { errs[i] = analysisError{a, pkg, err} return @@ -718,15 +718,15 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { // from processPkg. 
} -func hasFacts(a *analysis.Analyzer) bool { +func (r *Runner) hasFacts(a *analysis.Analyzer) bool { ret := false - seen := map[*analysis.Analyzer]struct{}{} + seen := make([]bool, len(r.analyzerIDs.m)) var dfs func(*analysis.Analyzer) dfs = func(a *analysis.Analyzer) { - if _, ok := seen[a]; ok { + if seen[r.analyzerIDs.get(a)] { return } - seen[a] = struct{}{} + seen[r.analyzerIDs.get(a)] = true if len(a.FactTypes) > 0 { ret = true } From c8a52c4546d626c5172eac48d6beac7f4d45db58 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 1 May 2019 12:04:08 +0200 Subject: [PATCH 141/254] lint: copy package facts into analysisAction --- lint/runner.go | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index eff2d4649..0c9e3c99a 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -24,6 +24,14 @@ import ( "honnef.co/go/tools/loader" ) +// OPT(dh): for a dependency tree A->B->C->D, if we have cached data +// for B, there should be no need to load C and D individually. Go's +// export data for B contains all the data we need on types, and our +// fact cache could store the union of B, C and D in B. +// +// This may change unused's behavior, however, as it may observe fewer +// interfaces from transitive dependencies. + type Package struct { *packages.Package Imports map[string]*Package @@ -279,8 +287,8 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi for obj, facts := range pkg.facts[r.analyzerIDs.get(a)] { ac.facts[obj] = facts } - // XXX copy - ac.pkgFacts[pkg.Types] = pkg.pkgFacts[r.analyzerIDs.get(a)] + ac.pkgFacts[pkg.Types] = make([]analysis.Fact, len(pkg.pkgFacts[r.analyzerIDs.get(a)])) + copy(ac.pkgFacts[pkg.Types], pkg.pkgFacts[r.analyzerIDs.get(a)]) for _, imp := range pkg.Imports { dfs(imp) } From 2f66d383cd5ca26da9ee5410b7f99cf67d69d09e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 10:17:38 +0200 Subject: [PATCH 142/254] lint: clean up revised fact handling --- lint/runner.go | 66 ++++++++++++++++++++++++++++++++++---------------- 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 0c9e3c99a..2c12c015e 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -48,6 +48,7 @@ type Package struct { ignores []Ignore errs []error + // these slices are indexed by analysis facts []map[types.Object][]analysis.Fact pkgFacts [][]analysis.Fact } @@ -77,16 +78,13 @@ type analyzerIDs struct { } func (ids analyzerIDs) get(a *analysis.Analyzer) int { - n, ok := ids.m[a] - if !ok { - panic(fmt.Sprintf("no ID for analyzer %s", a)) - } - return n + return ids.m[a] } type Fact struct { - Path string - Fact analysis.Fact + PkgPath string + ObjPath string + Fact analysis.Fact } type newFact struct { @@ -276,19 +274,22 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi pkgFacts: map[*types.Package][]analysis.Fact{}, } + // Populate facts in analysisAction with COW versions of the facts + // stored in the package. A package's initial set of facts for an + // analysis is the union of all the facts for that analysis + // produced on the package's dependencies. 
+ for obj, facts := range pkg.facts[r.analyzerIDs.get(a)] { + ac.facts[obj] = facts[0:len(facts):len(facts)] + } seen := map[*Package]struct{}{} var dfs func(*Package) - // OPT(dh): ideally, we'd merge facts when creating the Packages dfs = func(pkg *Package) { if _, ok := seen[pkg]; ok { return } seen[pkg] = struct{}{} - for obj, facts := range pkg.facts[r.analyzerIDs.get(a)] { - ac.facts[obj] = facts - } - ac.pkgFacts[pkg.Types] = make([]analysis.Fact, len(pkg.pkgFacts[r.analyzerIDs.get(a)])) - copy(ac.pkgFacts[pkg.Types], pkg.pkgFacts[r.analyzerIDs.get(a)]) + s := pkg.pkgFacts[r.analyzerIDs.get(a)] + ac.pkgFacts[pkg.Types] = s[0:len(s):len(s)] for _, imp := range pkg.Imports { dfs(imp) } @@ -331,6 +332,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter return nil, err } + // Merge new facts into the package. for _, fact := range ac.newFacts { if fact.obj == nil { id := r.analyzerIDs.get(ac.analyzer) @@ -346,13 +348,13 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter facts := make([]Fact, 0, len(ac.newFacts)) for _, fact := range ac.newFacts { if fact.obj == nil { - facts = append(facts, Fact{"", fact.fact}) + facts = append(facts, Fact{fact.obj.Pkg().Path(), "", fact.fact}) } else { path, err := objectpath.For(fact.obj) if err != nil { continue } - facts = append(facts, Fact{string(path), fact.fact}) + facts = append(facts, Fact{fact.obj.Pkg().Path(), string(path), fact.fact}) } } buf := &bytes.Buffer{} @@ -492,15 +494,18 @@ func parsePos(pos string) token.Position { } } +// loadPkg loads a Go package. If the package is in the set of initial +// packages, it will be loaded from source, otherwise it will be +// loaded from export data. In the case that the package was loaded +// from export data, cached facts will also be loaded. +// +// Currently, only cached facts for this package will be loaded, not +// for any of its dependencies. func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { if pkg.Types != nil { panic(fmt.Sprintf("internal error: %s has already been loaded", pkg.Package)) } - for _, a := range analyzers { - pkg.facts[r.analyzerIDs.get(a)] = map[types.Object][]analysis.Fact{} - } - // Load type information if pkg.initial { // Load package from source @@ -551,12 +556,19 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { } for _, f := range facts { - if f.Path == "" { + if pkg.PkgPath != f.PkgPath { + // TODO(dh): for now we load all packages in the + // dependency graph and don't utilize the fact + // that a package contains all the facts of its + // dependencies. + continue + } + if f.ObjPath == "" { // This is a package fact pkg.pkgFacts[r.analyzerIDs.get(a)] = append(pkg.pkgFacts[r.analyzerIDs.get(a)], f.Fact) continue } - obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) + obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.ObjPath)) if err != nil { // Be lenient about these errors. For example, when // analysing io/ioutil from source, we may get a fact @@ -602,6 +614,9 @@ func (err analysisError) Error() string { return fmt.Sprintf("error running analyzer %s on %s: %s", err.analyzer, err.pkg, err.err) } +// processPkg processes a package. This involves loading the package, +// either from export data or from source. For packages loaded from +// source, the provides analyzers will be run on the package. 
func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { r.builtMu.Lock() res := r.built[pkg] @@ -654,6 +669,15 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { return } + // A package's object facts is the union of all of its dependencies. + for _, imp := range pkg.Imports { + for ai, m := range imp.facts { + for obj, facts := range m { + pkg.facts[ai][obj] = facts[0:len(facts):len(facts)] + } + } + } + if !pkg.fromSource { // Nothing left to do for the package. return From 6b07219ee0adbb89e84d3cbd05a1a1d02cbba7d8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 10:24:12 +0200 Subject: [PATCH 143/254] lint: simplify fact handling further --- lint/runner.go | 41 +++++++++++++++++------------------------ 1 file changed, 17 insertions(+), 24 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 2c12c015e..eab1d995d 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -93,18 +93,18 @@ type newFact struct { } type analysisAction struct { - analyzer *analysis.Analyzer - pkg *Package - newFacts []newFact - problems []Problem + analyzer *analysis.Analyzer + analyzerID int + pkg *Package + newFacts []newFact + problems []Problem - facts map[types.Object][]analysis.Fact pkgFacts map[*types.Package][]analysis.Fact } func (ac *analysisAction) allObjectFacts() []analysis.ObjectFact { - out := make([]analysis.ObjectFact, 0, len(ac.facts)) - for obj, facts := range ac.facts { + out := make([]analysis.ObjectFact, 0, len(ac.pkg.facts[ac.analyzerID])) + for obj, facts := range ac.pkg.facts[ac.analyzerID] { for _, fact := range facts { out = append(out, analysis.ObjectFact{ Object: obj, @@ -129,7 +129,7 @@ func (ac *analysisAction) allPackageFacts() []analysis.PackageFact { } func (ac *analysisAction) importObjectFact(obj types.Object, fact analysis.Fact) bool { - for _, f := range ac.facts[obj] { + for _, f := range ac.pkg.facts[ac.analyzerID][obj] { if reflect.TypeOf(f) == reflect.TypeOf(fact) { reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) return true @@ -149,8 +149,7 @@ func (ac *analysisAction) importPackageFact(pkg *types.Package, fact analysis.Fa } func (ac *analysisAction) exportObjectFact(obj types.Object, fact analysis.Fact) { - ac.facts[obj] = append(ac.facts[obj], fact) - ac.newFacts = append(ac.newFacts, newFact{obj, fact}) + ac.pkg.facts[ac.analyzerID][obj] = append(ac.pkg.facts[ac.analyzerID][obj], fact) } func (ac *analysisAction) exportPackageFact(fact analysis.Fact) { @@ -267,20 +266,15 @@ func (err dependencyError) Error() string { } func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysisAction { + aid := r.analyzerIDs.get(a) ac := &analysisAction{ - analyzer: a, - pkg: pkg, - facts: map[types.Object][]analysis.Fact{}, - pkgFacts: map[*types.Package][]analysis.Fact{}, + analyzer: a, + analyzerID: aid, + pkg: pkg, + pkgFacts: map[*types.Package][]analysis.Fact{}, } - // Populate facts in analysisAction with COW versions of the facts - // stored in the package. A package's initial set of facts for an - // analysis is the union of all the facts for that analysis - // produced on the package's dependencies. 
- for obj, facts := range pkg.facts[r.analyzerIDs.get(a)] { - ac.facts[obj] = facts[0:len(facts):len(facts)] - } + // Merge all package facts of dependencies seen := map[*Package]struct{}{} var dfs func(*Package) dfs = func(pkg *Package) { @@ -288,7 +282,7 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi return } seen[pkg] = struct{}{} - s := pkg.pkgFacts[r.analyzerIDs.get(a)] + s := pkg.pkgFacts[aid] ac.pkgFacts[pkg.Types] = s[0:len(s):len(s)] for _, imp := range pkg.Imports { dfs(imp) @@ -338,8 +332,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter id := r.analyzerIDs.get(ac.analyzer) ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact.fact) } else { - m := ac.pkg.facts[r.analyzerIDs.get(ac.analyzer)] - m[fact.obj] = append(m[fact.obj], fact.fact) + panic("unexpected new object fact") } } From 22a174b906af9ef0bcd42bbede1f3a5c035c0910 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 10:27:30 +0200 Subject: [PATCH 144/254] lint: skip work involving facts for analyses without facts --- lint/runner.go | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index eab1d995d..9dd71fb63 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -271,10 +271,14 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi analyzer: a, analyzerID: aid, pkg: pkg, - pkgFacts: map[*types.Package][]analysis.Fact{}, + } + + if len(a.FactTypes) == 0 { + return ac } // Merge all package facts of dependencies + ac.pkgFacts = map[*types.Package][]analysis.Fact{} seen := map[*Package]struct{}{} var dfs func(*Package) dfs = func(pkg *Package) { @@ -326,18 +330,18 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter return nil, err } - // Merge new facts into the package. - for _, fact := range ac.newFacts { - if fact.obj == nil { - id := r.analyzerIDs.get(ac.analyzer) - ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact.fact) - } else { - panic("unexpected new object fact") + if len(ac.analyzer.FactTypes) > 0 { + // Merge new facts into the package. + for _, fact := range ac.newFacts { + if fact.obj == nil { + id := r.analyzerIDs.get(ac.analyzer) + ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact.fact) + } else { + panic("unexpected new object fact") + } } - } - // Persist facts to cache - if len(ac.analyzer.FactTypes) > 0 { + // Persist facts to cache facts := make([]Fact, 0, len(ac.newFacts)) for _, fact := range ac.newFacts { if fact.obj == nil { From 8facc3f78314fb3e074869932547c6dba88e7c3d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 10:50:42 +0200 Subject: [PATCH 145/254] lint: add a sanity checking mode --- lint/runner.go | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/lint/runner.go b/lint/runner.go index 9dd71fb63..5d0f3841a 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -24,6 +24,9 @@ import ( "honnef.co/go/tools/loader" ) +// If enabled, abuse of the go/analysis API will lead to panics +const sanityCheck = true + // OPT(dh): for a dependency tree A->B->C->D, if we have cached data // for B, there should be no need to load C and D individually. 
Go's // export data for B contains all the data we need on types, and our @@ -129,6 +132,9 @@ func (ac *analysisAction) allPackageFacts() []analysis.PackageFact { } func (ac *analysisAction) importObjectFact(obj types.Object, fact analysis.Fact) bool { + if sanityCheck && len(ac.analyzer.FactTypes) == 0 { + panic("analysis doesn't export any facts") + } for _, f := range ac.pkg.facts[ac.analyzerID][obj] { if reflect.TypeOf(f) == reflect.TypeOf(fact) { reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) @@ -139,6 +145,9 @@ func (ac *analysisAction) importObjectFact(obj types.Object, fact analysis.Fact) } func (ac *analysisAction) importPackageFact(pkg *types.Package, fact analysis.Fact) bool { + if sanityCheck && len(ac.analyzer.FactTypes) == 0 { + panic("analysis doesn't export any facts") + } for _, f := range ac.pkgFacts[pkg] { if reflect.TypeOf(f) == reflect.TypeOf(fact) { reflect.ValueOf(fact).Elem().Set(reflect.ValueOf(f).Elem()) @@ -149,10 +158,16 @@ func (ac *analysisAction) importPackageFact(pkg *types.Package, fact analysis.Fa } func (ac *analysisAction) exportObjectFact(obj types.Object, fact analysis.Fact) { + if sanityCheck && len(ac.analyzer.FactTypes) == 0 { + panic("analysis doesn't export any facts") + } ac.pkg.facts[ac.analyzerID][obj] = append(ac.pkg.facts[ac.analyzerID][obj], fact) } func (ac *analysisAction) exportPackageFact(fact analysis.Fact) { + if sanityCheck && len(ac.analyzer.FactTypes) == 0 { + panic("analysis doesn't export any facts") + } ac.pkgFacts[ac.pkg.Types] = append(ac.pkgFacts[ac.pkg.Types], fact) ac.newFacts = append(ac.newFacts, newFact{nil, fact}) } From ffe3be182e1fb9307acbccb3375e77c868d53e51 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 11:08:16 +0200 Subject: [PATCH 146/254] staticcheck, facts: split fact-finding code into own analyzes --- facts/deprecated.go | 144 ++++++++++++++++++++ facts/purity.go | 175 +++++++++++++++++++++++++ staticcheck/analysis.go | 23 ++-- staticcheck/lint.go | 282 ++-------------------------------------- 4 files changed, 339 insertions(+), 285 deletions(-) create mode 100644 facts/deprecated.go create mode 100644 facts/purity.go diff --git a/facts/deprecated.go b/facts/deprecated.go new file mode 100644 index 000000000..8587b0e0e --- /dev/null +++ b/facts/deprecated.go @@ -0,0 +1,144 @@ +package facts + +import ( + "go/ast" + "go/token" + "go/types" + "reflect" + "strings" + + "golang.org/x/tools/go/analysis" +) + +type IsDeprecated struct{ Msg string } + +func (*IsDeprecated) AFact() {} +func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg } + +type DeprecatedResult struct { + Objects map[types.Object]*IsDeprecated + Packages map[*types.Package]*IsDeprecated +} + +var Deprecated = &analysis.Analyzer{ + Name: "fact_deprecated", + Doc: "Mark deprecated objects", + Run: deprecated, + FactTypes: []analysis.Fact{(*IsDeprecated)(nil)}, + ResultType: reflect.TypeOf(DeprecatedResult{}), +} + +func deprecated(pass *analysis.Pass) (interface{}, error) { + var names []*ast.Ident + + extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { + for _, doc := range docs { + if doc == nil { + continue + } + parts := strings.Split(doc.Text(), "\n\n") + last := parts[len(parts)-1] + if !strings.HasPrefix(last, "Deprecated: ") { + continue + } + alt := last[len("Deprecated: "):] + alt = strings.Replace(alt, "\n", " ", -1) + return alt + } + return "" + } + doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { + alt := extractDeprecatedMessage(docs) + if alt == "" { + 
return + } + + for _, name := range names { + obj := pass.TypesInfo.ObjectOf(name) + pass.ExportObjectFact(obj, &IsDeprecated{alt}) + } + } + + var docs []*ast.CommentGroup + for _, f := range pass.Files { + docs = append(docs, f.Doc) + } + if alt := extractDeprecatedMessage(docs); alt != "" { + // Don't mark package syscall as deprecated, even though + // it is. A lot of people still use it for simple + // constants like SIGKILL, and I am not comfortable + // telling them to use x/sys for that. + if pass.Pkg.Path() != "syscall" { + pass.ExportPackageFact(&IsDeprecated{alt}) + } + } + + docs = docs[:0] + for _, f := range pass.Files { + fn := func(node ast.Node) bool { + if node == nil { + return true + } + var ret bool + switch node := node.(type) { + case *ast.GenDecl: + switch node.Tok { + case token.TYPE, token.CONST, token.VAR: + docs = append(docs, node.Doc) + return true + default: + return false + } + case *ast.FuncDecl: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = false + case *ast.TypeSpec: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = true + case *ast.ValueSpec: + docs = append(docs, node.Doc) + names = node.Names + ret = false + case *ast.File: + return true + case *ast.StructType: + for _, field := range node.Fields.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + case *ast.InterfaceType: + for _, field := range node.Methods.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + default: + return false + } + if len(names) == 0 || len(docs) == 0 { + return ret + } + doDocs(names, docs) + + docs = docs[:0] + names = nil + return ret + } + ast.Inspect(f, fn) + } + + out := DeprecatedResult{ + Objects: map[types.Object]*IsDeprecated{}, + Packages: map[*types.Package]*IsDeprecated{}, + } + + for _, fact := range pass.AllObjectFacts() { + out.Objects[fact.Object] = fact.Fact.(*IsDeprecated) + } + for _, fact := range pass.AllPackageFacts() { + out.Packages[fact.Package] = fact.Fact.(*IsDeprecated) + } + + return out, nil +} diff --git a/facts/purity.go b/facts/purity.go new file mode 100644 index 000000000..861ca4110 --- /dev/null +++ b/facts/purity.go @@ -0,0 +1,175 @@ +package facts + +import ( + "go/token" + "go/types" + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/functions" + "honnef.co/go/tools/internal/passes/buildssa" + "honnef.co/go/tools/ssa" +) + +type IsPure struct{} + +func (*IsPure) AFact() {} +func (d *IsPure) String() string { return "is pure" } + +type PurityResult map[*types.Func]*IsPure + +var Purity = &analysis.Analyzer{ + Name: "fact_purity", + Doc: "Mark pure functions", + Run: purity, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + FactTypes: []analysis.Fact{(*IsPure)(nil)}, + ResultType: reflect.TypeOf(PurityResult{}), +} + +var pureStdlib = map[string]struct{}{ + "errors.New": {}, + "fmt.Errorf": {}, + "fmt.Sprintf": {}, + "fmt.Sprint": {}, + "sort.Reverse": {}, + "strings.Map": {}, + "strings.Repeat": {}, + "strings.Replace": {}, + "strings.Title": {}, + "strings.ToLower": {}, + "strings.ToLowerSpecial": {}, + "strings.ToTitle": {}, + "strings.ToTitleSpecial": {}, + "strings.ToUpper": {}, + "strings.ToUpperSpecial": {}, + "strings.Trim": {}, + "strings.TrimFunc": {}, + "strings.TrimLeft": {}, + "strings.TrimLeftFunc": {}, + "strings.TrimPrefix": {}, + "strings.TrimRight": {}, + "strings.TrimRightFunc": {}, + "strings.TrimSpace": {}, + "strings.TrimSuffix": {}, + "(*net/http.Request).WithContext": {}, +} + +func 
purity(pass *analysis.Pass) (interface{}, error) { + seen := map[*ssa.Function]struct{}{} + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + var check func(ssafn *ssa.Function) (ret bool) + check = func(ssafn *ssa.Function) (ret bool) { + if ssafn.Object() == nil { + // TODO(dh): support closures + return false + } + if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) { + return true + } + if ssafn.Pkg != ssapkg { + // Function is in another package but wasn't marked as + // pure, ergo it isn't pure + return false + } + // Break recursion + if _, ok := seen[ssafn]; ok { + return false + } + + seen[ssafn] = struct{}{} + defer func() { + if ret { + pass.ExportObjectFact(ssafn.Object(), &IsPure{}) + } + }() + + if functions.IsStub(ssafn) { + return false + } + + if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok { + return true + } + + if ssafn.Signature.Results().Len() == 0 { + // A function with no return values is empty or is doing some + // work we cannot see (for example because of build tags); + // don't consider it pure. + return false + } + + for _, param := range ssafn.Params { + if _, ok := param.Type().Underlying().(*types.Basic); !ok { + return false + } + } + + if ssafn.Blocks == nil { + return false + } + checkCall := func(common *ssa.CallCommon) bool { + if common.IsInvoke() { + return false + } + builtin, ok := common.Value.(*ssa.Builtin) + if !ok { + if common.StaticCallee() != ssafn { + if common.StaticCallee() == nil { + return false + } + if !check(common.StaticCallee()) { + return false + } + } + } else { + switch builtin.Name() { + case "len", "cap", "make", "new": + default: + return false + } + } + return true + } + for _, b := range ssafn.Blocks { + for _, ins := range b.Instrs { + switch ins := ins.(type) { + case *ssa.Call: + if !checkCall(ins.Common()) { + return false + } + case *ssa.Defer: + if !checkCall(&ins.Call) { + return false + } + case *ssa.Select: + return false + case *ssa.Send: + return false + case *ssa.Go: + return false + case *ssa.Panic: + return false + case *ssa.Store: + return false + case *ssa.FieldAddr: + return false + case *ssa.UnOp: + if ins.Op == token.MUL || ins.Op == token.AND { + return false + } + } + } + } + return true + } + for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { + check(ssafn) + } + + out := PurityResult{} + for _, fact := range pass.AllObjectFacts() { + out[fact.Object.(*types.Func)] = fact.Fact.(*IsPure) + } + return out, nil +} diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index b62e5fec7..bfb53c11b 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -3,6 +3,7 @@ package staticcheck import ( "flag" + "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" @@ -145,12 +146,11 @@ var Analyzers = map[string]*analysis.Analyzer{ Flags: newFlagSet(), }, "SA1019": { - Name: "SA1019", - Run: CheckDeprecated, - Doc: docSA1019, - Requires: []*analysis.Analyzer{inspect.Analyzer}, - FactTypes: []analysis.Fact{(*IsDeprecated)(nil)}, - Flags: newFlagSet(), + Name: "SA1019", + Run: CheckDeprecated, + Doc: docSA1019, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated}, + Flags: newFlagSet(), }, "SA1020": { Name: "SA1020", @@ -352,12 +352,11 @@ var Analyzers = map[string]*analysis.Analyzer{ Flags: newFlagSet(), }, "SA4017": { - Name: "SA4017", - Run: CheckPureFunctions, - Doc: docSA4017, - Requires: []*analysis.Analyzer{buildssa.Analyzer}, - 
FactTypes: []analysis.Fact{(*IsPure)(nil)}, - Flags: newFlagSet(), + Name: "SA4017", + Run: CheckPureFunctions, + Doc: docSA4017, + Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Purity}, + Flags: newFlagSet(), }, "SA4018": { Name: "SA4018", diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 84119d18f..f7e250c2b 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -20,6 +20,7 @@ import ( . "honnef.co/go/tools/arg" "honnef.co/go/tools/deprecated" + "honnef.co/go/tools/facts" "honnef.co/go/tools/functions" "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/internal/sharedcheck" @@ -735,112 +736,6 @@ func fieldPath(start types.Type, indices []int) string { return p } -type IsDeprecated struct{ Msg string } - -func (*IsDeprecated) AFact() {} -func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg } - -func checkDeprecatedMark(pass *analysis.Pass) { - var names []*ast.Ident - - extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { - for _, doc := range docs { - if doc == nil { - continue - } - parts := strings.Split(doc.Text(), "\n\n") - last := parts[len(parts)-1] - if !strings.HasPrefix(last, "Deprecated: ") { - continue - } - alt := last[len("Deprecated: "):] - alt = strings.Replace(alt, "\n", " ", -1) - return alt - } - return "" - } - doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { - alt := extractDeprecatedMessage(docs) - if alt == "" { - return - } - - for _, name := range names { - obj := pass.TypesInfo.ObjectOf(name) - pass.ExportObjectFact(obj, &IsDeprecated{alt}) - } - } - - var docs []*ast.CommentGroup - for _, f := range pass.Files { - docs = append(docs, f.Doc) - } - if alt := extractDeprecatedMessage(docs); alt != "" { - // Don't mark package syscall as deprecated, even though - // it is. A lot of people still use it for simple - // constants like SIGKILL, and I am not comfortable - // telling them to use x/sys for that. 
- if pass.Pkg.Path() != "syscall" { - pass.ExportPackageFact(&IsDeprecated{alt}) - } - } - - docs = docs[:0] - for _, f := range pass.Files { - fn := func(node ast.Node) bool { - if node == nil { - return true - } - var ret bool - switch node := node.(type) { - case *ast.GenDecl: - switch node.Tok { - case token.TYPE, token.CONST, token.VAR: - docs = append(docs, node.Doc) - return true - default: - return false - } - case *ast.FuncDecl: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = false - case *ast.TypeSpec: - docs = append(docs, node.Doc) - names = []*ast.Ident{node.Name} - ret = true - case *ast.ValueSpec: - docs = append(docs, node.Doc) - names = node.Names - ret = false - case *ast.File: - return true - case *ast.StructType: - for _, field := range node.Fields.List { - doDocs(field.Names, []*ast.CommentGroup{field.Doc}) - } - return false - case *ast.InterfaceType: - for _, field := range node.Methods.List { - doDocs(field.Names, []*ast.CommentGroup{field.Doc}) - } - return false - default: - return false - } - if len(names) == 0 || len(docs) == 0 { - return ret - } - doDocs(names, docs) - - docs = docs[:0] - names = nil - return ret - } - ast.Inspect(f, fn) - } -} - func isInLoop(b *ssa.BasicBlock) bool { sets := functions.FindLoops(b.Parent()) for _, set := range sets { @@ -2655,154 +2550,9 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { return nil, nil } -type IsPure struct{} - -func (*IsPure) AFact() {} -func (*IsPure) String() string { return "IsPure" } - -var pureStdlib = map[string]struct{}{ - "errors.New": {}, - "fmt.Errorf": {}, - "fmt.Sprintf": {}, - "fmt.Sprint": {}, - "sort.Reverse": {}, - "strings.Map": {}, - "strings.Repeat": {}, - "strings.Replace": {}, - "strings.Title": {}, - "strings.ToLower": {}, - "strings.ToLowerSpecial": {}, - "strings.ToTitle": {}, - "strings.ToTitleSpecial": {}, - "strings.ToUpper": {}, - "strings.ToUpperSpecial": {}, - "strings.Trim": {}, - "strings.TrimFunc": {}, - "strings.TrimLeft": {}, - "strings.TrimLeftFunc": {}, - "strings.TrimPrefix": {}, - "strings.TrimRight": {}, - "strings.TrimRightFunc": {}, - "strings.TrimSpace": {}, - "strings.TrimSuffix": {}, - "(*net/http.Request).WithContext": {}, -} - -func checkPureFunctionsMark(pass *analysis.Pass) { - seen := map[*ssa.Function]struct{}{} - ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg - var check func(ssafn *ssa.Function) (ret bool) - check = func(ssafn *ssa.Function) (ret bool) { - if ssafn.Object() == nil { - // TODO(dh): support closures - return false - } - if pass.ImportObjectFact(ssafn.Object(), new(IsPure)) { - return true - } - if ssafn.Pkg != ssapkg { - // Function is in another package but wasn't marked as - // pure, ergo it isn't pure - return false - } - // Break recursion - if _, ok := seen[ssafn]; ok { - return false - } - - seen[ssafn] = struct{}{} - defer func() { - if ret { - pass.ExportObjectFact(ssafn.Object(), &IsPure{}) - } - }() - - if functions.IsStub(ssafn) { - return false - } - - if _, ok := pureStdlib[ssafn.Object().(*types.Func).FullName()]; ok { - return true - } - - if ssafn.Signature.Results().Len() == 0 { - // A function with no return values is empty or is doing some - // work we cannot see (for example because of build tags); - // don't consider it pure. 
- return false - } - - for _, param := range ssafn.Params { - if _, ok := param.Type().Underlying().(*types.Basic); !ok { - return false - } - } - - if ssafn.Blocks == nil { - return false - } - checkCall := func(common *ssa.CallCommon) bool { - if common.IsInvoke() { - return false - } - builtin, ok := common.Value.(*ssa.Builtin) - if !ok { - if common.StaticCallee() != ssafn { - if common.StaticCallee() == nil { - return false - } - if !check(common.StaticCallee()) { - return false - } - } - } else { - switch builtin.Name() { - case "len", "cap", "make", "new": - default: - return false - } - } - return true - } - for _, b := range ssafn.Blocks { - for _, ins := range b.Instrs { - switch ins := ins.(type) { - case *ssa.Call: - if !checkCall(ins.Common()) { - return false - } - case *ssa.Defer: - if !checkCall(&ins.Call) { - return false - } - case *ssa.Select: - return false - case *ssa.Send: - return false - case *ssa.Go: - return false - case *ssa.Panic: - return false - case *ssa.Store: - return false - case *ssa.FieldAddr: - return false - case *ssa.UnOp: - if ins.Op == token.MUL || ins.Op == token.AND { - return false - } - } - } - } - return true - } - for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { - check(ssafn) - } -} - func CheckPureFunctions(pass *analysis.Pass) (interface{}, error) { - checkPureFunctionsMark(pass) + pure := pass.ResultOf[facts.Purity].(facts.PurityResult) + fnLoop: for _, ssafn := range pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs { if IsInTest(pass, ssafn) { @@ -2840,7 +2590,7 @@ fnLoop: // TODO(dh): support anonymous functions continue } - if pass.ImportObjectFact(callee.Object(), new(IsPure)) { + if _, ok := pure[callee.Object().(*types.Func)]; ok { pass.Reportf(ins.Pos(), "%s is a pure function but its return value is ignored", callee.Name()) continue } @@ -2850,20 +2600,8 @@ fnLoop: return nil, nil } -func isDeprecated(pass *analysis.Pass, ident *ast.Ident) (bool, string) { - obj := pass.TypesInfo.ObjectOf(ident) - if obj.Pkg() == nil { - return false, "" - } - var depr IsDeprecated - if pass.ImportObjectFact(obj, &depr) { - return true, depr.Msg - } - return false, "" -} - func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { - checkDeprecatedMark(pass) + deprs := pass.ResultOf[facts.Deprecated].(facts.DeprecatedResult) // Selectors can appear outside of function literals, e.g. when // declaring package level variables. @@ -2895,7 +2633,7 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { // Don't flag stuff in our own package return true } - if ok, alt := isDeprecated(pass, sel.Sel); ok { + if depr, ok := deprs.Objects[obj]; ok { // Look for the first available alternative, not the first // version something was deprecated in. 
If a function was // deprecated in Go 1.6, an alternative has been available @@ -2908,14 +2646,13 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { } if tfn != nil { - var depr IsDeprecated - if pass.ImportObjectFact(tfn, &depr) { + if _, ok := deprs.Objects[tfn]; ok { // functions that are deprecated may use deprecated // symbols return true } } - pass.Reportf(sel.Pos(), "%s is deprecated: %s", Render(pass, sel), alt) + pass.Reportf(sel.Pos(), "%s is deprecated: %s", Render(pass, sel), depr.Msg) return true } return true @@ -2931,8 +2668,7 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { p := node.Path.Value path := p[1 : len(p)-1] imp := imps[path] - var depr IsDeprecated - if pass.ImportPackageFact(imp, &depr) { + if depr, ok := deprs.Packages[imp]; ok { pass.Reportf(node.Pos(), "Package %s is deprecated: %s", path, depr.Msg) } } From a19cfe8322cbd8410ea72bef0564c70f72719dda Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 12:13:22 +0200 Subject: [PATCH 147/254] lint: actually persist object facts to cache --- lint/runner.go | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 5d0f3841a..8e248f2bb 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -357,18 +357,26 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter } // Persist facts to cache - facts := make([]Fact, 0, len(ac.newFacts)) + var facts []Fact + // TODO(dh): also store package facts of dependencies for _, fact := range ac.newFacts { if fact.obj == nil { facts = append(facts, Fact{fact.obj.Pkg().Path(), "", fact.fact}) } else { - path, err := objectpath.For(fact.obj) - if err != nil { - continue - } - facts = append(facts, Fact{fact.obj.Pkg().Path(), string(path), fact.fact}) + panic("unexpected object fact") } } + for obj, afacts := range ac.pkg.facts[ac.analyzerID] { + pkgpath := obj.Pkg().Path() + path, err := objectpath.For(obj) + if err != nil { + continue + } + for _, fact := range afacts { + facts = append(facts, Fact{pkgpath, string(path), fact}) + } + } + buf := &bytes.Buffer{} if err := gob.NewEncoder(buf).Encode(facts); err != nil { return nil, err From 2e24ccd9327869ebe190ec55e61471d4c5855afd Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 2 May 2019 13:30:47 +0200 Subject: [PATCH 148/254] lint: don't store facts of dependencies in cache We currently have no way of making use of these facts when loading from export data, so don't waste time and disk space on the data. 
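Before the diff, a minimal, self-contained sketch of the gob-based fact caching that this patch and the previous one adjust. The names here (CachedFact, ExampleFact) are stand-ins for this sketch only; the real runner derives the path via golang.org/x/tools/go/types/objectpath and uses an empty path for package-level facts.

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"

	"golang.org/x/tools/go/analysis"
)

// CachedFact pairs a fact with the object path of the object it describes;
// an empty Path marks a package-level fact.
type CachedFact struct {
	Path string
	Fact analysis.Fact
}

// ExampleFact is a stand-in for facts such as IsDeprecated.
type ExampleFact struct{ Msg string }

func (*ExampleFact) AFact() {}

func main() {
	// Concrete fact types must be registered so gob can encode the
	// analysis.Fact interface values.
	gob.Register(&ExampleFact{})

	facts := []CachedFact{
		{Path: "", Fact: &ExampleFact{Msg: "package-level fact"}},
		{Path: "Foo.Bar", Fact: &ExampleFact{Msg: "object-level fact"}},
	}

	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(facts); err != nil {
		panic(err)
	}

	var decoded []CachedFact
	if err := gob.NewDecoder(&buf).Decode(&decoded); err != nil {
		panic(err)
	}
	fmt.Println(decoded[1].Path, decoded[1].Fact.(*ExampleFact).Msg)
}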
--- lint/runner.go | 25 +++++++++---------------- 1 file changed, 9 insertions(+), 16 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 8e248f2bb..7519caea2 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -85,9 +85,8 @@ func (ids analyzerIDs) get(a *analysis.Analyzer) int { } type Fact struct { - PkgPath string - ObjPath string - Fact analysis.Fact + Path string + Fact analysis.Fact } type newFact struct { @@ -358,22 +357,23 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter // Persist facts to cache var facts []Fact - // TODO(dh): also store package facts of dependencies for _, fact := range ac.newFacts { if fact.obj == nil { - facts = append(facts, Fact{fact.obj.Pkg().Path(), "", fact.fact}) + facts = append(facts, Fact{"", fact.fact}) } else { panic("unexpected object fact") } } for obj, afacts := range ac.pkg.facts[ac.analyzerID] { - pkgpath := obj.Pkg().Path() + if obj.Pkg() != ac.pkg.Package.Types { + continue + } path, err := objectpath.For(obj) if err != nil { continue } for _, fact := range afacts { - facts = append(facts, Fact{pkgpath, string(path), fact}) + facts = append(facts, Fact{string(path), fact}) } } @@ -576,19 +576,12 @@ func (r *Runner) loadPkg(pkg *Package, analyzers []*analysis.Analyzer) error { } for _, f := range facts { - if pkg.PkgPath != f.PkgPath { - // TODO(dh): for now we load all packages in the - // dependency graph and don't utilize the fact - // that a package contains all the facts of its - // dependencies. - continue - } - if f.ObjPath == "" { + if f.Path == "" { // This is a package fact pkg.pkgFacts[r.analyzerIDs.get(a)] = append(pkg.pkgFacts[r.analyzerIDs.get(a)], f.Fact) continue } - obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.ObjPath)) + obj, err := objectpath.Object(pkg.Types, objectpath.Path(f.Path)) if err != nil { // Be lenient about these errors. For example, when // analysing io/ioutil from source, we may get a fact From 4f009094f37e4d715dff066471489c5c48c2f9b0 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 3 May 2019 18:34:22 +0200 Subject: [PATCH 149/254] unused: we may still encounter interesting objects from export data --- unused/unused.go | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 279621614..98cba11e7 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -529,6 +529,8 @@ func (c *Checker) Result() []types.Object { continue } position := c.fset.PositionFor(v.Pos(), false) + position.Column = 1 + position.Offset = 0 if _, ok := c.seen[position]; !ok { out2 = append(out2, v) } @@ -669,13 +671,27 @@ func (c *Checker) results() []types.Object { report := func(node *Node) { if node.seen { var pos token.Pos - switch obj := node.obj.(type) { - case types.Object: + if obj, ok := node.obj.(types.Object); ok { pos = obj.Pos() } - if pos != 0 { + if pos != token.NoPos { position := c.fset.PositionFor(pos, false) + // All packages passed on the command line are being + // loaded from source. However, thanks to tests and + // test variants of packages, we encounter the same + // object many different times. Worse, some of these + // forms may have been loaded from export data + // (despite being a variant of a package we've loaded + // from source…). Objects from export data do not have + // column information, so we force it to one, so that + // objects loaded from source and from export have the + // same position. + // + // SImilarly, the "offset" differs, too. 
+ + position.Column = 1 + position.Offset = 0 c.seenMu.Lock() c.seen[position] = struct{}{} c.seenMu.Unlock() From be65da1b119be738287ce2292fa234fed12393c1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 3 May 2019 18:54:06 +0200 Subject: [PATCH 150/254] lint: make sure injected analyzers have valid IDs --- lint/runner.go | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 7519caea2..52e12a38f 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -81,7 +81,11 @@ type analyzerIDs struct { } func (ids analyzerIDs) get(a *analysis.Analyzer) int { - return ids.m[a] + id, ok := ids.m[a] + if !ok { + panic(fmt.Sprintf("no analyzer ID for %s", a.Name)) + } + return id } type Fact struct { @@ -311,6 +315,8 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi return ac } +var injectedAnalyses = []*analysis.Analyzer{IsGeneratedAnalyzer, config.Analyzer} + func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { if !ac.pkg.fromSource { panic(fmt.Sprintf("internal error: %s was not loaded from source", ac.pkg)) @@ -325,7 +331,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter // required by interna of the runner. Analyses that themselves // make use of either have an explicit dependency so that other // runners work correctly, too. - req = append(req, IsGeneratedAnalyzer, config.Analyzer) + req = append(req, injectedAnalyses...) } for _, req := range req { acReq := r.makeAnalysisAction(req, ac.pkg) @@ -428,6 +434,9 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy for _, a := range analyzers { dfs(a) } + for _, a := range injectedAnalyses { + dfs(a) + } var dcfg packages.Config if cfg != nil { From bfc2b9cd003ccd81fc5921406a1cea0e15bb78cb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 3 May 2019 18:58:31 +0200 Subject: [PATCH 151/254] unused: don't be over-eager in deduplicating objects Multiple objects may be defined on the same line, and since we're discarding column information, their positions are no longer unique. Incorporate the object's string representation to account for this. 
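To make the reasoning concrete before the diff: the sketch below (illustrative names only, not the checker's API) keys reports on the object's string form plus a position whose column and offset have been normalized, so that the same object loaded from source and from export data collides, while distinct objects on the same line do not.

package main

import (
	"fmt"
	"go/token"
)

type key struct {
	obj string
	pos token.Position
}

// normalize discards the fields that differ between objects loaded from
// source and from export data.
func normalize(p token.Position) token.Position {
	p.Column = 1
	p.Offset = 0
	return p
}

func main() {
	seen := map[key]struct{}{}

	report := func(obj string, pos token.Position) {
		k := key{obj, normalize(pos)}
		if _, ok := seen[k]; ok {
			return // same object already reported via another package variant
		}
		seen[k] = struct{}{}
		fmt.Println("unused:", obj, "at", k.pos)
	}

	// Two distinct objects on the same line remain distinct…
	report("func pkg.foo", token.Position{Filename: "a.go", Line: 3, Column: 1})
	report("func pkg.bar", token.Position{Filename: "a.go", Line: 3, Column: 20})
	// …but the same object seen with a different column is deduplicated.
	report("func pkg.foo", token.Position{Filename: "a.go", Line: 3, Column: 6})
}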
--- unused/unused.go | 55 +++++++++++++++++++++++++++--------------------- 1 file changed, 31 insertions(+), 24 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 98cba11e7..97a2b1d36 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -403,6 +403,11 @@ type pkg struct { SrcFuncs []*ssa.Function } +type seenKey struct { + s string + pos token.Position +} + type Checker struct { mu sync.Mutex @@ -413,7 +418,7 @@ type Checker struct { allPackages map[*types.Package]struct{} seenMu sync.Mutex - seen map[token.Position]struct{} + seen map[seenKey]struct{} graph *Graph out []types.Object @@ -422,7 +427,7 @@ type Checker struct { func NewChecker() *Checker { c := &Checker{ - seen: map[token.Position]struct{}{}, + seen: map[seenKey]struct{}{}, initialPackages: map[*types.Package]struct{}{}, allPackages: map[*types.Package]struct{}{}, } @@ -531,7 +536,8 @@ func (c *Checker) Result() []types.Object { position := c.fset.PositionFor(v.Pos(), false) position.Column = 1 position.Offset = 0 - if _, ok := c.seen[position]; !ok { + k := seenKey{v.String(), position} + if _, ok := c.seen[k]; !ok { out2 = append(out2, v) } } @@ -673,28 +679,29 @@ func (c *Checker) results() []types.Object { var pos token.Pos if obj, ok := node.obj.(types.Object); ok { pos = obj.Pos() - } - if pos != token.NoPos { - position := c.fset.PositionFor(pos, false) - // All packages passed on the command line are being - // loaded from source. However, thanks to tests and - // test variants of packages, we encounter the same - // object many different times. Worse, some of these - // forms may have been loaded from export data - // (despite being a variant of a package we've loaded - // from source…). Objects from export data do not have - // column information, so we force it to one, so that - // objects loaded from source and from export have the - // same position. - // - // SImilarly, the "offset" differs, too. - - position.Column = 1 - position.Offset = 0 - c.seenMu.Lock() - c.seen[position] = struct{}{} - c.seenMu.Unlock() + if pos != token.NoPos { + position := c.fset.PositionFor(pos, false) + // All packages passed on the command line are being + // loaded from source. However, thanks to tests and + // test variants of packages, we encounter the same + // object many different times. Worse, some of these + // forms may have been loaded from export data + // (despite being a variant of a package we've loaded + // from source…). Objects from export data do not have + // column information, so we force it to one, so that + // objects loaded from source and from export have the + // same position. + // + // Similarly, the "offset" differs, too. 
+ + position.Column = 1 + position.Offset = 0 + k := seenKey{obj.String(), position} + c.seenMu.Lock() + c.seen[k] = struct{}{} + c.seenMu.Unlock() + } } return } From af525d38f2aeeda4ece77ad18475d992167c2334 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 3 May 2019 19:08:37 +0200 Subject: [PATCH 152/254] facts: move relevant tests into facts package --- facts/facts_test.go | 15 +++++++++++++++ facts/testdata/src/Deprecated/Deprecated.go | 5 +++++ facts/testdata/src/Purity/CheckPureFunctions.go | 17 +++++++++++++++++ .../src/CheckDeprecated_go14/CheckDeprecated.go | 2 +- .../src/CheckDeprecated_go18/CheckDeprecated.go | 2 +- .../CheckPureFunctions/CheckPureFunctions.go | 2 +- 6 files changed, 40 insertions(+), 3 deletions(-) create mode 100644 facts/facts_test.go create mode 100644 facts/testdata/src/Deprecated/Deprecated.go create mode 100644 facts/testdata/src/Purity/CheckPureFunctions.go diff --git a/facts/facts_test.go b/facts/facts_test.go new file mode 100644 index 000000000..6e7de1afa --- /dev/null +++ b/facts/facts_test.go @@ -0,0 +1,15 @@ +package facts + +import ( + "testing" + + "golang.org/x/tools/go/analysis/analysistest" +) + +func TestDeprecated(t *testing.T) { + analysistest.Run(t, analysistest.TestData(), Deprecated, "Deprecated") +} + +func TestPurity(t *testing.T) { + analysistest.Run(t, analysistest.TestData(), Purity, "Purity") +} diff --git a/facts/testdata/src/Deprecated/Deprecated.go b/facts/testdata/src/Deprecated/Deprecated.go new file mode 100644 index 000000000..14f463d85 --- /dev/null +++ b/facts/testdata/src/Deprecated/Deprecated.go @@ -0,0 +1,5 @@ +package pkg + +// Deprecated: Don't use this. +func fn2() { // want fn2:`Deprecated: Don't use this\.` +} diff --git a/facts/testdata/src/Purity/CheckPureFunctions.go b/facts/testdata/src/Purity/CheckPureFunctions.go new file mode 100644 index 000000000..b31f153e0 --- /dev/null +++ b/facts/testdata/src/Purity/CheckPureFunctions.go @@ -0,0 +1,17 @@ +package pkg + +func foo(a, b int) int { return a + b } // want foo:"is pure" +func bar(a, b int) int { + println(a + b) + return a + b +} + +func empty() {} +func stubPointer() *int { return nil } +func stubInt() int { return 0 } + +func fn3() { + empty() + stubPointer() + stubInt() +} diff --git a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go index bd2420707..1e8c272ab 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go @@ -28,7 +28,7 @@ func fn1(err error) { } // Deprecated: Don't use this. -func fn2() { // want fn2:`Deprecated: Don't use this\.` +func fn2() { _ = syscall.StringByteSlice("") anon := func(x int) { diff --git a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go index 06754df19..470d46bad 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go @@ -28,7 +28,7 @@ func fn1(err error) { } // Deprecated: Don't use this. 
-func fn2() { // want fn2:`Deprecated: Don't use this\.` +func fn2() { _ = syscall.StringByteSlice("") anon := func(x int) { diff --git a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go index 5a269e829..a5fbd1444 100644 --- a/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go +++ b/staticcheck/testdata/src/CheckPureFunctions/CheckPureFunctions.go @@ -17,7 +17,7 @@ func fn2() { r.WithContext(context.Background()) // want `is a pure function but its return value is ignored` } -func foo(a, b int) int { return a + b } // want foo:"IsPure" +func foo(a, b int) int { return a + b } func bar(a, b int) int { println(a + b) return a + b From 7847ae53c86720a3c099bf768b66672a8f6f28c4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 4 May 2019 12:55:41 +0200 Subject: [PATCH 153/254] lint, facts: move analyzers into facts package --- {lint => facts}/generated.go | 20 ++++++++++++- facts/token.go | 24 +++++++++++++++ functions/pure.go | 13 ++++++-- lint/analysis.go | 39 ------------------------ lint/lintdsl/lintdsl.go | 7 +++-- lint/runner.go | 7 +++-- simple/analysis.go | 58 ++++++++++++++++++------------------ staticcheck/analysis.go | 15 +++++----- stylecheck/analysis.go | 12 ++++---- 9 files changed, 104 insertions(+), 91 deletions(-) rename {lint => facts}/generated.go (60%) create mode 100644 facts/token.go delete mode 100644 lint/analysis.go diff --git a/lint/generated.go b/facts/generated.go similarity index 60% rename from lint/generated.go rename to facts/generated.go index 655328296..9da7b4d86 100644 --- a/lint/generated.go +++ b/facts/generated.go @@ -1,10 +1,13 @@ -package lint +package facts import ( "bufio" "bytes" "io" "os" + "reflect" + + "golang.org/x/tools/go/analysis" ) var ( @@ -42,3 +45,18 @@ func isGenerated(path string) bool { } return false } + +var Generated = &analysis.Analyzer{ + Name: "isgenerated", + Doc: "annotate file names that have been code generated", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[string]bool{} + for _, f := range pass.Files { + path := pass.Fset.PositionFor(f.Pos(), false).Filename + m[path] = isGenerated(path) + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[string]bool{}), +} diff --git a/facts/token.go b/facts/token.go new file mode 100644 index 000000000..26e76ff73 --- /dev/null +++ b/facts/token.go @@ -0,0 +1,24 @@ +package facts + +import ( + "go/ast" + "go/token" + "reflect" + + "golang.org/x/tools/go/analysis" +) + +var TokenFile = &analysis.Analyzer{ + Name: "tokenfileanalyzer", + Doc: "creates a mapping of *token.File to *ast.File", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[*token.File]*ast.File{} + for _, af := range pass.Files { + tf := pass.Fset.File(af.Pos()) + m[tf] = af + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[*token.File]*ast.File{}), +} diff --git a/functions/pure.go b/functions/pure.go index cf914e3bb..8bc558771 100644 --- a/functions/pure.go +++ b/functions/pure.go @@ -1,10 +1,19 @@ package functions import ( - "honnef.co/go/tools/lint/lintdsl" "honnef.co/go/tools/ssa" ) +func filterDebug(instr []ssa.Instruction) []ssa.Instruction { + var out []ssa.Instruction + for _, ins := range instr { + if _, ok := ins.(*ssa.DebugRef); !ok { + out = append(out, ins) + } + } + return out +} + // IsStub reports whether a function is a stub. 
A function is // considered a stub if it has no instructions or exactly one // instruction, which must be either returning only constant values or @@ -16,7 +25,7 @@ func IsStub(fn *ssa.Function) bool { if len(fn.Blocks) > 1 { return false } - instrs := lintdsl.FilterDebug(fn.Blocks[0].Instrs) + instrs := filterDebug(fn.Blocks[0].Instrs) if len(instrs) != 1 { return false } diff --git a/lint/analysis.go b/lint/analysis.go deleted file mode 100644 index 6e914e02a..000000000 --- a/lint/analysis.go +++ /dev/null @@ -1,39 +0,0 @@ -package lint - -import ( - "go/ast" - "go/token" - "reflect" - - "golang.org/x/tools/go/analysis" -) - -var IsGeneratedAnalyzer = &analysis.Analyzer{ - Name: "isgenerated", - Doc: "annotate file names that have been code generated", - Run: func(pass *analysis.Pass) (interface{}, error) { - m := map[string]bool{} - for _, f := range pass.Files { - path := pass.Fset.PositionFor(f.Pos(), false).Filename - m[path] = isGenerated(path) - } - return m, nil - }, - RunDespiteErrors: true, - ResultType: reflect.TypeOf(map[string]bool{}), -} - -var TokenFileAnalyzer = &analysis.Analyzer{ - Name: "tokenfileanalyzer", - Doc: "creates a mapping of *token.File to *ast.File", - Run: func(pass *analysis.Pass) (interface{}, error) { - m := map[*token.File]*ast.File{} - for _, af := range pass.Files { - tf := pass.Fset.File(af.Pos()) - m[tf] = af - } - return m, nil - }, - RunDespiteErrors: true, - ResultType: reflect.TypeOf(map[*token.File]*ast.File{}), -} diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 31cf2eeea..4eb2b40fb 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -14,6 +14,7 @@ import ( "strings" "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/facts" "honnef.co/go/tools/lint" "honnef.co/go/tools/ssa" ) @@ -355,7 +356,7 @@ func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Fiel func File(pass *analysis.Pass, node lint.Positioner) *ast.File { pass.Fset.PositionFor(node.Pos(), true) - m := pass.ResultOf[lint.TokenFileAnalyzer].(map[*token.File]*ast.File) + m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File) return m[pass.Fset.File(node.Pos())] } @@ -363,13 +364,13 @@ func File(pass *analysis.Pass, node lint.Positioner) *ast.File { // //line directives. 
func IsGenerated(pass *analysis.Pass, pos token.Pos) bool { file := pass.Fset.PositionFor(pos, false).Filename - m := pass.ResultOf[lint.IsGeneratedAnalyzer].(map[string]bool) + m := pass.ResultOf[facts.Generated].(map[string]bool) return m[file] } func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) { file := lint.DisplayPosition(pass.Fset, pos).Filename - m := pass.ResultOf[lint.IsGeneratedAnalyzer].(map[string]bool) + m := pass.ResultOf[facts.Generated].(map[string]bool) if m[file] { return } diff --git a/lint/runner.go b/lint/runner.go index 52e12a38f..22efdcb99 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -20,6 +20,7 @@ import ( "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/objectpath" "honnef.co/go/tools/config" + "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/cache" "honnef.co/go/tools/loader" ) @@ -315,7 +316,7 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi return ac } -var injectedAnalyses = []*analysis.Analyzer{IsGeneratedAnalyzer, config.Analyzer} +var injectedAnalyses = []*analysis.Analyzer{facts.Generated, config.Analyzer} func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { if !ac.pkg.fromSource { @@ -326,7 +327,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter // First analyze it with dependencies var req []*analysis.Analyzer req = append(req, ac.analyzer.Requires...) - if pass.Analyzer != IsGeneratedAnalyzer && pass.Analyzer != config.Analyzer { + if pass.Analyzer != facts.Generated && pass.Analyzer != config.Analyzer { // Ensure all packages have the generated map and config. This is // required by interna of the runner. Analyses that themselves // make use of either have an explicit dependency so that other @@ -762,7 +763,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil { pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config) } - pkg.gen = pkg.results[r.analyzerIDs.get(IsGeneratedAnalyzer)].v.(map[string]bool) + pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]bool) // In a previous version of the code, we would throw away all type // information and reload it from export data. 
That was diff --git a/simple/analysis.go b/simple/analysis.go index 9449e2e74..d2a5959b2 100644 --- a/simple/analysis.go +++ b/simple/analysis.go @@ -5,8 +5,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/passes/buildssa" - "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" ) @@ -21,161 +21,161 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "S1000", Run: LintSingleCaseSelect, Doc: docS1000, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1001": { Name: "S1001", Run: LintLoopCopy, Doc: docS1001, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1002": { Name: "S1002", Run: LintIfBoolCmp, Doc: docS1002, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1003": { Name: "S1003", Run: LintStringsContains, Doc: docS1003, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1004": { Name: "S1004", Run: LintBytesCompare, Doc: docS1004, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1005": { Name: "S1005", Run: LintUnnecessaryBlank, Doc: docS1005, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1006": { Name: "S1006", Run: LintForTrue, Doc: docS1006, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1007": { Name: "S1007", Run: LintRegexpRaw, Doc: docS1007, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1008": { Name: "S1008", Run: LintIfReturn, Doc: docS1008, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1009": { Name: "S1009", Run: LintRedundantNilCheckWithLen, Doc: docS1009, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1010": { Name: "S1010", Run: LintSlicing, Doc: docS1010, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1011": { Name: "S1011", Run: LintLoopAppend, Doc: docS1011, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1012": { Name: "S1012", Run: LintTimeSince, Doc: docS1012, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1016": { Name: "S1016", Run: 
LintSimplerStructConversion, Doc: docS1016, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1017": { Name: "S1017", Run: LintTrim, Doc: docS1017, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1018": { Name: "S1018", Run: LintLoopSlide, Doc: docS1018, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1019": { Name: "S1019", Run: LintMakeLenCap, Doc: docS1019, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1020": { Name: "S1020", Run: LintAssertNotNil, Doc: docS1020, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1021": { Name: "S1021", Run: LintDeclareAssign, Doc: docS1021, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1023": { Name: "S1023", Run: LintRedundantBreak, Doc: docS1023, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1024": { Name: "S1024", Run: LintTimeUntil, Doc: docS1024, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1025": { Name: "S1025", Run: LintRedundantSprintf, Doc: docS1025, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1028": { Name: "S1028", Run: LintErrorsNewSprintf, Doc: docS1028, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1029": { @@ -189,35 +189,35 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "S1030", Run: LintBytesBufferConversions, Doc: docS1030, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1031": { Name: "S1031", Run: LintNilCheckAroundRange, Doc: docS1031, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1032": { Name: "S1032", Run: LintSortHelpers, Doc: docS1032, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1033": { Name: "S1033", Run: LintGuardedDelete, Doc: docS1033, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1034": { Name: "S1034", Run: LintSimplifyTypeSwitch, Doc: docS1034, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, 
facts.Generated}, Flags: newFlagSet(), }, } diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index bfb53c11b..1e0513bf3 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -5,7 +5,6 @@ import ( "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/passes/buildssa" - "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" "golang.org/x/tools/go/analysis" @@ -250,7 +249,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "SA4000", Run: CheckLhsRhsIdentical, Doc: docSA4000, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.TokenFileAnalyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile, facts.Generated}, Flags: newFlagSet(), }, "SA4001": { @@ -348,7 +347,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "SA4016", Run: CheckSillyBitwiseOps, Doc: docSA4016, - Requires: []*analysis.Analyzer{buildssa.Analyzer, lint.TokenFileAnalyzer}, + Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile}, Flags: newFlagSet(), }, "SA4017": { @@ -362,14 +361,14 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "SA4018", Run: CheckSelfAssignment, Doc: docSA4018, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, "SA4019": { Name: "SA4019", Run: CheckDuplicateBuildConstraints, Doc: docSA4019, - Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{facts.Generated}, Flags: newFlagSet(), }, "SA4020": { @@ -383,7 +382,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "SA4021", Run: CheckSingleArgAppend, Doc: docSA4021, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, @@ -505,7 +504,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "SA9003", Run: CheckEmptyBranch, Doc: docSA9003, - Requires: []*analysis.Analyzer{buildssa.Analyzer, lint.TokenFileAnalyzer, lint.IsGeneratedAnalyzer}, + Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile, facts.Generated}, Flags: newFlagSet(), }, "SA9004": { @@ -520,7 +519,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "SA9005", Run: callChecker(checkNoopMarshal), Doc: docSA9005, - Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, } diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go index 5a36ec77c..8a205e39d 100644 --- a/stylecheck/analysis.go +++ b/stylecheck/analysis.go @@ -6,8 +6,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "honnef.co/go/tools/config" + "honnef.co/go/tools/facts" "honnef.co/go/tools/internal/passes/buildssa" - "honnef.co/go/tools/lint" "honnef.co/go/tools/lint/lintutil" ) @@ -29,14 +29,14 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "ST1001", Run: CheckDotImports, Doc: docST1001, - Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, config.Analyzer}, + Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer}, Flags: newFlagSet(), }, "ST1003": { Name: "ST1003", Run: CheckNames, Doc: docST1003, - Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, config.Analyzer}, 
+ Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer}, Flags: newFlagSet(), }, "ST1005": { @@ -77,14 +77,14 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "ST1013", Run: CheckHTTPStatusCodes, Doc: docST1013, - Requires: []*analysis.Analyzer{lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer, config.Analyzer}, + Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer}, Flags: newFlagSet(), }, "ST1015": { Name: "ST1015", Run: CheckDefaultCaseOrder, Doc: docST1015, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, "ST1016": { @@ -98,7 +98,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "ST1017", Run: CheckYodaConditions, Doc: docST1017, - Requires: []*analysis.Analyzer{inspect.Analyzer, lint.IsGeneratedAnalyzer, lint.TokenFileAnalyzer}, + Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, "ST1018": { From 1f2ef40ad37ea4a2e9e4776c3874f44bb12078fa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 4 May 2019 13:11:07 +0200 Subject: [PATCH 154/254] all: fix staticcheck findings --- cmd/keyify/keyify.go | 2 ++ go/types/typeutil/callee_test.go | 2 +- ssa/builder.go | 2 +- ssa/dom.go | 6 ++++-- ssa/emit.go | 1 + unused/unused.go | 1 + 6 files changed, 10 insertions(+), 4 deletions(-) diff --git a/cmd/keyify/keyify.go b/cmd/keyify/keyify.go index 3ae5d271b..a552d6f54 100644 --- a/cmd/keyify/keyify.go +++ b/cmd/keyify/keyify.go @@ -187,6 +187,8 @@ func keyify( if fRecursive { if val2, ok := val.(*ast.CompositeLit); ok { if _, ok := pkg.TypeOf(val2.Type).Underlying().(*types.Struct); ok { + // FIXME(dh): this code is obviously wrong. But + // what were we intending to do here? var lines int numLines += lines val, lines = keyify(pkg, val2) diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index 6875d699f..2201eee71 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -63,7 +63,7 @@ func noncalls() { Uses: make(map[*ast.Ident]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - cfg := &types.Config{Importer: importer.For("source", nil)} + cfg := &types.Config{Importer: importer.ForCompiler(fset, "source", nil)} if _, err := cfg.Check("p", fset, []*ast.File{f}, info); err != nil { t.Fatal(err) } diff --git a/ssa/builder.go b/ssa/builder.go index 032819a2a..537b7e3ff 100644 --- a/ssa/builder.go +++ b/ssa/builder.go @@ -58,7 +58,7 @@ var ( tString = types.Typ[types.String] tUntypedNil = types.Typ[types.UntypedNil] tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators - tEface = types.NewInterface(nil, nil).Complete() + tEface = types.NewInterfaceType(nil, nil).Complete() // SSA Value constants. vZero = intConst(0) diff --git a/ssa/dom.go b/ssa/dom.go index 12ef4308f..a036be87c 100644 --- a/ssa/dom.go +++ b/ssa/dom.go @@ -53,7 +53,7 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre // func (f *Function) DomPreorder() []*BasicBlock { n := len(f.Blocks) - order := make(byDomPreorder, n, n) + order := make(byDomPreorder, n) copy(order, f.Blocks) sort.Sort(order) return order @@ -123,7 +123,7 @@ func buildDomTree(f *Function) { n := len(f.Blocks) // Allocate space for 5 contiguous [n]*BasicBlock arrays: // sdom, parent, ancestor, preorder, buckets. 
- space := make([]*BasicBlock, 5*n, 5*n) + space := make([]*BasicBlock, 5*n) lt := ltState{ sdom: space[0:n], parent: space[n : 2*n], @@ -310,6 +310,7 @@ func sanityCheckDomTree(f *Function) { // Printing functions ---------------------------------------- // printDomTree prints the dominator tree as text, using indentation. +//lint:ignore U1000 used during debugging func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) for _, child := range v.dom.children { @@ -319,6 +320,7 @@ func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { // printDomTreeDot prints the dominator tree of f in AT&T GraphViz // (.dot) format. +//lint:ignore U1000 used during debugging func printDomTreeDot(buf *bytes.Buffer, f *Function) { fmt.Fprintln(buf, "//", f) fmt.Fprintln(buf, "digraph domtree {") diff --git a/ssa/emit.go b/ssa/emit.go index 1036988ad..6bf9ec32d 100644 --- a/ssa/emit.go +++ b/ssa/emit.go @@ -127,6 +127,7 @@ func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { x = emitConv(f, x, y.Type()) } else if _, ok := y.(*Const); ok { y = emitConv(f, y, x.Type()) + //lint:ignore SA9003 no-op } else { // other cases, e.g. channels. No-op. } diff --git a/unused/unused.go b/unused/unused.go index 97a2b1d36..78c11d271 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1219,6 +1219,7 @@ func (g *Graph) entry(pkg *pkg) { if mObj != nil { g.see(mObj) } + //lint:ignore SA9003 handled implicitly if m.Name() == "init" { // (1.5) packages use init functions // From b2376133555bedb0dc964cb3eae31a044d15fce9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 4 May 2019 15:08:15 +0200 Subject: [PATCH 155/254] unused: only some exported functions in tests are used --- unused/unused.go | 45 +++++++++++++++++++++++++++++++++------------ 1 file changed, 33 insertions(+), 12 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 78c11d271..9215bc152 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -482,6 +482,7 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { // (e1) all packages share a single graph if c.graph == nil { c.graph = NewGraph() + c.graph.initialPackages = c.initialPackages c.graph.wholeProgram = true } c.processPkg(pkg) @@ -489,6 +490,7 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { c.graph.pkg = nil } else { c.graph = NewGraph() + c.graph.initialPackages = c.initialPackages c.graph.wholeProgram = false c.processPkg(pkg) @@ -743,8 +745,9 @@ func (c *Checker) processPkg(pkg *pkg) { } type Graph struct { - pkg *ssa.Package - msCache typeutil.MethodSetCache + initialPackages map[*types.Package]struct{} + pkg *ssa.Package + msCache typeutil.MethodSetCache wholeProgram bool @@ -915,7 +918,14 @@ func (g *Graph) see(obj interface{}) { assert(obj != nil) if obj, ok := obj.(types.Object); ok && obj.Pkg() != nil { - if !g.isInterestingPackage(obj.Pkg()) { + found := false + for pkg := range g.initialPackages { + if obj.Pkg() == pkg { + found = true + break + } + } + if !found { return } } @@ -931,7 +941,14 @@ func (g *Graph) use(used, by interface{}, reason string) { assert(used != nil) if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { - if !g.isInterestingPackage(obj.Pkg()) { + found := false + for pkg := range g.initialPackages { + if obj.Pkg() == pkg { + found = true + break + } + } + if !found { return } } @@ -956,6 +973,9 @@ func (g *Graph) seeAndUse(used, by interface{}, reason string) { g.use(used, by, reason) } +// 
trackExportedIdentifier reports whether obj should be considered +// used due to being exported, checking various conditions that affect +// the decision. func (g *Graph) trackExportedIdentifier(obj types.Object) bool { if !obj.Exported() { // object isn't exported, the question is moot @@ -979,14 +999,15 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { return false } - // TODO(dh): the following comment is no longer true - // - // at one point we only considered exported identifiers in - // *_test.go files if they were Benchmark, Example or Test - // functions. However, this doesn't work when we look at one - // package at a time, because objects exported in a test variant - // of a package may be used by the xtest package. The only - // solution would be to look at multiple packages at once + if strings.HasSuffix(path, "_test.go") { + if strings.HasPrefix(obj.Name(), "Test") || + strings.HasPrefix(obj.Name(), "Benchmark") || + strings.HasPrefix(obj.Name(), "Example") { + return true + } + return false + } + return true } From 46fd16210933107348a34d2d771d952380ac8bd4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 5 May 2019 18:02:20 +0200 Subject: [PATCH 156/254] Use ssa.BlockSet --- functions/loops.go | 12 ++++++++---- ssa/lift.go | 30 +++++++++++++++++------------- staticcheck/lint.go | 2 +- 3 files changed, 26 insertions(+), 18 deletions(-) diff --git a/functions/loops.go b/functions/loops.go index 92514af9a..15877a2f9 100644 --- a/functions/loops.go +++ b/functions/loops.go @@ -2,7 +2,7 @@ package functions import "honnef.co/go/tools/ssa" -type Loop map[*ssa.BasicBlock]bool +type Loop struct{ ssa.BlockSet } func FindLoops(fn *ssa.Function) []Loop { if fn.Blocks == nil { @@ -18,12 +18,16 @@ func FindLoops(fn *ssa.Function) []Loop { // n is a back-edge to h // h is the loop header if n == h { - sets = append(sets, Loop{n: true}) + set := Loop{} + set.Add(n) + sets = append(sets, set) continue } - set := Loop{h: true, n: true} + set := Loop{} + set.Add(h) + set.Add(n) for _, b := range allPredsBut(n, h, nil) { - set[b] = true + set.Add(b) } sets = append(sets, set) } diff --git a/ssa/lift.go b/ssa/lift.go index 048e9b032..531358fa3 100644 --- a/ssa/lift.go +++ b/ssa/lift.go @@ -341,10 +341,10 @@ func phiHasDirectReferrer(phi *Phi) bool { return false } -type blockSet struct{ big.Int } // (inherit methods from Int) +type BlockSet struct{ big.Int } // (inherit methods from Int) // add adds b to the set and returns true if the set changed. -func (s *blockSet) add(b *BasicBlock) bool { +func (s *BlockSet) Add(b *BasicBlock) bool { i := b.Index if s.Bit(i) != 0 { return false @@ -353,9 +353,13 @@ func (s *blockSet) add(b *BasicBlock) bool { return true } +func (s *BlockSet) Has(b *BasicBlock) bool { + return s.Bit(b.Index) == 1 +} + // take removes an arbitrary element from a set s and // returns its index, or returns -1 if empty. -func (s *blockSet) take() int { +func (s *BlockSet) Take() int { l := s.BitLen() for i := 0; i < l; i++ { if s.Bit(i) == 1 { @@ -403,7 +407,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool // Compute defblocks, the set of blocks containing a // definition of the alloc cell. 
- var defblocks blockSet + var defblocks BlockSet for _, instr := range *alloc.Referrers() { // Bail out if we discover the alloc is not liftable; // the only operations permitted to use the alloc are @@ -416,7 +420,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool if instr.Addr != alloc { panic("Alloc.Referrers is inconsistent") } - defblocks.add(instr.Block()) + defblocks.Add(instr.Block()) case *UnOp: if instr.Op != token.MUL { return false // not a load @@ -431,7 +435,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool } } // The Alloc itself counts as a (zero) definition of the cell. - defblocks.add(alloc.Block()) + defblocks.Add(alloc.Block()) if debugLifting { fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) @@ -448,18 +452,18 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool // // TODO(adonovan): opt: recycle slice storage for W, // hasAlready, defBlocks across liftAlloc calls. - var hasAlready blockSet + var hasAlready BlockSet // Initialize W and work to defblocks. - var work blockSet = defblocks // blocks seen - var W blockSet // blocks to do + var work BlockSet = defblocks // blocks seen + var W BlockSet // blocks to do W.Set(&defblocks.Int) // Traverse iterated dominance frontier, inserting φ-nodes. - for i := W.take(); i != -1; i = W.take() { + for i := W.Take(); i != -1; i = W.Take() { u := fn.Blocks[i] for _, v := range df[u.Index] { - if hasAlready.add(v) { + if hasAlready.Add(v) { // Create φ-node. // It will be prepended to v.Instrs later, if needed. phi := &Phi{ @@ -478,8 +482,8 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool } newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) - if work.add(v) { - W.add(v) + if work.Add(v) { + W.Add(v) } } } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index f7e250c2b..eac5532b1 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -739,7 +739,7 @@ func fieldPath(start types.Type, indices []int) string { func isInLoop(b *ssa.BasicBlock) bool { sets := functions.FindLoops(b.Parent()) for _, set := range sets { - if set[b] { + if set.Has(b) { return true } } From 6e44b578bcea1fef5e2a3eaaf7b81d320304f59f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 6 May 2019 15:07:28 +0200 Subject: [PATCH 157/254] lint: smarter parallelism in runner --- lint/runner.go | 42 ++++++++++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 22efdcb99..047defe12 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -1,5 +1,26 @@ package lint +/* +Parallelism + +Runner implements parallel processing of packages by spawning one +goroutine per package in the dependency graph, without any semaphores. +Each goroutine initially waits on the completion of all of its +dependencies, thus establishing correct order of processing. Once all +dependencies finish processing, the goroutine will load the package +from export data or source – this loading is guarded by a semaphore, +sized according to the number of CPU cores. This way, we only have as +many packages occupying memory and CPU resources as there are actual +cores to process them. 
+ +This combination of unbounded goroutines but bounded package loading +means that if we have many parallel, independent subgraphs, they will +all execute in parallel, while not wasting resources for long linear +chains or trying to process more subgraphs in parallel than the system +can handle. + +*/ + import ( "bytes" "encoding/gob" @@ -75,6 +96,8 @@ type Runner struct { built map[*Package]*buildResult analyzerIDs analyzerIDs + + loadSem chan struct{} } type analyzerIDs struct { @@ -450,6 +473,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy defer r.cache.Trim() + var allPkgs []*Package m := map[*packages.Package]*Package{} packages.Visit(loaded, nil, func(l *packages.Package) { m[l] = &Package{ @@ -459,6 +483,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)), pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)), } + allPkgs = append(allPkgs, m[l]) for i := range m[l].facts { m[l].facts[i] = map[types.Object][]analysis.Fact{} } @@ -481,21 +506,12 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy } var wg sync.WaitGroup - wg.Add(len(pkgs)) - // OPT(dh): The ideal number of parallel jobs depends on the shape - // of the graph. We may risk having one goroutine doing work and - // all other goroutines being blocked on its completion. At the - // same time, Go dependency graphs aren't always very amiable - // towards parallelism. For example, on the standard library, we - // only achieve about 400% CPU usage (out of a possible 800% on - // this machine), and only 2x scaling. - sem := make(chan struct{}, runtime.GOMAXPROCS(-1)) - for _, pkg := range pkgs { + wg.Add(len(allPkgs)) + r.loadSem = make(chan struct{}, runtime.GOMAXPROCS(-1)) + for _, pkg := range allPkgs { pkg := pkg - sem <- struct{}{} go func() { r.processPkg(pkg, analyzers) - <-sem wg.Done() }() } @@ -687,6 +703,8 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { return } + r.loadSem <- struct{}{} + defer func() { <-r.loadSem }() if err := r.loadPkg(pkg, analyzers); err != nil { pkg.errs = append(pkg.errs, err) return From 46c8267889fc4bfe7f7cbffcaa1267696529991a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 6 May 2019 15:59:44 +0200 Subject: [PATCH 158/254] unused: avoid holding a global lock when not in whole-program mode --- unused/unused.go | 105 ++++++++++++++++++++--------------------------- 1 file changed, 44 insertions(+), 61 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 9215bc152..b7be78a16 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -409,20 +409,20 @@ type seenKey struct { } type Checker struct { - mu sync.Mutex - WholeProgram bool Debug io.Writer + mu sync.Mutex initialPackages map[*types.Package]struct{} allPackages map[*types.Package]struct{} + fset *token.FileSet + out []types.Object seenMu sync.Mutex seen map[seenKey]struct{} + // only used in whole-program mode graph *Graph - out []types.Object - fset *token.FileSet } func NewChecker() *Checker { @@ -450,8 +450,6 @@ func (c *Checker) Analyzer() *analysis.Analyzer { func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { c.mu.Lock() - defer c.mu.Unlock() - var visit func(pkg *types.Package) visit = func(pkg *types.Package) { if _, ok := c.allPackages[pkg]; ok { @@ -463,8 +461,8 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { } } visit(pass.Pkg) + c.mu.Unlock() - c.fset = pass.Fset 
ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) pkg := &pkg{ Fset: pass.Fset, @@ -476,25 +474,36 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { SrcFuncs: ssapkg.SrcFuncs, } + c.mu.Lock() + if c.fset == nil { + c.fset = pass.Fset + } else { + assert(c.fset == pass.Fset) + } c.initialPackages[pkg.Pkg] = struct{}{} + c.mu.Unlock() if c.WholeProgram { // (e1) all packages share a single graph + c.mu.Lock() if c.graph == nil { c.graph = NewGraph() - c.graph.initialPackages = c.initialPackages c.graph.wholeProgram = true } - c.processPkg(pkg) + // TODO fine-grained locking in whole-program mode + c.processPkg(c.graph, pkg) c.graph.seenFns = map[string]struct{}{} c.graph.pkg = nil + c.mu.Unlock() } else { - c.graph = NewGraph() - c.graph.initialPackages = c.initialPackages - c.graph.wholeProgram = false + graph := NewGraph() + graph.wholeProgram = false - c.processPkg(pkg) - c.out = append(c.out, c.results()...) + c.processPkg(graph, pkg) + // guard both c.out as well as c.results + c.mu.Lock() + c.out = append(c.out, c.results(graph)...) + c.mu.Unlock() } return nil, nil @@ -527,7 +536,7 @@ func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Prob func (c *Checker) Result() []types.Object { if c.WholeProgram { - c.out = c.results() + c.out = c.results(c.graph) } out2 := make([]types.Object, 0, len(c.out)) @@ -597,11 +606,11 @@ func (graph *Graph) quieten(node *Node) { } } -func (c *Checker) results() []types.Object { +func (c *Checker) results(graph *Graph) []types.Object { var out []types.Object if c.WholeProgram { - if c.graph == nil { + if graph == nil { // We never analyzed any packages return nil } @@ -609,7 +618,7 @@ func (c *Checker) results() []types.Object { var notIfaces []types.Type // implement as many interfaces as possible - c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: ifaces = append(ifaces, t) @@ -627,11 +636,11 @@ func (c *Checker) results() []types.Object { // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { - ms := c.graph.msCache.MethodSet(t) + ms := graph.msCache.MethodSet(t) for _, iface := range ifaces { - if sels, ok := c.graph.implements(t, iface, ms); ok { + if sels, ok := graph.implements(t, iface, ms); ok { for _, sel := range sels { - c.graph.useMethod(t, sel, t, "implements") + graph.useMethod(t, sel, t, "implements") } } } @@ -653,27 +662,27 @@ func (c *Checker) results() []types.Object { } c.debugf("digraph{\n") - debugNode(c.graph.Root) - for _, node := range c.graph.Nodes { + debugNode(graph.Root) + for _, node := range graph.Nodes { debugNode(node) } - c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { debugNode(value.(*Node)) }) c.debugf("}\n") } - c.graph.color(c.graph.Root) + graph.color(graph.Root) // if a node is unused, don't report any of the node's // children as unused. for example, if a function is unused, // don't flag its receiver. if a named type is unused, don't // flag its methods. 
- for _, node := range c.graph.Nodes { - c.graph.quieten(node) + for _, node := range graph.Nodes { + graph.quieten(node) } - c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - c.graph.quieten(value.(*Node)) + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + graph.quieten(value.(*Node)) }) report := func(node *Node) { @@ -727,27 +736,26 @@ func (c *Checker) results() []types.Object { c.debugf("n%d [color=gray];\n", node.id) } } - for _, node := range c.graph.Nodes { + for _, node := range graph.Nodes { report(node) } - c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { report(value.(*Node)) }) return out } -func (c *Checker) processPkg(pkg *pkg) { +func (c *Checker) processPkg(graph *Graph, pkg *pkg) { if pkg.Pkg.Path() == "unsafe" { return } - c.graph.entry(pkg) + graph.entry(pkg) } type Graph struct { - initialPackages map[*types.Package]struct{} - pkg *ssa.Package - msCache typeutil.MethodSetCache + pkg *ssa.Package + msCache typeutil.MethodSetCache wholeProgram bool @@ -917,19 +925,6 @@ func (g *Graph) see(obj interface{}) { } assert(obj != nil) - if obj, ok := obj.(types.Object); ok && obj.Pkg() != nil { - found := false - for pkg := range g.initialPackages { - if obj.Pkg() == pkg { - found = true - break - } - } - if !found { - return - } - } - // add new node to graph g.node(obj) } @@ -940,18 +935,6 @@ func (g *Graph) use(used, by interface{}, reason string) { } assert(used != nil) - if obj, ok := used.(types.Object); ok && obj.Pkg() != nil { - found := false - for pkg := range g.initialPackages { - if obj.Pkg() == pkg { - found = true - break - } - } - if !found { - return - } - } if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { if !g.isInterestingPackage(obj.Pkg()) { return From cc38182c2864aac981d18d409b4fec830184e0cd Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 7 May 2019 20:22:41 +0200 Subject: [PATCH 159/254] lint: print statistics when receiving signal Print statistics (such as packages processed) when receiving a signal. On Linux that signal is SIGUSR1, on BSDs the signal is SIGINFO. 
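A stripped-down, Unix-only sketch of the mechanism added below, using a single counter and syscall.SIGUSR1 directly; the patch itself selects the signal list per platform via build tags and prints the full Stats struct:

package main

import (
	"fmt"
	"os"
	"os/signal"
	"sync/atomic"
	"syscall"
	"time"
)

func main() {
	var processed uint64

	ch := make(chan os.Signal, 1)
	signal.Notify(ch, syscall.SIGUSR1) // SIGINFO on the BSDs in the real code
	defer signal.Stop(ch)

	go func() {
		for range ch {
			// Individual reads are atomic; together they are only a rough
			// progress snapshot, which is all that is needed here.
			fmt.Fprintf(os.Stderr, "packages processed: %d\n", atomic.LoadUint64(&processed))
		}
	}()

	for i := 0; i < 50; i++ {
		atomic.AddUint64(&processed, 1)
		time.Sleep(200 * time.Millisecond) // pretend to work; try `kill -USR1 <pid>`
	}
}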
--- lint/lint.go | 3 ++- lint/lintutil/stats.go | 7 +++++++ lint/lintutil/stats_bsd.go | 10 ++++++++++ lint/lintutil/stats_linux.go | 8 ++++++++ lint/lintutil/util.go | 35 +++++++++++++++++++++++++++++++++++ lint/runner.go | 35 +++++++++++++++++++++++++++++------ lint/stats.go | 19 +++++++++++++++++++ 7 files changed, 110 insertions(+), 7 deletions(-) create mode 100644 lint/lintutil/stats.go create mode 100644 lint/lintutil/stats_bsd.go create mode 100644 lint/lintutil/stats_linux.go create mode 100644 lint/stats.go diff --git a/lint/lint.go b/lint/lint.go index 72f69ba83..de22a02c7 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -95,6 +95,7 @@ type Linter struct { CumulativeCheckers []CumulativeChecker GoVersion int Config config.Config + Stats Stats } type CumulativeChecker interface { @@ -110,7 +111,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error analyzers = append(analyzers, cum.Analyzer()) } - r, err := NewRunner() + r, err := NewRunner(&l.Stats) if err != nil { return nil, err } diff --git a/lint/lintutil/stats.go b/lint/lintutil/stats.go new file mode 100644 index 000000000..59aae3246 --- /dev/null +++ b/lint/lintutil/stats.go @@ -0,0 +1,7 @@ +// +build !darwin,!dragonfly,!freebsd,!netbsd,!openbsd,!linux + +package lintutil + +import "os" + +var infoSignals = []os.Signal{} diff --git a/lint/lintutil/stats_bsd.go b/lint/lintutil/stats_bsd.go new file mode 100644 index 000000000..3a62ede03 --- /dev/null +++ b/lint/lintutil/stats_bsd.go @@ -0,0 +1,10 @@ +// +build darwin dragonfly freebsd netbsd openbsd + +package lintutil + +import ( + "os" + "syscall" +) + +var infoSignals = []os.Signal{syscall.SIGINFO} diff --git a/lint/lintutil/stats_linux.go b/lint/lintutil/stats_linux.go new file mode 100644 index 000000000..d792f48f8 --- /dev/null +++ b/lint/lintutil/stats_linux.go @@ -0,0 +1,8 @@ +package lintutil + +import ( + "os" + "syscall" +) + +var infoSignals = []os.Signal{syscall.SIGUSR1} diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index ab8942589..9ed4d7f57 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -17,11 +17,13 @@ import ( "io" "log" "os" + "os/signal" "regexp" "runtime" "runtime/pprof" "strconv" "strings" + "sync/atomic" "honnef.co/go/tools/config" "honnef.co/go/tools/internal/cache" @@ -309,6 +311,39 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string if opt.LintTests { cfg.Tests = true } + + printStats := func() { + // Individual stats are read atomically, but overall there + // is no synchronisation. For printing rough progress + // information, this doesn't matter. + switch atomic.LoadUint64(&l.Stats.State) { + case lint.StateInitializing: + fmt.Fprintln(os.Stderr, "Status: initializing") + case lint.StateGraph: + fmt.Fprintln(os.Stderr, "Status: loading package graph") + case lint.StateProcessing: + fmt.Fprintf(os.Stderr, "Packages: %d/%d initial, %d/%d total; Workers: %d/%d; Problems: %d\n", + atomic.LoadUint64(&l.Stats.ProcessedInitialPackages), + atomic.LoadUint64(&l.Stats.InitialPackages), + atomic.LoadUint64(&l.Stats.ProcessedPackages), + atomic.LoadUint64(&l.Stats.TotalPackages), + atomic.LoadUint64(&l.Stats.ActiveWorkers), + atomic.LoadUint64(&l.Stats.TotalWorkers), + atomic.LoadUint64(&l.Stats.Problems), + ) + } + } + if len(infoSignals) > 0 { + ch := make(chan os.Signal, 1) + signal.Notify(ch, infoSignals...) 
+ defer signal.Stop(ch) + go func() { + for range ch { + printStats() + } + }() + } + return l.Lint(cfg, paths) } diff --git a/lint/runner.go b/lint/runner.go index 047defe12..516b77906 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -36,6 +36,7 @@ import ( "strconv" "strings" "sync" + "sync/atomic" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" @@ -97,7 +98,10 @@ type Runner struct { analyzerIDs analyzerIDs + // limits parallelism of loading packages loadSem chan struct{} + + stats *Stats } type analyzerIDs struct { @@ -424,7 +428,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter return ret, nil } -func NewRunner() (*Runner, error) { +func NewRunner(stats *Stats) (*Runner, error) { cache, err := cache.Default() if err != nil { return nil, err @@ -433,6 +437,7 @@ func NewRunner() (*Runner, error) { return &Runner{ cache: cache, built: map[*Package]*buildResult{}, + stats: stats, }, nil } @@ -466,7 +471,9 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy if cfg != nil { dcfg = *cfg } - loaded, err := r.ld.Graph(dcfg, patterns...) + + atomic.StoreUint64(&r.stats.State, StateGraph) + initialPkgs, err := r.ld.Graph(dcfg, patterns...) if err != nil { return nil, err } @@ -475,7 +482,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy var allPkgs []*Package m := map[*packages.Package]*Package{} - packages.Visit(loaded, nil, func(l *packages.Package) { + packages.Visit(initialPkgs, nil, func(l *packages.Package) { m[l] = &Package{ Package: l, Imports: map[string]*Package{}, @@ -499,19 +506,29 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy m[l].errs = append(m[l].errs, err) } }) - pkgs := make([]*Package, len(loaded)) - for i, l := range loaded { + pkgs := make([]*Package, len(initialPkgs)) + for i, l := range initialPkgs { pkgs[i] = m[l] pkgs[i].initial = true } + atomic.StoreUint64(&r.stats.InitialPackages, uint64(len(initialPkgs))) + atomic.StoreUint64(&r.stats.TotalPackages, uint64(len(allPkgs))) + atomic.StoreUint64(&r.stats.State, StateProcessing) + var wg sync.WaitGroup wg.Add(len(allPkgs)) r.loadSem = make(chan struct{}, runtime.GOMAXPROCS(-1)) + atomic.StoreUint64(&r.stats.TotalWorkers, uint64(cap(r.loadSem))) for _, pkg := range allPkgs { pkg := pkg go func() { r.processPkg(pkg, analyzers) + + if pkg.initial { + atomic.AddUint64(&r.stats.ProcessedInitialPackages, 1) + } + atomic.AddUint64(&r.stats.Problems, uint64(len(pkg.problems))) wg.Done() }() } @@ -676,6 +693,8 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { pkg.TypesInfo = nil pkg.Syntax = nil pkg.results = nil + + atomic.AddUint64(&r.stats.ProcessedPackages, 1) close(res.done) }() @@ -704,7 +723,11 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { } r.loadSem <- struct{}{} - defer func() { <-r.loadSem }() + atomic.AddUint64(&r.stats.ActiveWorkers, 1) + defer func() { + <-r.loadSem + atomic.AddUint64(&r.stats.ActiveWorkers, ^uint64(0)) + }() if err := r.loadPkg(pkg, analyzers); err != nil { pkg.errs = append(pkg.errs, err) return diff --git a/lint/stats.go b/lint/stats.go new file mode 100644 index 000000000..d975517ff --- /dev/null +++ b/lint/stats.go @@ -0,0 +1,19 @@ +package lint + +const ( + StateInitializing = 0 + StateGraph = 1 + StateProcessing = 2 +) + +type Stats struct { + State uint64 + + InitialPackages uint64 + TotalPackages uint64 + ProcessedPackages uint64 + ProcessedInitialPackages uint64 + 
Problems uint64 + ActiveWorkers uint64 + TotalWorkers uint64 +} From 0db536f9d6e494e8284aa2b3faff37ebf7e71846 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 8 May 2019 15:11:33 +0200 Subject: [PATCH 160/254] lint/lintutil: support signals on all relevant platforms Closes gh-469 --- lint/lintutil/stats.go | 2 +- lint/lintutil/{stats_linux.go => stats_posix.go} | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) rename lint/lintutil/{stats_linux.go => stats_posix.go} (71%) diff --git a/lint/lintutil/stats.go b/lint/lintutil/stats.go index 59aae3246..ba8caf0af 100644 --- a/lint/lintutil/stats.go +++ b/lint/lintutil/stats.go @@ -1,4 +1,4 @@ -// +build !darwin,!dragonfly,!freebsd,!netbsd,!openbsd,!linux +// +build !aix,!android,!darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!solaris package lintutil diff --git a/lint/lintutil/stats_linux.go b/lint/lintutil/stats_posix.go similarity index 71% rename from lint/lintutil/stats_linux.go rename to lint/lintutil/stats_posix.go index d792f48f8..53f21c666 100644 --- a/lint/lintutil/stats_linux.go +++ b/lint/lintutil/stats_posix.go @@ -1,3 +1,5 @@ +// +build aix android linux solaris + package lintutil import ( From 64410d9e05863fca3ea27efff70b61fe4fa5f844 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 8 May 2019 15:14:00 +0200 Subject: [PATCH 161/254] unused: fix tests --- unused/unused_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/unused/unused_test.go b/unused/unused_test.go index 5a023ddfe..003e946bd 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -177,7 +177,8 @@ func check(t *testing.T, fset *token.FileSet, diagnostics []types.Object) { func TestAll(t *testing.T) { c := NewChecker() - r, err := lint.NewRunner() + var stats lint.Stats + r, err := lint.NewRunner(&stats) if err != nil { t.Fatal(err) } From 64115f0557bbc35c6408ff9f188556b6fc579b91 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 8 May 2019 15:21:17 +0200 Subject: [PATCH 162/254] all: fix issues found by vet --- go/types/typeutil/map_test.go | 10 +++++----- ssa/example_test.go | 4 ++-- ssa/sanity.go | 6 +++--- unused/unused_test.go | 2 -- 4 files changed, 10 insertions(+), 12 deletions(-) diff --git a/go/types/typeutil/map_test.go b/go/types/typeutil/map_test.go index 905376d8c..dcc10d9a0 100644 --- a/go/types/typeutil/map_test.go +++ b/go/types/typeutil/map_test.go @@ -43,11 +43,11 @@ func TestMap(t *testing.T) { var tmap *typeutil.Map // All methods but Set are safe on on (*T)(nil). - tmap.Len() - tmap.At(tPStr1) - tmap.Delete(tPStr1) - tmap.KeysString() - tmap.String() + _ = tmap.Len() + _ = tmap.At(tPStr1) + _ = tmap.Delete(tPStr1) + _ = tmap.KeysString() + _ = tmap.String() tmap = new(typeutil.Map) diff --git a/ssa/example_test.go b/ssa/example_test.go index 3dc9093b8..7558859bd 100644 --- a/ssa/example_test.go +++ b/ssa/example_test.go @@ -48,7 +48,7 @@ func main() { // with similar functionality. It is located at // golang.org/x/tools/cmd/ssadump. // -func ExampleBuildPackage() { +func Example_buildPackage() { // Parse the source files. fset := token.NewFileSet() f, err := parser.ParseFile(fset, "hello.go", hello, parser.ParseComments) @@ -118,7 +118,7 @@ func ExampleBuildPackage() { // for the entire program. This is what you'd typically use for a // whole-program analysis. // -func ExampleLoadProgram() { +func Example_loadProgram() { // Load cmd/cover and its dependencies. 
var conf loader.Config conf.Import("cmd/cover") diff --git a/ssa/sanity.go b/ssa/sanity.go index c56b2682c..47b2517e0 100644 --- a/ssa/sanity.go +++ b/ssa/sanity.go @@ -410,8 +410,8 @@ func (s *sanity) checkFunction(fn *Function) bool { s.errorf("nil Prog") } - fn.String() // must not crash - fn.RelString(fn.pkg()) // must not crash + _ = fn.String() // must not crash + _ = fn.RelString(fn.pkg()) // must not crash // All functions have a package, except delegates (which are // shared across packages, or duplicated as weak symbols in a @@ -490,7 +490,7 @@ func sanityCheckPackage(pkg *Package) { if pkg.Pkg == nil { panic(fmt.Sprintf("Package %s has no Object", pkg)) } - pkg.String() // must not crash + _ = pkg.String() // must not crash for name, mem := range pkg.Members { if name != mem.Name() { diff --git a/unused/unused_test.go b/unused/unused_test.go index 003e946bd..8cf5d8fb1 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -59,8 +59,6 @@ func parseExpectations(text string) ([]string, error) { return nil, fmt.Errorf("unexpected %s", scanner.TokenString(tok)) } } - - return expects, nil } func check(t *testing.T, fset *token.FileSet, diagnostics []types.Object) { From d5a43d77d59c4c0806c133848f9c4293c68ee372 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 8 May 2019 16:09:36 +0200 Subject: [PATCH 163/254] ssa: backport upstream changes --- ssa/builder.go | 16 ++-- ssa/const.go | 44 +++++----- ssa/create.go | 17 ++-- ssa/doc.go | 12 +-- ssa/example_test.go | 62 ++++++++++----- ssa/methods.go | 4 +- ssa/sanity.go | 12 +++ ssa/source.go | 2 +- ssa/source_test.go | 99 +++++++++++++++-------- ssa/ssa.go | 16 ++-- ssa/ssautil/load.go | 84 ++++++++++++++------ ssa/ssautil/load_test.go | 56 +++++++++++++ ssa/testdata/objlookup.go | 150 +++++++++++++++++------------------ ssa/testdata/valueforexpr.go | 8 +- ssa/testmain.go | 8 +- ssa/wrappers.go | 6 +- 16 files changed, 384 insertions(+), 212 deletions(-) diff --git a/ssa/builder.go b/ssa/builder.go index 537b7e3ff..317ac0611 100644 --- a/ssa/builder.go +++ b/ssa/builder.go @@ -32,7 +32,7 @@ package ssa import ( "fmt" "go/ast" - exact "go/constant" + "go/constant" "go/token" "go/types" "os" @@ -63,7 +63,7 @@ var ( // SSA Value constants. vZero = intConst(0) vOne = intConst(1) - vTrue = NewConst(exact.MakeBool(true), tBool) + vTrue = NewConst(constant.MakeBool(true), tBool) ) // builder holds state associated with the package currently being built. @@ -131,11 +131,11 @@ func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { switch e.Op { case token.LAND: b.cond(fn, e.X, rhs, done) - short = NewConst(exact.MakeBool(false), t) + short = NewConst(constant.MakeBool(false), t) case token.LOR: b.cond(fn, e.X, done, rhs) - short = NewConst(exact.MakeBool(true), t) + short = NewConst(constant.MakeBool(true), t) } // Is rhs unreachable? @@ -969,10 +969,10 @@ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { c.Args = b.emitCallArgs(fn, sig, e, c.Args) } -// assignOp emits to fn code to perform loc += incr or loc -= incr. -func (b *builder) assignOp(fn *Function, loc lvalue, incr Value, op token.Token, pos token.Pos) { +// assignOp emits to fn code to perform loc = val. 
+func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { oldv := loc.load(fn) - loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, incr, oldv.Type()), loc.typ(), pos)) + loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos)) } // localValueSpec emits to fn code to define all of the vars in the @@ -1998,7 +1998,7 @@ start: op = token.SUB } loc := b.addr(fn, s.X, false) - b.assignOp(fn, loc, NewConst(exact.MakeInt64(1), loc.typ()), op, s.Pos()) + b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) case *ast.AssignStmt: switch s.Tok { diff --git a/ssa/const.go b/ssa/const.go index ca99adc3f..f95d9e114 100644 --- a/ssa/const.go +++ b/ssa/const.go @@ -8,7 +8,7 @@ package ssa import ( "fmt" - exact "go/constant" + "go/constant" "go/token" "go/types" "strconv" @@ -17,14 +17,14 @@ import ( // NewConst returns a new constant of the specified value and type. // val must be valid according to the specification of Const.Value. // -func NewConst(val exact.Value, typ types.Type) *Const { +func NewConst(val constant.Value, typ types.Type) *Const { return &Const{typ, val} } // intConst returns an 'int' constant that evaluates to i. // (i is an int64 in case the host is narrower than the target.) func intConst(i int64) *Const { - return NewConst(exact.MakeInt64(i), tInt) + return NewConst(constant.MakeInt64(i), tInt) } // nilConst returns a nil constant of the specified type, which may @@ -36,7 +36,7 @@ func nilConst(typ types.Type) *Const { // stringConst returns a 'string' constant that evaluates to s. func stringConst(s string) *Const { - return NewConst(exact.MakeString(s), tString) + return NewConst(constant.MakeString(s), tString) } // zeroConst returns a new "zero" constant of the specified type, @@ -48,11 +48,11 @@ func zeroConst(t types.Type) *Const { case *types.Basic: switch { case t.Info()&types.IsBoolean != 0: - return NewConst(exact.MakeBool(false), t) + return NewConst(constant.MakeBool(false), t) case t.Info()&types.IsNumeric != 0: - return NewConst(exact.MakeInt64(0), t) + return NewConst(constant.MakeInt64(0), t) case t.Info()&types.IsString != 0: - return NewConst(exact.MakeString(""), t) + return NewConst(constant.MakeString(""), t) case t.Kind() == types.UnsafePointer: fallthrough case t.Kind() == types.UntypedNil: @@ -74,8 +74,8 @@ func (c *Const) RelString(from *types.Package) string { var s string if c.Value == nil { s = "nil" - } else if c.Value.Kind() == exact.String { - s = exact.StringVal(c.Value) + } else if c.Value.Kind() == constant.String { + s = constant.StringVal(c.Value) const max = 20 // TODO(adonovan): don't cut a rune in half. if len(s) > max { @@ -121,14 +121,14 @@ func (c *Const) IsNil() bool { // a signed 64-bit integer. // func (c *Const) Int64() int64 { - switch x := exact.ToInt(c.Value); x.Kind() { - case exact.Int: - if i, ok := exact.Int64Val(x); ok { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if i, ok := constant.Int64Val(x); ok { return i } return 0 - case exact.Float: - f, _ := exact.Float64Val(x) + case constant.Float: + f, _ := constant.Float64Val(x) return int64(f) } panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) @@ -138,14 +138,14 @@ func (c *Const) Int64() int64 { // an unsigned 64-bit integer. 
// func (c *Const) Uint64() uint64 { - switch x := exact.ToInt(c.Value); x.Kind() { - case exact.Int: - if u, ok := exact.Uint64Val(x); ok { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if u, ok := constant.Uint64Val(x); ok { return u } return 0 - case exact.Float: - f, _ := exact.Float64Val(x) + case constant.Float: + f, _ := constant.Float64Val(x) return uint64(f) } panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) @@ -155,7 +155,7 @@ func (c *Const) Uint64() uint64 { // a float64. // func (c *Const) Float64() float64 { - f, _ := exact.Float64Val(c.Value) + f, _ := constant.Float64Val(c.Value) return f } @@ -163,7 +163,7 @@ func (c *Const) Float64() float64 { // fit a complex128. // func (c *Const) Complex128() complex128 { - re, _ := exact.Float64Val(exact.Real(c.Value)) - im, _ := exact.Float64Val(exact.Imag(c.Value)) + re, _ := constant.Float64Val(constant.Real(c.Value)) + im, _ := constant.Float64Val(constant.Imag(c.Value)) return complex(re, im) } diff --git a/ssa/create.go b/ssa/create.go index 69ac12b1b..85163a0c5 100644 --- a/ssa/create.go +++ b/ssa/create.go @@ -251,12 +251,19 @@ func (prog *Program) AllPackages() []*Package { return pkgs } -// ImportedPackage returns the importable SSA Package whose import -// path is path, or nil if no such SSA package has been created. +// ImportedPackage returns the importable Package whose PkgPath +// is path, or nil if no such Package has been created. // -// Not all packages are importable. For example, no import -// declaration can resolve to the x_test package created by 'go test' -// or the ad-hoc main package created 'go build foo.go'. +// A parameter to CreatePackage determines whether a package should be +// considered importable. For example, no import declaration can resolve +// to the ad-hoc main package created by 'go build foo.go'. +// +// TODO(adonovan): rethink this function and the "importable" concept; +// most packages are importable. This function assumes that all +// types.Package.Path values are unique within the ssa.Program, which is +// false---yet this function remains very convenient. +// Clients should use (*Program).Package instead where possible. +// SSA doesn't really need a string-keyed map of packages. // func (prog *Program) ImportedPackage(path string) *Package { return prog.imported[path] diff --git a/ssa/doc.go b/ssa/doc.go index 57474dd20..0f71fda00 100644 --- a/ssa/doc.go +++ b/ssa/doc.go @@ -23,11 +23,13 @@ // such as multi-way branch can be reconstructed as needed; see // ssautil.Switches() for an example. // -// To construct an SSA-form program, call ssautil.CreateProgram on a -// loader.Program, a set of type-checked packages created from -// parsed Go source files. The resulting ssa.Program contains all the -// packages and their members, but SSA code is not created for -// function bodies until a subsequent call to (*Package).Build. +// The simplest way to create the SSA representation of a package is +// to load typed syntax trees using golang.org/x/tools/go/packages, then +// invoke the ssautil.Packages helper function. See ExampleLoadPackages +// and ExampleWholeProgram for examples. +// The resulting ssa.Program contains all the packages and their +// members, but SSA code is not created for function bodies until a +// subsequent call to (*Package).Build or (*Program).Build. 
// // The builder initially builds a naive SSA form in which all local // variables are addresses of stack locations with explicit loads and diff --git a/ssa/example_test.go b/ssa/example_test.go index 7558859bd..efa1c53b7 100644 --- a/ssa/example_test.go +++ b/ssa/example_test.go @@ -11,9 +11,10 @@ import ( "go/parser" "go/token" "go/types" + "log" "os" - "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" "honnef.co/go/tools/ssa" "honnef.co/go/tools/ssa/ssautil" ) @@ -113,26 +114,51 @@ func Example_buildPackage() { // return } -// This program shows how to load a main package (cmd/cover) and all its -// dependencies from source, using the loader, and then build SSA code -// for the entire program. This is what you'd typically use for a -// whole-program analysis. -// -func Example_loadProgram() { - // Load cmd/cover and its dependencies. - var conf loader.Config - conf.Import("cmd/cover") - lprog, err := conf.Load() +// This example builds SSA code for a set of packages using the +// x/tools/go/packages API. This is what you would typically use for a +// analysis capable of operating on a single package. +func Example_loadPackages() { + // Load, parse, and type-check the initial packages. + cfg := &packages.Config{Mode: packages.LoadSyntax} + initial, err := packages.Load(cfg, "fmt", "net/http") if err != nil { - fmt.Print(err) // type error in some package - return + log.Fatal(err) } - // Create SSA-form program representation. - prog := ssautil.CreateProgram(lprog, ssa.SanityCheckFunctions) + // Stop if any package had errors. + // This step is optional; without it, the next step + // will create SSA for only a subset of packages. + if packages.PrintErrors(initial) > 0 { + log.Fatalf("packages contain errors") + } - // Build SSA code for the entire cmd/cover program. - prog.Build() + // Create SSA packages for all well-typed packages. + prog, pkgs := ssautil.Packages(initial, ssa.PrintPackages) + _ = prog - // Output: + // Build SSA code for the well-typed initial packages. + for _, p := range pkgs { + if p != nil { + p.Build() + } + } +} + +// This example builds SSA code for a set of packages plus all their dependencies, +// using the x/tools/go/packages API. +// This is what you'd typically use for a whole-program analysis. +func Example_loadWholeProgram() { + // Load, parse, and type-check the whole program. + cfg := packages.Config{Mode: packages.LoadAllSyntax} + initial, err := packages.Load(&cfg, "fmt", "net/http") + if err != nil { + log.Fatal(err) + } + + // Create SSA packages for well-typed packages and their dependencies. + prog, pkgs := ssautil.AllPackages(initial, ssa.PrintPackages) + _ = pkgs + + // Build SSA code for the whole program. 
+ prog.Build() } diff --git a/ssa/methods.go b/ssa/methods.go index 080dca968..9cf383916 100644 --- a/ssa/methods.go +++ b/ssa/methods.go @@ -23,14 +23,14 @@ import ( // func (prog *Program) MethodValue(sel *types.Selection) *Function { if sel.Kind() != types.MethodVal { - panic(fmt.Sprintf("Method(%s) kind != MethodVal", sel)) + panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) } T := sel.Recv() if isInterface(T) { return nil // abstract method } if prog.mode&LogSource != 0 { - defer logStack("Method %s %v", T, sel)() + defer logStack("MethodValue %s %v", T, sel)() } prog.methodsMu.Lock() diff --git a/ssa/sanity.go b/ssa/sanity.go index 47b2517e0..1d29b66b0 100644 --- a/ssa/sanity.go +++ b/ssa/sanity.go @@ -448,6 +448,18 @@ func (s *sanity) checkFunction(fn *Function) bool { if p.Parent() != fn { s.errorf("Param %s at index %d has wrong parent", p.Name(), i) } + // Check common suffix of Signature and Params match type. + if sig := fn.Signature; sig != nil { + j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params + if j < 0 { + continue + } + if !types.Identical(p.Type(), sig.Params().At(j).Type()) { + s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) + + } + } + s.checkReferrerList(p) } for i, fv := range fn.FreeVars { diff --git a/ssa/source.go b/ssa/source.go index 6d2223eda..8d9cca170 100644 --- a/ssa/source.go +++ b/ssa/source.go @@ -150,7 +150,7 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { // (modulo "untyped" bools resulting from comparisons). // // (Tip: to find the ssa.Value given a source position, use -// importer.PathEnclosingInterval to locate the ast.Node, then +// astutil.PathEnclosingInterval to locate the ast.Node, then // EnclosingFunction to locate the Function, then ValueForExpr to find // the ssa.Value.) // diff --git a/ssa/source_test.go b/ssa/source_test.go index 3ce58810e..512be2322 100644 --- a/ssa/source_test.go +++ b/ssa/source_test.go @@ -9,17 +9,18 @@ package ssa_test import ( "fmt" "go/ast" - exact "go/constant" + "go/constant" "go/parser" "go/token" "go/types" + "io/ioutil" "os" - "regexp" "runtime" "strings" "testing" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/expect" "golang.org/x/tools/go/loader" "honnef.co/go/tools/ssa" "honnef.co/go/tools/ssa/ssautil" @@ -31,10 +32,14 @@ func TestObjValueLookup(t *testing.T) { } conf := loader.Config{ParserMode: parser.ParseComments} - f, err := conf.ParseFile("testdata/objlookup.go", nil) + src, err := ioutil.ReadFile("testdata/objlookup.go") if err != nil { - t.Error(err) - return + t.Fatal(err) + } + readFile := func(filename string) ([]byte, error) { return src, nil } + f, err := conf.ParseFile("testdata/objlookup.go", src) + if err != nil { + t.Fatal(err) } conf.CreateFromFiles("main", f) @@ -42,16 +47,40 @@ func TestObjValueLookup(t *testing.T) { // kind of ssa.Value we expect (represented "Constant", "&Alloc"). expectations := make(map[string]string) - // Find all annotations of form x::BinOp, &y::Alloc, etc. 
- re := regexp.MustCompile(`(\b|&)?(\w*)::(\w*)\b`) - for _, c := range f.Comments { - text := c.Text() - pos := conf.Fset.Position(c.Pos()) - for _, m := range re.FindAllStringSubmatch(text, -1) { - key := fmt.Sprintf("%s:%d", m[2], pos.Line) - value := m[1] + m[3] - expectations[key] = value + // Each note of the form @ssa(x, "BinOp") in testdata/objlookup.go + // specifies an expectation that an object named x declared on the + // same line is associated with an an ssa.Value of type *ssa.BinOp. + notes, err := expect.Extract(conf.Fset, f) + if err != nil { + t.Fatal(err) + } + for _, n := range notes { + if n.Name != "ssa" { + t.Errorf("%v: unexpected note type %q, want \"ssa\"", conf.Fset.Position(n.Pos), n.Name) + continue + } + if len(n.Args) != 2 { + t.Errorf("%v: ssa has %d args, want 2", conf.Fset.Position(n.Pos), len(n.Args)) + continue + } + ident, ok := n.Args[0].(expect.Identifier) + if !ok { + t.Errorf("%v: got %v for arg 1, want identifier", conf.Fset.Position(n.Pos), n.Args[0]) + continue } + exp, ok := n.Args[1].(string) + if !ok { + t.Errorf("%v: got %v for arg 2, want string", conf.Fset.Position(n.Pos), n.Args[1]) + continue + } + p, _, err := expect.MatchBefore(conf.Fset, readFile, n.Pos, string(ident)) + if err != nil { + t.Error(err) + continue + } + pos := conf.Fset.Position(p) + key := fmt.Sprintf("%s:%d", ident, pos.Line) + expectations[key] = exp } iprog, err := conf.Load() @@ -144,7 +173,7 @@ func checkConstValue(t *testing.T, prog *ssa.Program, obj *types.Const) { return } if obj.Name() != "nil" { - if !exact.Compare(c.Value, token.EQL, obj.Val()) { + if !constant.Compare(c.Value, token.EQL, obj.Val()) { t.Errorf("ConstValue(%s).Value (%s) != %s", obj, c.Value, obj.Val()) return @@ -232,37 +261,41 @@ func testValueForExpr(t *testing.T, testfile string) { } } - // Find the actual AST node for each canonical position. - parenExprByPos := make(map[token.Pos]*ast.ParenExpr) + var parenExprs []*ast.ParenExpr ast.Inspect(f, func(n ast.Node) bool { if n != nil { if e, ok := n.(*ast.ParenExpr); ok { - parenExprByPos[e.Pos()] = e + parenExprs = append(parenExprs, e) } } return true }) - // Find all annotations of form /*@kind*/. 
- for _, c := range f.Comments { - text := strings.TrimSpace(c.Text()) - if text == "" || text[0] != '@' { - continue + notes, err := expect.Extract(prog.Fset, f) + if err != nil { + t.Fatal(err) + } + for _, n := range notes { + want := n.Name + if want == "nil" { + want = "" } - text = text[1:] - pos := c.End() + 1 - position := prog.Fset.Position(pos) + position := prog.Fset.Position(n.Pos) var e ast.Expr - if target := parenExprByPos[pos]; target == nil { - t.Errorf("%s: annotation doesn't precede ParenExpr: %q", position, text) + for _, paren := range parenExprs { + if paren.Pos() > n.Pos { + e = paren.X + break + } + } + if e == nil { + t.Errorf("%s: note doesn't precede ParenExpr: %q", position, want) continue - } else { - e = target.X } - path, _ := astutil.PathEnclosingInterval(f, pos, pos) + path, _ := astutil.PathEnclosingInterval(f, n.Pos, n.Pos) if path == nil { - t.Errorf("%s: can't find AST path from root to comment: %s", position, text) + t.Errorf("%s: can't find AST path from root to comment: %s", position, want) continue } @@ -274,7 +307,7 @@ func testValueForExpr(t *testing.T, testfile string) { v, gotAddr := fn.ValueForExpr(e) // (may be nil) got := strings.TrimPrefix(fmt.Sprintf("%T", v), "*ssa.") - if want := text; got != want { + if got != want { t.Errorf("%s: got value %q, want %q", position, got, want) } if v != nil { diff --git a/ssa/ssa.go b/ssa/ssa.go index 8825e7b59..aeddd65e5 100644 --- a/ssa/ssa.go +++ b/ssa/ssa.go @@ -10,7 +10,7 @@ package ssa import ( "fmt" "go/ast" - exact "go/constant" + "go/constant" "go/token" "go/types" "sync" @@ -405,7 +405,7 @@ type Parameter struct { // of the same type and value. // // Value holds the exact value of the constant, independent of its -// Type(), using the same representation as package go/exact uses for +// Type(), using the same representation as package go/constant uses for // constants, or nil for a typed nil value. // // Pos() returns token.NoPos. @@ -417,7 +417,7 @@ type Parameter struct { // type Const struct { typ types.Type - Value exact.Value + Value constant.Value } // A Global is a named Value holding the address of a package-level @@ -572,8 +572,8 @@ type BinOp struct { register // One of: // ADD SUB MUL QUO REM + - * / % - // AND OR XOR SHL SHR AND_NOT & | ^ << >> &~ - // EQL LSS GTR NEQ LEQ GEQ == != < <= < >= + // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ + // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= Op token.Token X, Y Value } @@ -680,10 +680,10 @@ type ChangeInterface struct { // value of a concrete type. // // Use Program.MethodSets.MethodSet(X.Type()) to find the method-set -// of X, and Program.Method(m) to find the implementation of a method. +// of X, and Program.MethodValue(m) to find the implementation of a method. // // To construct the zero value of an interface type T, use: -// NewConst(exact.MakeNil(), T, pos) +// NewConst(constant.MakeNil(), T, pos) // // Pos() returns the ast.CallExpr.Lparen, if the instruction arose // from an explicit conversion in the source. @@ -813,7 +813,7 @@ type Slice struct { type FieldAddr struct { register X Value // *struct - Field int // index into X.Type().Deref().(*types.Struct).Fields + Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field) } // The Field instruction yields the Field of struct X. 
diff --git a/ssa/ssautil/load.go b/ssa/ssautil/load.go index 3b8694a13..113ff39ca 100644 --- a/ssa/ssautil/load.go +++ b/ssa/ssautil/load.go @@ -16,49 +16,78 @@ import ( "honnef.co/go/tools/ssa" ) -// Packages creates an SSA program for a set of packages loaded from -// source syntax using the golang.org/x/tools/go/packages.Load function. -// It creates and returns an SSA package for each well-typed package in -// the initial list. The resulting list of packages has the same length -// as initial, and contains a nil if SSA could not be constructed for -// the corresponding initial package. +// Packages creates an SSA program for a set of packages. // -// Code for bodies of functions is not built until Build is called -// on the resulting Program. +// The packages must have been loaded from source syntax using the +// golang.org/x/tools/go/packages.Load function in LoadSyntax or +// LoadAllSyntax mode. +// +// Packages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding initial +// package due to type errors. +// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed only for the initial +// packages with well-typed syntax trees. // // The mode parameter controls diagnostics and checking during SSA construction. // func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, false) +} + +// AllPackages creates an SSA program for a set of packages plus all +// their dependencies. +// +// The packages must have been loaded from source syntax using the +// golang.org/x/tools/go/packages.Load function in LoadAllSyntax mode. +// +// AllPackages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of intial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding +// initial package due to type errors. +// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed for all packages with +// well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. 
+// +func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, true) +} + +func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*ssa.Program, []*ssa.Package) { + var fset *token.FileSet if len(initial) > 0 { fset = initial[0].Fset } prog := ssa.NewProgram(fset, mode) - seen := make(map[*packages.Package]*ssa.Package) - var create func(p *packages.Package) *ssa.Package - create = func(p *packages.Package) *ssa.Package { - ssapkg, ok := seen[p] - if !ok { - if p.Types == nil || p.IllTyped { - // not well typed - seen[p] = nil - return nil - } - ssapkg = prog.CreatePackage(p.Types, p.Syntax, p.TypesInfo, true) - seen[p] = ssapkg + isInitial := make(map[*packages.Package]bool, len(initial)) + for _, p := range initial { + isInitial[p] = true + } - for _, imp := range p.Imports { - create(imp) + ssamap := make(map[*packages.Package]*ssa.Package) + packages.Visit(initial, nil, func(p *packages.Package) { + if p.Types != nil && !p.IllTyped { + var files []*ast.File + if deps || isInitial[p] { + files = p.Syntax } + ssamap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true) } - return ssapkg - } + }) var ssapkgs []*ssa.Package for _, p := range initial { - ssapkgs = append(ssapkgs, create(p)) + ssapkgs = append(ssapkgs, ssamap[p]) // may be nil } return prog, ssapkgs } @@ -70,7 +99,10 @@ func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, // Code for bodies of functions is not built until Build is called // on the result. // -// mode controls diagnostics and checking during SSA construction. +// The mode parameter controls diagnostics and checking during SSA construction. +// +// Deprecated: use golang.org/x/tools/go/packages and the Packages +// function instead; see ssa.ExampleLoadPackages. // func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { prog := ssa.NewProgram(lprog.Fset, mode) diff --git a/ssa/ssautil/load_test.go b/ssa/ssautil/load_test.go index e3c78db3f..332054839 100644 --- a/ssa/ssautil/load_test.go +++ b/ssa/ssautil/load_test.go @@ -5,14 +5,17 @@ package ssautil_test import ( + "bytes" "go/ast" "go/importer" "go/parser" "go/token" "go/types" "os" + "strings" "testing" + "golang.org/x/tools/go/packages" "honnef.co/go/tools/ssa/ssautil" ) @@ -49,6 +52,45 @@ func TestBuildPackage(t *testing.T) { } } +func TestPackages(t *testing.T) { + cfg := &packages.Config{Mode: packages.LoadSyntax} + initial, err := packages.Load(cfg, "bytes") + if err != nil { + t.Fatal(err) + } + if packages.PrintErrors(initial) > 0 { + t.Fatal("there were errors") + } + + prog, pkgs := ssautil.Packages(initial, 0) + bytesNewBuffer := pkgs[0].Func("NewBuffer") + bytesNewBuffer.Pkg.Build() + + // We'll dump the SSA of bytes.NewBuffer because it is small and stable. + out := new(bytes.Buffer) + bytesNewBuffer.WriteTo(out) + + // For determinism, sanitize the location. 
+ location := prog.Fset.Position(bytesNewBuffer.Pos()).String() + got := strings.Replace(out.String(), location, "$GOROOT/src/bytes/buffer.go:1", -1) + + want := ` +# Name: bytes.NewBuffer +# Package: bytes +# Location: $GOROOT/src/bytes/buffer.go:1 +func NewBuffer(buf []byte) *Buffer: +0: entry P:0 S:0 + t0 = new Buffer (complit) *Buffer + t1 = &t0.buf [#0] *[]byte + *t1 = buf + return t0 + +`[1:] + if got != want { + t.Errorf("bytes.NewBuffer SSA = <<%s>>, want <<%s>>", got, want) + } +} + func TestBuildPackage_MissingImport(t *testing.T) { fset := token.NewFileSet() f, err := parser.ParseFile(fset, "bad.go", `package bad; import "missing"`, 0) @@ -62,3 +104,17 @@ func TestBuildPackage_MissingImport(t *testing.T) { t.Fatal("BuildPackage succeeded unexpectedly") } } + +func TestIssue28106(t *testing.T) { + // In go1.10, go/packages loads all packages from source, not + // export data, but does not type check function bodies of + // imported packages. This test ensures that we do not attempt + // to run the SSA builder on functions without type information. + cfg := &packages.Config{Mode: packages.LoadSyntax} + pkgs, err := packages.Load(cfg, "runtime") + if err != nil { + t.Fatal(err) + } + prog, _ := ssautil.Packages(pkgs, 0) + prog.Build() // no crash +} diff --git a/ssa/testdata/objlookup.go b/ssa/testdata/objlookup.go index 1aaa417e8..d110add69 100644 --- a/ssa/testdata/objlookup.go +++ b/ssa/testdata/objlookup.go @@ -24,7 +24,7 @@ func (*J) method() {} const globalConst = 0 -var globalVar int // &globalVar::Global +var globalVar int //@ ssa(globalVar,"&Global") func globalFunc() {} @@ -33,128 +33,128 @@ type I interface { } type S struct { - x int // x::nil + x int //@ ssa(x,"nil") } func main() { - print(globalVar) // globalVar::UnOp - globalVar = 1 // globalVar::Const + print(globalVar) //@ ssa(globalVar,"UnOp") + globalVar = 1 //@ ssa(globalVar,"Const") - var v0 int = 1 // v0::Const (simple local value spec) - if v0 > 0 { // v0::Const - v0 = 2 // v0::Const + var v0 int = 1 //@ ssa(v0,"Const") // simple local value spec + if v0 > 0 { //@ ssa(v0,"Const") + v0 = 2 //@ ssa(v0,"Const") } - print(v0) // v0::Phi + print(v0) //@ ssa(v0,"Phi") // v1 is captured and thus implicitly address-taken. 
- var v1 int = 1 // v1::Const - v1 = 2 // v1::Const - fmt.Println(v1) // v1::UnOp (load) - f := func(param int) { // f::MakeClosure param::Parameter - if y := 1; y > 0 { // y::Const - print(v1, param) // v1::UnOp (load) param::Parameter + var v1 int = 1 //@ ssa(v1,"Const") + v1 = 2 //@ ssa(v1,"Const") + fmt.Println(v1) //@ ssa(v1,"UnOp") // load + f := func(param int) { //@ ssa(f,"MakeClosure"), ssa(param,"Parameter") + if y := 1; y > 0 { //@ ssa(y,"Const") + print(v1, param) //@ ssa(v1,"UnOp") /*load*/, ssa(param,"Parameter") } - param = 2 // param::Const - println(param) // param::Const + param = 2 //@ ssa(param,"Const") + println(param) //@ ssa(param,"Const") } - f(0) // f::MakeClosure + f(0) //@ ssa(f,"MakeClosure") - var v2 int // v2::Const (implicitly zero-initialized local value spec) - print(v2) // v2::Const + var v2 int //@ ssa(v2,"Const") // implicitly zero-initialized local value spec + print(v2) //@ ssa(v2,"Const") - m := make(map[string]int) // m::MakeMap + m := make(map[string]int) //@ ssa(m,"MakeMap") // Local value spec with multi-valued RHS: - var v3, v4 = m[""] // v3::Extract v4::Extract m::MakeMap - print(v3) // v3::Extract - print(v4) // v4::Extract + var v3, v4 = m[""] //@ ssa(v3,"Extract"), ssa(v4,"Extract"), ssa(m,"MakeMap") + print(v3) //@ ssa(v3,"Extract") + print(v4) //@ ssa(v4,"Extract") - v3++ // v3::BinOp (assign with op) - v3 += 2 // v3::BinOp (assign with op) + v3++ //@ ssa(v3,"BinOp") // assign with op + v3 += 2 //@ ssa(v3,"BinOp") // assign with op - v5, v6 := false, "" // v5::Const v6::Const (defining assignment) - print(v5) // v5::Const - print(v6) // v6::Const + v5, v6 := false, "" //@ ssa(v5,"Const"), ssa(v6,"Const") // defining assignment + print(v5) //@ ssa(v5,"Const") + print(v6) //@ ssa(v6,"Const") - var v7 S // &v7::Alloc - v7.x = 1 // &v7::Alloc &x::FieldAddr - print(v7.x) // &v7::Alloc &x::FieldAddr + var v7 S //@ ssa(v7,"&Alloc") + v7.x = 1 //@ ssa(v7,"&Alloc"), ssa(x,"&FieldAddr") + print(v7.x) //@ ssa(v7,"&Alloc"), ssa(x,"&FieldAddr") - var v8 [1]int // &v8::Alloc - v8[0] = 0 // &v8::Alloc - print(v8[:]) // &v8::Alloc - _ = v8[0] // &v8::Alloc - _ = v8[:][0] // &v8::Alloc - v8ptr := &v8 // v8ptr::Alloc &v8::Alloc - _ = v8ptr[0] // v8ptr::Alloc - _ = *v8ptr // v8ptr::Alloc + var v8 [1]int //@ ssa(v8,"&Alloc") + v8[0] = 0 //@ ssa(v8,"&Alloc") + print(v8[:]) //@ ssa(v8,"&Alloc") + _ = v8[0] //@ ssa(v8,"&Alloc") + _ = v8[:][0] //@ ssa(v8,"&Alloc") + v8ptr := &v8 //@ ssa(v8ptr,"Alloc"), ssa(v8,"&Alloc") + _ = v8ptr[0] //@ ssa(v8ptr,"Alloc") + _ = *v8ptr //@ ssa(v8ptr,"Alloc") - v8a := make([]int, 1) // v8a::Slice - v8a[0] = 0 // v8a::Slice - print(v8a[:]) // v8a::Slice + v8a := make([]int, 1) //@ ssa(v8a,"Slice") + v8a[0] = 0 //@ ssa(v8a,"Slice") + print(v8a[:]) //@ ssa(v8a,"Slice") - v9 := S{} // &v9::Alloc + v9 := S{} //@ ssa(v9,"&Alloc") - v10 := &v9 // v10::Alloc &v9::Alloc - _ = v10 // v10::Alloc + v10 := &v9 //@ ssa(v10,"Alloc"), ssa(v9,"&Alloc") + _ = v10 //@ ssa(v10,"Alloc") - var v11 *J = nil // v11::Const - v11.method() // v11::Const + var v11 *J = nil //@ ssa(v11,"Const") + v11.method() //@ ssa(v11,"Const") - var v12 J // &v12::Alloc - v12.method() // &v12::Alloc (implicitly address-taken) + var v12 J //@ ssa(v12,"&Alloc") + v12.method() //@ ssa(v12,"&Alloc") // implicitly address-taken // NB, in the following, 'method' resolves to the *types.Func // of (*J).method, so it doesn't help us locate the specific // ssa.Values here: a bound-method closure and a promotion // wrapper. 
- _ = v11.method // v11::Const - _ = (*struct{ J }).method // J::nil + _ = v11.method //@ ssa(v11,"Const") + _ = (*struct{ J }).method //@ ssa(J,"nil") // These vars are not optimised away. if false { - v13 := 0 // v13::Const - println(v13) // v13::Const + v13 := 0 //@ ssa(v13,"Const") + println(v13) //@ ssa(v13,"Const") } - switch x := 1; x { // x::Const - case v0: // v0::Phi + switch x := 1; x { //@ ssa(x,"Const") + case v0: //@ ssa(v0,"Phi") } - for k, v := range m { // k::Extract v::Extract m::MakeMap - _ = k // k::Extract - v++ // v::BinOp + for k, v := range m { //@ ssa(k,"Extract"), ssa(v,"Extract"), ssa(m,"MakeMap") + _ = k //@ ssa(k,"Extract") + v++ //@ ssa(v,"BinOp") } - if y := 0; y > 1 { // y::Const y::Const + if y := 0; y > 1 { //@ ssa(y,"Const"), ssa(y,"Const") } - var i interface{} // i::Const (nil interface) - i = 1 // i::MakeInterface - switch i := i.(type) { // i::MakeInterface i::MakeInterface + var i interface{} //@ ssa(i,"Const") // nil interface + i = 1 //@ ssa(i,"MakeInterface") + switch i := i.(type) { //@ ssa(i,"MakeInterface"), ssa(i,"MakeInterface") case int: - println(i) // i::Extract + println(i) //@ ssa(i,"Extract") } - ch := make(chan int) // ch::MakeChan + ch := make(chan int) //@ ssa(ch,"MakeChan") select { - case x := <-ch: // x::UnOp (receive) ch::MakeChan - _ = x // x::UnOp + case x := <-ch: //@ ssa(x,"UnOp") /*receive*/, ssa(ch,"MakeChan") + _ = x //@ ssa(x,"UnOp") } // .Op is an inter-package FieldVal-selection. - var err os.PathError // &err::Alloc - _ = err.Op // &err::Alloc &Op::FieldAddr - _ = &err.Op // &err::Alloc &Op::FieldAddr + var err os.PathError //@ ssa(err,"&Alloc") + _ = err.Op //@ ssa(err,"&Alloc"), ssa(Op,"&FieldAddr") + _ = &err.Op //@ ssa(err,"&Alloc"), ssa(Op,"&FieldAddr") // Exercise corner-cases of lvalues vs rvalues. // (Guessing IsAddr from the 'pointerness' won't cut it here.) type N *N - var n N // n::Const - n1 := n // n1::Const n::Const - n2 := &n1 // n2::Alloc &n1::Alloc - n3 := *n2 // n3::UnOp n2::Alloc - n4 := **n3 // n4::UnOp n3::UnOp - _ = n4 // n4::UnOp + var n N //@ ssa(n,"Const") + n1 := n //@ ssa(n1,"Const"), ssa(n,"Const") + n2 := &n1 //@ ssa(n2,"Alloc"), ssa(n1,"&Alloc") + n3 := *n2 //@ ssa(n3,"UnOp"), ssa(n2,"Alloc") + n4 := **n3 //@ ssa(n4,"UnOp"), ssa(n3,"UnOp") + _ = n4 //@ ssa(n4,"UnOp") } diff --git a/ssa/testdata/valueforexpr.go b/ssa/testdata/valueforexpr.go index 0a561c385..d2fbf676c 100644 --- a/ssa/testdata/valueforexpr.go +++ b/ssa/testdata/valueforexpr.go @@ -10,12 +10,13 @@ package main func f(spilled, unspilled int) { _ = /*@UnOp*/ (spilled) _ = /*@Parameter*/ (unspilled) - _ = /*@*/ (1 + 2) // (constant) + _ = /*@nil*/ (1 + 2) // (constant) i := 0 f := func() (int, int) { return 0, 0 } - /*@Call*/ (print( /*@BinOp*/ (i + 1))) + /*@Call*/ + (print( /*@BinOp*/ (i + 1))) _, _ = /*@Call*/ (f()) ch := /*@MakeChan*/ (make(chan int)) /*@UnOp*/ (<-ch) @@ -43,6 +44,7 @@ func f(spilled, unspilled int) { sl := []int{} _ = /*@Slice*/ (sl[:0]) + _ = /*@Alloc*/ (new(int)) tmp := /*@Alloc*/ (new(int)) _ = tmp var iface interface{} @@ -87,6 +89,7 @@ func complit() { _, _, _ = sl1, sl2, sl3 _ = /*@Slice*/ ([]int{}) + _ = /*@Alloc*/ (& /*@Slice*/ ([]int{})) _ = & /*@Slice*/ ([]int{}) // 2. Arrays @@ -115,6 +118,7 @@ func complit() { _, _, _ = m1, m2, m3 _ = /*@MakeMap*/ (M{}) + _ = /*@Alloc*/ (& /*@MakeMap*/ (M{})) _ = & /*@MakeMap*/ (M{}) // 4. 
Structs diff --git a/ssa/testmain.go b/ssa/testmain.go index ea232ada9..8ec15ba50 100644 --- a/ssa/testmain.go +++ b/ssa/testmain.go @@ -8,8 +8,8 @@ package ssa // tests of the supplied packages. // It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing. // -// TODO(adonovan): this file no longer needs to live in the ssa package. -// Move it to ssautil. +// TODO(adonovan): throws this all away now that x/tools/go/packages +// provides access to the actual synthetic test main files. import ( "bytes" @@ -26,6 +26,8 @@ import ( // FindTests returns the Test, Benchmark, and Example functions // (as defined by "go test") defined in the specified package, // and its TestMain function, if any. +// +// Deprecated: use x/tools/go/packages to access synthetic testmain packages. func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { prog := pkg.Prog @@ -109,6 +111,8 @@ func isTest(name, prefix string) bool { // // Subsequent calls to prog.AllPackages include the new package. // The package pkg must belong to the program prog. +// +// Deprecated: use x/tools/go/packages to access synthetic testmain packages. func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { if pkg.Prog != prog { log.Fatal("Package does not belong to Program") diff --git a/ssa/wrappers.go b/ssa/wrappers.go index 701dd90d7..a4ae71d8c 100644 --- a/ssa/wrappers.go +++ b/ssa/wrappers.go @@ -141,13 +141,9 @@ func makeWrapper(prog *Program, sel *types.Selection) *Function { // start is the index of the first regular parameter to use. // func createParams(fn *Function, start int) { - var last *Parameter tparams := fn.Signature.Params() for i, n := start, tparams.Len(); i < n; i++ { - last = fn.addParamObj(tparams.At(i)) - } - if fn.Signature.Variadic() { - last.typ = types.NewSlice(last.typ) + fn.addParamObj(tparams.At(i)) } } From a3c69b9f4e8fa557e8299211954b6596678624d8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 11 May 2019 11:57:43 +0200 Subject: [PATCH 164/254] lint: simplify waiting for dependencies Since we're starting one goroutine per package to be processed there is no longer a need for processPkg to call itself recursively, nor for a deduplicating cache. Replace it with a simple done channel in Package itself. 
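To make the new waiting scheme concrete, here is a self-contained sketch of the idea using simplified, invented types (the real Package and Runner carry far more state): every package owns a done channel that is closed exactly once when its processing finishes, and each per-package goroutine simply receives from the done channels of its imports before doing its own work.

    // Hypothetical sketch of dependency waiting via per-package done channels.
    package main

    import (
        "fmt"
        "sync"
    )

    type pkg struct {
        name    string
        imports []*pkg
        done    chan struct{}
    }

    func process(p *pkg, wg *sync.WaitGroup) {
        defer wg.Done()
        defer close(p.done) // signal all dependents, exactly once
        for _, imp := range p.imports {
            <-imp.done // block until the dependency has been processed
        }
        fmt.Println("processed", p.name)
    }

    func main() {
        a := &pkg{name: "a", done: make(chan struct{})}
        b := &pkg{name: "b", imports: []*pkg{a}, done: make(chan struct{})}

        var wg sync.WaitGroup
        wg.Add(2)
        go process(b, &wg) // start order does not matter; b blocks on a.done
        go process(a, &wg)
        wg.Wait()
    }

Because one goroutine exists per package anyway, a second consumer of the same package does not need a cache or a recursive call; it just blocks on the channel that already exists.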
--- lint/runner.go | 26 ++++---------------------- 1 file changed, 4 insertions(+), 22 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 516b77906..01e20e1a0 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -64,6 +64,7 @@ type Package struct { initial bool fromSource bool hash string + done chan struct{} resultsMu sync.Mutex results []*result @@ -85,17 +86,10 @@ type result struct { ready chan struct{} } -type buildResult struct { - done chan struct{} -} - type Runner struct { ld loader.Loader cache *cache.Cache - builtMu sync.Mutex - built map[*Package]*buildResult - analyzerIDs analyzerIDs // limits parallelism of loading packages @@ -436,7 +430,6 @@ func NewRunner(stats *Stats) (*Runner, error) { return &Runner{ cache: cache, - built: map[*Package]*buildResult{}, stats: stats, }, nil } @@ -489,6 +482,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy results: make([]*result, len(r.analyzerIDs.m)), facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)), pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)), + done: make(chan struct{}), } allPkgs = append(allPkgs, m[l]) for i := range m[l].facts { @@ -674,18 +668,6 @@ func (err analysisError) Error() string { // either from export data or from source. For packages loaded from // source, the provides analyzers will be run on the package. func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { - r.builtMu.Lock() - res := r.built[pkg] - if res != nil { - r.builtMu.Unlock() - <-res.done - return - } - - res = &buildResult{done: make(chan struct{})} - r.built[pkg] = res - r.builtMu.Unlock() - defer func() { // Clear information we no longer need. Make sure to do this // when returning from processPkg so that we clear @@ -695,7 +677,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { pkg.results = nil atomic.AddUint64(&r.stats.ProcessedPackages, 1) - close(res.done) + close(pkg.done) }() if len(pkg.errs) != 0 { } for _, imp := range pkg.Imports { - r.processPkg(imp, analyzers) + <-imp.done if len(imp.errs) > 0 { if imp.initial { pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s", imp, pkg)) From 6b439cc53291c277db89a954b6a87e598699980f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 11 May 2019 14:04:59 +0200 Subject: [PATCH 165/254] unused: fully deduplicate objects Previously, we wouldn't actually deduplicate objects, but simply ignore objects which were known to be duplicated, where the duplicate was marked as used. This, however, would not subsequently mark other objects used. Specifically, for the two subgraphs 'A1 -> B' and 'Root -> A2', A2 would be used, causing A1 to be suppressed, but we would still report B. Now we correctly deduplicate nodes, so that only ever one node 'A' exists, creating the graph 'Root -> A -> B'. This does require us to keep a single graph for all packages even when not operating in whole program mode. However, that makes sense: even the normal mode of operation is, on a smaller scale, a whole program analysis, because a single source package may exist as many different packages in the build system. The difference between whole-program mode and normal mode, now, is only which objects get automatically marked as used. We will need to restore proper parallelism in a later commit.
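The deduplication itself comes down to keying graph nodes on something that is stable across package variants. The sketch below is illustrative only; the key fields and types are invented here, while the real code keys on the object's source position plus its string representation. Looking up either instance of 'A' yields the same node, so marking one instance used is visible through the other, and the edge to 'B' hangs off the node the root already reaches:

    // Hypothetical sketch of object deduplication via a position-based key.
    package main

    import "fmt"

    type objKey struct {
        file string
        line int
        str  string
    }

    type node struct {
        objs map[string]bool // all object instances merged into this node
        used bool
    }

    func main() {
        nodes := map[objKey]*node{}
        get := func(k objKey, obj string) *node {
            n, ok := nodes[k]
            if !ok {
                n = &node{objs: map[string]bool{}}
                nodes[k] = n
            }
            n.objs[obj] = true
            return n
        }

        k := objKey{file: "a.go", line: 10, str: "func a.A()"}
        a1 := get(k, "A seen via the test variant")   // subgraph A1 -> B
        a2 := get(k, "A seen via the normal package") // subgraph Root -> A2

        a2.used = true
        fmt.Println(a1 == a2, a1.used) // true true: marking A2 used also marks A1
    }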
--- unused/unused.go | 246 +++++++++++++++++++++++------------------------ 1 file changed, 123 insertions(+), 123 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index b7be78a16..106cd4d8d 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -118,7 +118,6 @@ import ( - Differences in whole program mode: - - (e1) all packages share a single graph - (e2) types aim to implement all exported interfaces from all packages - (e3) exported identifiers aren't automatically used. for fields and methods this poses extra issues due to reflection. We assume @@ -416,18 +415,11 @@ type Checker struct { initialPackages map[*types.Package]struct{} allPackages map[*types.Package]struct{} fset *token.FileSet - out []types.Object - - seenMu sync.Mutex - seen map[seenKey]struct{} - - // only used in whole-program mode - graph *Graph + graph *Graph } func NewChecker() *Checker { c := &Checker{ - seen: map[seenKey]struct{}{}, initialPackages: map[*types.Package]struct{}{}, allPackages: map[*types.Package]struct{}{}, } @@ -483,28 +475,20 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { c.initialPackages[pkg.Pkg] = struct{}{} c.mu.Unlock() - if c.WholeProgram { - // (e1) all packages share a single graph - c.mu.Lock() - if c.graph == nil { - c.graph = NewGraph() - c.graph.wholeProgram = true - } - // TODO fine-grained locking in whole-program mode - c.processPkg(c.graph, pkg) - c.graph.seenFns = map[string]struct{}{} - c.graph.pkg = nil - c.mu.Unlock() - } else { - graph := NewGraph() - graph.wholeProgram = false - - c.processPkg(graph, pkg) - // guard both c.out as well as c.results - c.mu.Lock() - c.out = append(c.out, c.results(graph)...) - c.mu.Unlock() + // TODO fine-grained locking + c.mu.Lock() + if c.graph == nil { + c.graph = NewGraph() + c.graph.wholeProgram = c.WholeProgram + c.graph.fset = pass.Fset + } + c.processPkg(c.graph, pkg) + c.graph.seenFns = map[string]struct{}{} + if !c.WholeProgram { + c.graph.seenTypes = typeutil.Map{} } + c.graph.pkg = nil + c.mu.Unlock() return nil, nil } @@ -535,22 +519,14 @@ func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Prob } func (c *Checker) Result() []types.Object { - if c.WholeProgram { - c.out = c.results(c.graph) - } + out := c.results(c.graph) - out2 := make([]types.Object, 0, len(c.out)) - for _, v := range c.out { + out2 := make([]types.Object, 0, len(out)) + for _, v := range out { if _, ok := c.initialPackages[v.Pkg()]; !ok { continue } - position := c.fset.PositionFor(v.Pos(), false) - position.Column = 1 - position.Offset = 0 - k := seenKey{v.String(), position} - if _, ok := c.seen[k]; !ok { - out2 = append(out2, v) - } + out2 = append(out2, v) } return out2 } @@ -565,42 +541,44 @@ func (graph *Graph) quieten(node *Node) { if node.seen { return } - switch obj := node.obj.(type) { - case *types.Func: - sig := obj.Type().(*types.Signature) - if sig.Recv() != nil { - if node, ok := graph.nodeMaybe(sig.Recv()); ok { - node.quiet = true + for obj := range node.objs { + switch obj := obj.(type) { + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() != nil { + if node, ok := graph.nodeMaybe(sig.Recv()); ok { + node.quiet = true + } } - } - for i := 0; i < sig.Params().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { - node.quiet = true + for i := 0; i < sig.Params().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { + node.quiet = true + } } - } - for i := 0; i < sig.Results().Len(); i++ { - if node, ok := 
graph.nodeMaybe(sig.Results().At(i)); ok { - node.quiet = true + for i := 0; i < sig.Results().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { + node.quiet = true + } } - } - case *types.Named: - for i := 0; i < obj.NumMethods(); i++ { - m := obj.Method(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true + case *types.Named: + for i := 0; i < obj.NumMethods(); i++ { + m := obj.Method(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } } - } - case *types.Struct: - for i := 0; i < obj.NumFields(); i++ { - if node, ok := graph.nodeMaybe(obj.Field(i)); ok { - node.quiet = true + case *types.Struct: + for i := 0; i < obj.NumFields(); i++ { + if node, ok := graph.nodeMaybe(obj.Field(i)); ok { + node.quiet = true + } } - } - case *types.Interface: - for i := 0; i < obj.NumExplicitMethods(); i++ { - m := obj.ExplicitMethod(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true + case *types.Interface: + for i := 0; i < obj.NumExplicitMethods(); i++ { + m := obj.ExplicitMethod(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true + } } } } @@ -649,10 +627,10 @@ func (c *Checker) results(graph *Graph) []types.Object { if c.Debug != nil { debugNode := func(node *Node) { - if node.obj == nil { + if len(node.objs) == 0 { c.debugf("n%d [label=\"Root\"];\n", node.id) } else { - c.debugf("n%d [label=%q];\n", node.id, node.obj) + c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.anyObj(), node.anyObj())) } for used, reasons := range node.used { for _, reason := range reasons { @@ -687,33 +665,6 @@ func (c *Checker) results(graph *Graph) []types.Object { report := func(node *Node) { if node.seen { - var pos token.Pos - if obj, ok := node.obj.(types.Object); ok { - pos = obj.Pos() - - if pos != token.NoPos { - position := c.fset.PositionFor(pos, false) - // All packages passed on the command line are being - // loaded from source. However, thanks to tests and - // test variants of packages, we encounter the same - // object many different times. Worse, some of these - // forms may have been loaded from export data - // (despite being a variant of a package we've loaded - // from source…). Objects from export data do not have - // column information, so we force it to one, so that - // objects loaded from source and from export have the - // same position. - // - // Similarly, the "offset" differs, too. - - position.Column = 1 - position.Offset = 0 - k := seenKey{obj.String(), position} - c.seenMu.Lock() - c.seen[k] = struct{}{} - c.seenMu.Unlock() - } - } return } if node.quiet { @@ -722,7 +673,7 @@ func (c *Checker) results(graph *Graph) []types.Object { } c.debugf("n%d [color=red];\n", node.id) - switch obj := node.obj.(type) { + switch obj := node.anyObj().(type) { case *types.Var: // don't report unnamed variables (receivers, interface embedding) if obj.Name() != "" || obj.IsField() { @@ -753,7 +704,30 @@ func (c *Checker) processPkg(graph *Graph, pkg *pkg) { graph.entry(pkg) } +func objNodeKeyFor(fset *token.FileSet, obj types.Object) objNodeKey { + position := fset.PositionFor(obj.Pos(), false) + position.Column = 0 + position.Offset = 0 + return objNodeKey{ + position: position, + str: fmt.Sprint(obj), + } +} + +// An objNodeKey describes a types.Object node in the graph. +// +// Due to test variants we may end up with multiple instances of the +// same object, which is why we have to deduplicate based on their +// source position. 
And because export data lacks column information, +// we also have to incorporate the object's string representation in +// the key. +type objNodeKey struct { + position token.Position + str string +} + type Graph struct { + fset *token.FileSet pkg *ssa.Package msCache typeutil.MethodSetCache @@ -764,6 +738,7 @@ type Graph struct { Root *Node TypeNodes typeutil.Map Nodes map[interface{}]*Node + objNodes map[objNodeKey]*Node seenTypes typeutil.Map seenFns map[string]struct{} @@ -771,8 +746,9 @@ type Graph struct { func NewGraph() *Graph { g := &Graph{ - Nodes: map[interface{}]*Node{}, - seenFns: map[string]struct{}{}, + Nodes: map[interface{}]*Node{}, + objNodes: map[objNodeKey]*Node{}, + seenFns: map[string]struct{}{}, } g.Root = g.newNode(nil) return g @@ -796,7 +772,7 @@ type ConstGroup struct { func (ConstGroup) String() string { return "const group" } type Node struct { - obj interface{} + objs map[interface{}]struct{} id int used map[*Node][]string @@ -804,16 +780,22 @@ type Node struct { quiet bool } -func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { - if t, ok := obj.(types.Type); ok { - if v := g.TypeNodes.At(t); v != nil { - return v.(*Node), true - } - return nil, false +func (n *Node) anyObj() interface{} { + for k := range n.objs { + return k } + return nil +} + +func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { if node, ok := g.Nodes[obj]; ok { return node, true } + key := objNodeKeyFor(g.fset, obj) + if node, ok := g.objNodes[key]; ok { + node.objs[obj] = struct{}{} + return node, true + } return nil, false } @@ -822,22 +804,44 @@ func (g *Graph) node(obj interface{}) (node *Node, new bool) { if v := g.TypeNodes.At(t); v != nil { return v.(*Node), false } - node := g.newNode(obj) + node := g.newNode(t) g.TypeNodes.Set(t, node) return node, true } + if node, ok := g.Nodes[obj]; ok { return node, false } + if obj, ok := obj.(types.Object); ok { + key := objNodeKeyFor(g.fset, obj) + if node, ok := g.objNodes[key]; ok { + node.objs[obj] = struct{}{} + // We've deduplicated an object, but it's technically + // still "new", because by processing it, we may discover + // more objects. However, the "new" information is only + // used in assertions to guarantee we're not adding new + // objects to the graph by accident, so we're returning + // false. + return node, false + } + } node = g.newNode(obj) g.Nodes[obj] = node + if obj, ok := obj.(types.Object); ok { + key := objNodeKeyFor(g.fset, obj) + g.objNodes[key] = node + } return node, true } func (g *Graph) newNode(obj interface{}) *Node { g.nodeCounter++ + var objs map[interface{}]struct{} + if obj != nil { + objs = map[interface{}]struct{}{obj: struct{}{}} + } return &Node{ - obj: obj, + objs: objs, id: g.nodeCounter, used: map[*Node][]string{}, } @@ -964,7 +968,7 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { // object isn't exported, the question is moot return false } - path := g.pkg.Prog.Fset.Position(obj.Pos()).Filename + path := g.fset.Position(obj.Pos()).Filename if g.wholeProgram { // Example functions without "Output:" comments aren't being // run and thus don't show up in the graph. 
@@ -1070,7 +1074,6 @@ func (g *Graph) entry(pkg *pkg) { } // Find constants being used inside functions, find sinks in tests - handledConsts := map[*ast.Ident]struct{}{} for _, fn := range pkg.SrcFuncs { if fn.Object() != nil { g.see(fn.Object()) @@ -1100,7 +1103,7 @@ func (g *Graph) entry(pkg *pkg) { if obj == nil { continue } - path := g.pkg.Prog.Fset.File(obj.Pos()).Name() + path := g.fset.File(obj.Pos()).Name() if strings.HasSuffix(path, "_test.go") { if obj.Parent() != nil && obj.Parent().Parent() != nil && obj.Parent().Parent().Parent() == nil { // object's scope is the package, whose @@ -1118,14 +1121,11 @@ func (g *Graph) entry(pkg *pkg) { }) } // Find constants being used in non-function contexts - for ident, obj := range pkg.TypesInfo.Uses { + for _, obj := range pkg.TypesInfo.Uses { _, ok := obj.(*types.Const) if !ok { continue } - if _, ok := handledConsts[ident]; ok { - continue - } g.seeAndUse(obj, nil, "used constant") } @@ -1271,8 +1271,8 @@ func (g *Graph) entry(pkg *pkg) { } if !g.wholeProgram { - // When not in whole program mode we process one package per - // graph, which means g.seenTypes only contains types of + // When not in whole program mode we reset seenTypes after each package, + // which means g.seenTypes only contains types of // interest to us. In whole program mode, we're better off // processing all interfaces at once, globally, both for // performance reasons and because in whole program mode we From 6a3ba50918e2ff32f618b1903f425df86563bf87 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 11 May 2019 17:16:11 +0200 Subject: [PATCH 166/254] unused: quieten arguments etc of anonymous functions We do not have a types.Object for the anonymous function itself, only for its signature. --- unused/testdata/src/quiet/quiet.go | 8 ++++++++ unused/unused.go | 4 ++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/unused/testdata/src/quiet/quiet.go b/unused/testdata/src/quiet/quiet.go index 42cca0dfd..4a31c2685 100644 --- a/unused/testdata/src/quiet/quiet.go +++ b/unused/testdata/src/quiet/quiet.go @@ -19,3 +19,11 @@ type t3 struct { // want `t3` a int b int } + +type T struct{} + +func fn1() { // want `fn1` + meh := func(arg T) { + } + meh(T{}) +} diff --git a/unused/unused.go b/unused/unused.go index 106cd4d8d..045a242d2 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -543,8 +543,8 @@ func (graph *Graph) quieten(node *Node) { } for obj := range node.objs { switch obj := obj.(type) { - case *types.Func: - sig := obj.Type().(*types.Signature) + case *types.Signature: + sig := obj if sig.Recv() != nil { if node, ok := graph.nodeMaybe(sig.Recv()); ok { node.quiet = true From 2325d76ce71b97c2e92a947571308bb920c7741c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 11 May 2019 17:16:42 +0200 Subject: [PATCH 167/254] unused: don't crash if no packages were processed --- unused/unused.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 045a242d2..d63122afb 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -585,13 +585,14 @@ func (graph *Graph) quieten(node *Node) { } func (c *Checker) results(graph *Graph) []types.Object { + if graph == nil { + // We never analyzed any packages + return nil + } + var out []types.Object if c.WholeProgram { - if graph == nil { - // We never analyzed any packages - return nil - } var ifaces []*types.Interface var notIfaces []types.Type From ed6c989c1accd5948423a17b0148dfd5644cea37 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: 
Sat, 11 May 2019 18:12:56 +0200 Subject: [PATCH 168/254] unused: express duplicate objects explicitly in the graph Instead of deduplicating nodes based on objects, just make duplicate nodes use each other. This simplifies the logic and makes debug graphs easier to read. --- unused/unused.go | 110 ++++++++++++++++++----------------------------- 1 file changed, 42 insertions(+), 68 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index d63122afb..b19834a72 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -541,44 +541,42 @@ func (graph *Graph) quieten(node *Node) { if node.seen { return } - for obj := range node.objs { - switch obj := obj.(type) { - case *types.Signature: - sig := obj - if sig.Recv() != nil { - if node, ok := graph.nodeMaybe(sig.Recv()); ok { - node.quiet = true - } + switch obj := node.obj.(type) { + case *types.Signature: + sig := obj + if sig.Recv() != nil { + if node, ok := graph.nodeMaybe(sig.Recv()); ok { + node.quiet = true } - for i := 0; i < sig.Params().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { - node.quiet = true - } + } + for i := 0; i < sig.Params().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { + node.quiet = true } - for i := 0; i < sig.Results().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { - node.quiet = true - } + } + for i := 0; i < sig.Results().Len(); i++ { + if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { + node.quiet = true } - case *types.Named: - for i := 0; i < obj.NumMethods(); i++ { - m := obj.Method(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true - } + } + case *types.Named: + for i := 0; i < obj.NumMethods(); i++ { + m := obj.Method(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true } - case *types.Struct: - for i := 0; i < obj.NumFields(); i++ { - if node, ok := graph.nodeMaybe(obj.Field(i)); ok { - node.quiet = true - } + } + case *types.Struct: + for i := 0; i < obj.NumFields(); i++ { + if node, ok := graph.nodeMaybe(obj.Field(i)); ok { + node.quiet = true } - case *types.Interface: - for i := 0; i < obj.NumExplicitMethods(); i++ { - m := obj.ExplicitMethod(i) - if node, ok := graph.nodeMaybe(m); ok { - node.quiet = true - } + } + case *types.Interface: + for i := 0; i < obj.NumExplicitMethods(); i++ { + m := obj.ExplicitMethod(i) + if node, ok := graph.nodeMaybe(m); ok { + node.quiet = true } } } @@ -628,10 +626,10 @@ func (c *Checker) results(graph *Graph) []types.Object { if c.Debug != nil { debugNode := func(node *Node) { - if len(node.objs) == 0 { + if node.obj == nil { c.debugf("n%d [label=\"Root\"];\n", node.id) } else { - c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.anyObj(), node.anyObj())) + c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.obj, node.obj)) } for used, reasons := range node.used { for _, reason := range reasons { @@ -674,7 +672,7 @@ func (c *Checker) results(graph *Graph) []types.Object { } c.debugf("n%d [color=red];\n", node.id) - switch obj := node.anyObj().(type) { + switch obj := node.obj.(type) { case *types.Var: // don't report unnamed variables (receivers, interface embedding) if obj.Name() != "" || obj.IsField() { @@ -773,7 +771,7 @@ type ConstGroup struct { func (ConstGroup) String() string { return "const group" } type Node struct { - objs map[interface{}]struct{} + obj interface{} id int used map[*Node][]string @@ -781,22 +779,10 @@ type Node struct { quiet bool } -func (n *Node) anyObj() interface{} { - for k := range n.objs { 
- return k - } - return nil -} - func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { if node, ok := g.Nodes[obj]; ok { return node, true } - key := objNodeKeyFor(g.fset, obj) - if node, ok := g.objNodes[key]; ok { - node.objs[obj] = struct{}{} - return node, true - } return nil, false } @@ -813,36 +799,24 @@ func (g *Graph) node(obj interface{}) (node *Node, new bool) { if node, ok := g.Nodes[obj]; ok { return node, false } - if obj, ok := obj.(types.Object); ok { - key := objNodeKeyFor(g.fset, obj) - if node, ok := g.objNodes[key]; ok { - node.objs[obj] = struct{}{} - // We've deduplicated an object, but it's technically - // still "new", because by processing it, we may discover - // more objects. However, the "new" information is only - // used in assertions to guarantee we're not adding new - // objects to the graph by accident, so we're returning - // false. - return node, false - } - } node = g.newNode(obj) g.Nodes[obj] = node if obj, ok := obj.(types.Object); ok { key := objNodeKeyFor(g.fset, obj) - g.objNodes[key] = node + if onode, ok := g.objNodes[key]; ok { + node.used[onode] = append(node.used[onode], "same object") + onode.used[node] = append(onode.used[node], "same object") + } else { + g.objNodes[key] = node + } } return node, true } func (g *Graph) newNode(obj interface{}) *Node { g.nodeCounter++ - var objs map[interface{}]struct{} - if obj != nil { - objs = map[interface{}]struct{}{obj: struct{}{}} - } return &Node{ - objs: objs, + obj: obj, id: g.nodeCounter, used: map[*Node][]string{}, } From f25b0e67643ed14850752a4a591557607c43d2dd Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 11 May 2019 20:31:23 +0200 Subject: [PATCH 169/254] unused: take special care when deduplicating types Deduplicating types requires some special considerations. The first issue is that multiple functions may share the same signature, but we're still adding individual receivers and parameters to the graph. We must be careful to not miss some of them when ignoring nodes. The second issue involves type identity of interface types. types.Identical considers the underlying types of I1 and I2 identical in the following code: type I1 interface { Fn() } type I2 interface { I1 } Thus, if we first process I1's underlying type and then I2's, we would miss the fact that I2 uses I1. We work around this by handling interfaces specially in our own implementation of Implements. Specifically, we fall back to pointer equality. This will cause the graph to grow more, but it should have no effect on correctness. This commit also changes graph edge labels from strings to enums. This change was part of an earlier experiment. It isn't required for the correctness of this change, but we liked it enough to keep it, anyway. 
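
[Editor's note: the following is not part of the patch.] A minimal, hypothetical sketch, using only the standard library (the package name "p" and the method name "foo" are made up for illustration), of the interface-identity issue the commit message describes: types.Identical compares interfaces by their full method sets, so it reports the underlying types of I1 and I2 as identical even though I2 merely embeds I1, which is exactly the embedding edge the graph would otherwise lose.

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"
    )

    func main() {
        const src = `package p

    type I1 interface{ foo() }
    type I2 interface{ I1 }
    `
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }
        // src has no imports, so the zero-value Config (nil Importer) is enough.
        var conf types.Config
        pkg, err := conf.Check("p", fset, []*ast.File{f}, nil)
        if err != nil {
            panic(err)
        }
        u1 := pkg.Scope().Lookup("I1").Type().Underlying()
        u2 := pkg.Scope().Lookup("I2").Type().Underlying()
        // types.Identical only compares the combined method sets, so the fact
        // that I2 embeds I1 is invisible at this point.
        fmt.Println(types.Identical(u1, u2)) // prints: true
    }

Under that assumption, processing I1's underlying interface first and then skipping I2's "identical" underlying type would drop the I2-uses-I1 edge; comparing interfaces by pointer, as this patch does, avoids that at the cost of a larger graph.
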
--- go/types/typeutil/identical.go | 59 +++-- go/types/typeutil/map.go | 4 +- lint/lint.go | 2 + lint/lintutil/util.go | 2 + lint/stats.go | 1 + unused/edge.go | 53 +++++ unused/edge_string.go | 107 ++++++++++ unused/testdata/src/interfaces2/interfaces.go | 12 ++ unused/testdata/src/quiet/quiet.go | 5 + unused/unused.go | 202 +++++++++--------- 10 files changed, 333 insertions(+), 114 deletions(-) create mode 100644 unused/edge.go create mode 100644 unused/edge_string.go create mode 100644 unused/testdata/src/interfaces2/interfaces.go diff --git a/go/types/typeutil/identical.go b/go/types/typeutil/identical.go index 7eda29463..2236a953a 100644 --- a/go/types/typeutil/identical.go +++ b/go/types/typeutil/identical.go @@ -6,24 +6,55 @@ import ( // Identical reports whether x and y are identical types. // Unlike types.Identical, receivers of Signature types are not ignored. +// Unlike types.Identical, interfaces are compared via pointer equality. func Identical(x, y types.Type) (ret bool) { if !types.Identical(x, y) { return false } - sigX, ok := x.(*types.Signature) - if !ok { - return true - } - sigY, ok := y.(*types.Signature) - if !ok { - // should be impossible - return true - } - if sigX.Recv() == sigY.Recv() { + + switch x := x.(type) { + case *types.Interface: + // The issue with interfaces, typeutil.Map and types.Identical + // + // types.Identical, when comparing two interfaces, only looks at the set + // of all methods, not differentiating between implicit (embedded) and + // explicit methods. + // + // When we see the following two types, in source order + // + // type I1 interface { foo() } + // type I2 interface { I1 } + // + // then we will first correctly process I1 and its underlying type. When + // we get to I2, we will see that its underlying type is identical to + // that of I1 and not process it again. This, however, means that we will + // not record the fact that I2 embeds I1. If only I2 is reachable via the + // graph root, then I1 will not be considered used. + // + // We choose to be lazy and compare interfaces by their + // pointers. This will obviously miss identical interfaces, + // but this only has a runtime cost, it doesn't affect + // correctness. + y, ok := y.(*types.Interface) + if !ok { + // should be impossible + return true + } + return x == y + case *types.Signature: + y, ok := y.(*types.Signature) + if !ok { + // should be impossible + return true + } + if x.Recv() == y.Recv() { + return true + } + if x.Recv() == nil || y.Recv() == nil { + return false + } + return Identical(x.Recv().Type(), y.Recv().Type()) + default: return true } - if sigX.Recv() == nil || sigY.Recv() == nil { - return false - } - return Identical(sigX.Recv().Type(), sigY.Recv().Type()) } diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go index db0b3bce7..1b6a19804 100644 --- a/go/types/typeutil/map.go +++ b/go/types/typeutil/map.go @@ -23,7 +23,9 @@ import ( // // Not thread-safe. // -// This fork handles Signatures correctly, respecting method receivers. +// This fork handles Signatures correctly, respecting method +// receivers. Furthermore, it differentiates between implicit and +// explicit methods in interfaces. 
// type Map struct { hasher Hasher // shared by many Maps diff --git a/lint/lint.go b/lint/lint.go index de22a02c7..681bcc329 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -11,6 +11,7 @@ import ( "sort" "strings" "sync" + "sync/atomic" "unicode" "golang.org/x/tools/go/analysis" @@ -165,6 +166,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error } } + atomic.StoreUint64(&r.stats.State, StateCumulative) var problems []Problem for _, cum := range l.CumulativeCheckers { for _, res := range cum.Result() { diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 9ed4d7f57..165815695 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -331,6 +331,8 @@ func Lint(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, paths []string atomic.LoadUint64(&l.Stats.TotalWorkers), atomic.LoadUint64(&l.Stats.Problems), ) + case lint.StateCumulative: + fmt.Fprintln(os.Stderr, "Status: processing cumulative checkers") } } if len(infoSignals) > 0 { diff --git a/lint/stats.go b/lint/stats.go index d975517ff..8f284a8e9 100644 --- a/lint/stats.go +++ b/lint/stats.go @@ -4,6 +4,7 @@ const ( StateInitializing = 0 StateGraph = 1 StateProcessing = 2 + StateCumulative = 3 ) type Stats struct { diff --git a/unused/edge.go b/unused/edge.go new file mode 100644 index 000000000..3181b0e20 --- /dev/null +++ b/unused/edge.go @@ -0,0 +1,53 @@ +package unused + +//go:generate stringer -type edge +type edge uint64 + +func (e edge) is(o edge) bool { + return e&o != 0 +} + +const ( + edgeAlias edge = 1 << iota + edgeBlankField + edgeCgoExported + edgeConstGroup + edgeElementType + edgeEmbeddedInterface + edgeExportedConstant + edgeExportedField + edgeExportedFunction + edgeExportedMethod + edgeExportedType + edgeExportedVariable + edgeExtendsExportedFields + edgeExtendsExportedMethodSet + edgeFieldAccess + edgeFunctionArgument + edgeFunctionResult + edgeFunctionSignature + edgeImplements + edgeInstructionOperand + edgeInterfaceCall + edgeInterfaceMethod + edgeKeyType + edgeLinkname + edgeMainFunction + edgeNamedType + edgeNetRPCRegister + edgeNoCopySentinel + edgeProvidesMethod + edgeReceiver + edgeRuntimeFunction + edgeSameObject + edgeSignature + edgeStructConversion + edgeTestSink + edgeTupleElement + edgeType + edgeTypeName + edgeUnderlyingType + edgeUnsafeConversion + edgeUsedConstant + edgeVarDecl +) diff --git a/unused/edge_string.go b/unused/edge_string.go new file mode 100644 index 000000000..17d431702 --- /dev/null +++ b/unused/edge_string.go @@ -0,0 +1,107 @@ +// Code generated by "stringer -type edge"; DO NOT EDIT. + +package unused + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[edgeAlias-1] + _ = x[edgeBlankField-2] + _ = x[edgeCgoExported-4] + _ = x[edgeConstGroup-8] + _ = x[edgeElementType-16] + _ = x[edgeEmbeddedInterface-32] + _ = x[edgeExportedConstant-64] + _ = x[edgeExportedField-128] + _ = x[edgeExportedFunction-256] + _ = x[edgeExportedMethod-512] + _ = x[edgeExportedType-1024] + _ = x[edgeExportedVariable-2048] + _ = x[edgeExtendsExportedFields-4096] + _ = x[edgeExtendsExportedMethodSet-8192] + _ = x[edgeFieldAccess-16384] + _ = x[edgeFunctionArgument-32768] + _ = x[edgeFunctionResult-65536] + _ = x[edgeFunctionSignature-131072] + _ = x[edgeImplements-262144] + _ = x[edgeInstructionOperand-524288] + _ = x[edgeInterfaceCall-1048576] + _ = x[edgeInterfaceMethod-2097152] + _ = x[edgeKeyType-4194304] + _ = x[edgeLinkname-8388608] + _ = x[edgeMainFunction-16777216] + _ = x[edgeNamedType-33554432] + _ = x[edgeNetRPCRegister-67108864] + _ = x[edgeNoCopySentinel-134217728] + _ = x[edgeProvidesMethod-268435456] + _ = x[edgeReceiver-536870912] + _ = x[edgeRuntimeFunction-1073741824] + _ = x[edgeSameObject-2147483648] + _ = x[edgeSignature-4294967296] + _ = x[edgeStructConversion-8589934592] + _ = x[edgeTestSink-17179869184] + _ = x[edgeTupleElement-34359738368] + _ = x[edgeType-68719476736] + _ = x[edgeTypeName-137438953472] + _ = x[edgeUnderlyingType-274877906944] + _ = x[edgeUnsafeConversion-549755813888] + _ = x[edgeUsedConstant-1099511627776] + _ = x[edgeVarDecl-2199023255552] +} + +const _edge_name = "edgeAliasedgeBlankFieldedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" + +var _edge_map = map[edge]string{ + 1: _edge_name[0:9], + 2: _edge_name[9:23], + 4: _edge_name[23:38], + 8: _edge_name[38:52], + 16: _edge_name[52:67], + 32: _edge_name[67:88], + 64: _edge_name[88:108], + 128: _edge_name[108:125], + 256: _edge_name[125:145], + 512: _edge_name[145:163], + 1024: _edge_name[163:179], + 2048: _edge_name[179:199], + 4096: _edge_name[199:224], + 8192: _edge_name[224:252], + 16384: _edge_name[252:267], + 32768: _edge_name[267:287], + 65536: _edge_name[287:305], + 131072: _edge_name[305:326], + 262144: _edge_name[326:340], + 524288: _edge_name[340:362], + 1048576: _edge_name[362:379], + 2097152: _edge_name[379:398], + 4194304: _edge_name[398:409], + 8388608: _edge_name[409:421], + 16777216: _edge_name[421:437], + 33554432: _edge_name[437:450], + 67108864: _edge_name[450:468], + 134217728: _edge_name[468:486], + 268435456: _edge_name[486:504], + 536870912: _edge_name[504:516], + 1073741824: _edge_name[516:535], + 2147483648: _edge_name[535:549], + 4294967296: _edge_name[549:562], + 8589934592: _edge_name[562:582], + 17179869184: _edge_name[582:594], + 34359738368: _edge_name[594:610], + 68719476736: _edge_name[610:618], + 137438953472: _edge_name[618:630], + 274877906944: _edge_name[630:648], + 549755813888: _edge_name[648:668], + 1099511627776: _edge_name[668:684], + 2199023255552: _edge_name[684:695], +} + +func (i edge) 
String() string { + if str, ok := _edge_map[i]; ok { + return str + } + return "edge(" + strconv.FormatInt(int64(i), 10) + ")" +} diff --git a/unused/testdata/src/interfaces2/interfaces.go b/unused/testdata/src/interfaces2/interfaces.go new file mode 100644 index 000000000..d038ef699 --- /dev/null +++ b/unused/testdata/src/interfaces2/interfaces.go @@ -0,0 +1,12 @@ +package pkg + +type I interface { + foo() +} + +type T struct{} + +func (T) foo() {} +func (T) bar() {} // want `bar` + +var _ struct{ T } diff --git a/unused/testdata/src/quiet/quiet.go b/unused/testdata/src/quiet/quiet.go index 4a31c2685..82f8479b8 100644 --- a/unused/testdata/src/quiet/quiet.go +++ b/unused/testdata/src/quiet/quiet.go @@ -27,3 +27,8 @@ func fn1() { // want `fn1` } meh(T{}) } + +type localityList []int // want `localityList` + +func (l *localityList) Fn1() {} +func (l *localityList) Fn2() {} diff --git a/unused/unused.go b/unused/unused.go index b19834a72..47804cfcb 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -542,23 +542,6 @@ func (graph *Graph) quieten(node *Node) { return } switch obj := node.obj.(type) { - case *types.Signature: - sig := obj - if sig.Recv() != nil { - if node, ok := graph.nodeMaybe(sig.Recv()); ok { - node.quiet = true - } - } - for i := 0; i < sig.Params().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Params().At(i)); ok { - node.quiet = true - } - } - for i := 0; i < sig.Results().Len(); i++ { - if node, ok := graph.nodeMaybe(sig.Results().At(i)); ok { - node.quiet = true - } - } case *types.Named: for i := 0; i < obj.NumMethods(); i++ { m := obj.Method(i) @@ -617,7 +600,7 @@ func (c *Checker) results(graph *Graph) []types.Object { for _, iface := range ifaces { if sels, ok := graph.implements(t, iface, ms); ok { for _, sel := range sels { - graph.useMethod(t, sel, t, "implements") + graph.useMethod(t, sel, t, edgeImplements) } } } @@ -631,9 +614,11 @@ func (c *Checker) results(graph *Graph) []types.Object { } else { c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.obj, node.obj)) } - for used, reasons := range node.used { - for _, reason := range reasons { - c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, reason) + for used, e := range node.used { + for i := edge(1); i < 64; i++ { + if e.is(1 << i) { + c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, edge(1< Date: Sun, 12 May 2019 21:10:23 +0200 Subject: [PATCH 170/254] unused: named types use their pointer types --- unused/edge.go | 1 + unused/edge_string.go | 16 +++++++++------- unused/unused.go | 7 +++++++ 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/unused/edge.go b/unused/edge.go index 3181b0e20..b155b03ec 100644 --- a/unused/edge.go +++ b/unused/edge.go @@ -47,6 +47,7 @@ const ( edgeType edgeTypeName edgeUnderlyingType + edgePointerType edgeUnsafeConversion edgeUsedConstant edgeVarDecl diff --git a/unused/edge_string.go b/unused/edge_string.go index 17d431702..bde3ebb86 100644 --- a/unused/edge_string.go +++ b/unused/edge_string.go @@ -47,12 +47,13 @@ func _() { _ = x[edgeType-68719476736] _ = x[edgeTypeName-137438953472] _ = x[edgeUnderlyingType-274877906944] - _ = x[edgeUnsafeConversion-549755813888] - _ = x[edgeUsedConstant-1099511627776] - _ = x[edgeVarDecl-2199023255552] + _ = x[edgePointerType-549755813888] + _ = x[edgeUnsafeConversion-1099511627776] + _ = x[edgeUsedConstant-2199023255552] + _ = x[edgeVarDecl-4398046511104] } -const _edge_name = 
"edgeAliasedgeBlankFieldedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" +const _edge_name = "edgeAliasedgeBlankFieldedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" var _edge_map = map[edge]string{ 1: _edge_name[0:9], @@ -94,9 +95,10 @@ var _edge_map = map[edge]string{ 68719476736: _edge_name[610:618], 137438953472: _edge_name[618:630], 274877906944: _edge_name[630:648], - 549755813888: _edge_name[648:668], - 1099511627776: _edge_name[668:684], - 2199023255552: _edge_name[684:695], + 549755813888: _edge_name[648:663], + 1099511627776: _edge_name[663:683], + 2199023255552: _edge_name[683:699], + 4398046511104: _edge_name[699:710], } func (i edge) String() string { diff --git a/unused/unused.go b/unused/unused.go index 47804cfcb..14f6f7513 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -42,6 +42,10 @@ import ( - (2.3) all their aliases. we can't easily track uses of aliases because go/types turns them into uses of the aliased types. assume that if a type is used, so are all of its aliases. + - (2.4) the pointer type. this aids with eagerly implementing + interfaces. if a method that implements an interface is defined on + a pointer receiver, and the pointer type is never used, but the + named type is, then we still want to mark the method as used. 
- variables and constants use: - their types @@ -1420,6 +1424,9 @@ func (g *Graph) typ(t types.Type) { g.seeAndUse(t.Obj(), t, edgeTypeName) g.seeAndUse(t, t.Obj(), edgeNamedType) + // (2.4) named types use the pointer type + g.seeAndUse(types.NewPointer(t), t, edgePointerType) + for i := 0; i < t.NumMethods(); i++ { g.see(t.Method(i)) // don't use trackExportedIdentifier here, we care about From 6fffb53510f50bed9a4a68f43844ac6f65154d7c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 May 2019 04:19:27 +0200 Subject: [PATCH 171/254] unused: add fine-grained locking --- cmd/staticcheck/staticcheck.go | 5 +- unused/unused.go | 446 ++++++++++++++++++--------------- unused/unused_test.go | 2 +- 3 files changed, 250 insertions(+), 203 deletions(-) diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 6400eb383..78534c8b3 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -29,10 +29,7 @@ func main() { cs = append(cs, v) } - u := unused.NewChecker() - if *wholeProgram { - u.WholeProgram = true - } + u := unused.NewChecker(*wholeProgram) cums := []lint.CumulativeChecker{u} lintutil.ProcessFlagSet(cs, cums, fs) } diff --git a/unused/unused.go b/unused/unused.go index 14f6f7513..d89add9f1 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -8,6 +8,7 @@ import ( "io" "strings" "sync" + "sync/atomic" "golang.org/x/tools/go/analysis" "honnef.co/go/tools/go/types/typeutil" @@ -418,17 +419,15 @@ type Checker struct { mu sync.Mutex initialPackages map[*types.Package]struct{} allPackages map[*types.Package]struct{} - fset *token.FileSet graph *Graph } -func NewChecker() *Checker { - c := &Checker{ +func NewChecker(wholeProgram bool) *Checker { + return &Checker{ initialPackages: map[*types.Package]struct{}{}, allPackages: map[*types.Package]struct{}{}, + WholeProgram: wholeProgram, } - - return c } func (c *Checker) Analyzer() *analysis.Analyzer { @@ -446,6 +445,12 @@ func (c *Checker) Analyzer() *analysis.Analyzer { func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { c.mu.Lock() + if c.graph == nil { + c.graph = NewGraph() + c.graph.wholeProgram = c.WholeProgram + c.graph.fset = pass.Fset + } + var visit func(pkg *types.Package) visit = func(pkg *types.Package) { if _, ok := c.allPackages[pkg]; ok { @@ -457,6 +462,8 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { } } visit(pass.Pkg) + + c.initialPackages[pass.Pkg] = struct{}{} c.mu.Unlock() ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) @@ -470,29 +477,7 @@ func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { SrcFuncs: ssapkg.SrcFuncs, } - c.mu.Lock() - if c.fset == nil { - c.fset = pass.Fset - } else { - assert(c.fset == pass.Fset) - } - c.initialPackages[pkg.Pkg] = struct{}{} - c.mu.Unlock() - - // TODO fine-grained locking - c.mu.Lock() - if c.graph == nil { - c.graph = NewGraph() - c.graph.wholeProgram = c.WholeProgram - c.graph.fset = pass.Fset - } c.processPkg(c.graph, pkg) - c.graph.seenFns = map[string]struct{}{} - if !c.WholeProgram { - c.graph.seenTypes = typeutil.Map{} - } - c.graph.pkg = nil - c.mu.Unlock() return nil, nil } @@ -532,6 +517,7 @@ func (c *Checker) Result() []types.Object { } out2 = append(out2, v) } + return out2 } @@ -597,14 +583,19 @@ func (c *Checker) results(graph *Graph) []types.Object { ifaces = append(ifaces, interfacesFromExportData(pkg)...) 
} + ctx := &context{ + g: graph, + seenTypes: &graph.seenTypes, + nodes: map[interface{}]*Node{}, + } // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { - ms := graph.msCache.MethodSet(t) + ms := types.NewMethodSet(t) for _, iface := range ifaces { if sels, ok := graph.implements(t, iface, ms); ok { for _, sel := range sels { - graph.useMethod(t, sel, t, edgeImplements) + graph.useMethod(ctx, t, sel, t, edgeImplements) } } } @@ -721,30 +712,43 @@ type objNodeKey struct { } type Graph struct { - fset *token.FileSet - pkg *ssa.Package - msCache typeutil.MethodSetCache + // Safe for concurrent use + fset *token.FileSet + Root *Node + seenTypes typeutil.Map + // read-only wholeProgram bool - nodeCounter int - - Root *Node - TypeNodes typeutil.Map + // need synchronisation + mu sync.Mutex Nodes map[interface{}]*Node + TypeNodes typeutil.Map objNodes map[objNodeKey]*Node - seenTypes typeutil.Map - seenFns map[string]struct{} + // accessed atomically + nodeOffset uint64 +} + +type context struct { + g *Graph + pkg *pkg + seenFns map[string]struct{} + seenTypes *typeutil.Map + nodeCounter uint64 + msCache typeutil.MethodSetCache + + // these act as local, lock-free caches for the maps in Graph. + typeNodes typeutil.Map + nodes map[interface{}]*Node } func NewGraph() *Graph { g := &Graph{ Nodes: map[interface{}]*Node{}, objNodes: map[objNodeKey]*Node{}, - seenFns: map[string]struct{}{}, } - g.Root = g.newNode(nil) + g.Root = g.newNode(&context{}, nil) return g } @@ -766,46 +770,70 @@ type ConstGroup struct { func (ConstGroup) String() string { return "const group" } type Node struct { - obj interface{} - id int + obj interface{} + id uint64 + + mu sync.Mutex used map[*Node]edge + // even if unused, this specific node should never be reported. + // e.g. function receivers. + ignored bool + // set during final graph walk if node is reachable seen bool // a parent node (e.g. the struct type containing a field) is // already unused, don't report children quiet bool - // even if unused, this specific node should never be reported. - // e.g. function receivers. 
- ignored bool } func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { + // never called concurrently + if node, ok := g.Nodes[obj]; ok { return node, true } return nil, false } -func (g *Graph) node(obj interface{}) (node *Node, new bool) { +func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) { if t, ok := obj.(types.Type); ok { + if v := ctx.typeNodes.At(t); v != nil { + return v.(*Node), false + } + g.mu.Lock() + defer g.mu.Unlock() + if v := g.TypeNodes.At(t); v != nil { return v.(*Node), false } - node := g.newNode(t) + node := g.newNode(ctx, t) g.TypeNodes.Set(t, node) + ctx.typeNodes.Set(t, node) return node, true } + if node, ok := ctx.nodes[obj]; ok { + return node, false + } + + g.mu.Lock() + defer g.mu.Unlock() + if node, ok := g.Nodes[obj]; ok { return node, false } - node = g.newNode(obj) + node = g.newNode(ctx, obj) g.Nodes[obj] = node + ctx.nodes[obj] = node if obj, ok := obj.(types.Object); ok { key := objNodeKeyFor(g.fset, obj) if onode, ok := g.objNodes[key]; ok { + node.mu.Lock() + onode.mu.Lock() node.used[onode] |= edgeSameObject onode.used[node] |= edgeSameObject + node.mu.Unlock() + onode.mu.Unlock() } else { g.objNodes[key] = node } @@ -813,16 +841,18 @@ func (g *Graph) node(obj interface{}) (node *Node, new bool) { return node, true } -func (g *Graph) newNode(obj interface{}) *Node { - g.nodeCounter++ +func (g *Graph) newNode(ctx *context, obj interface{}) *Node { + ctx.nodeCounter++ return &Node{ obj: obj, - id: g.nodeCounter, + id: ctx.nodeCounter, used: map[*Node]edge{}, } } func (n *Node) use(node *Node, kind edge) { + n.mu.Lock() + defer n.mu.Unlock() assert(node != nil) n.used[node] |= kind } @@ -891,56 +921,49 @@ func isIrrelevant(obj interface{}) bool { return false } -func (g *Graph) isInterestingPackage(pkg *types.Package) bool { - if g.wholeProgram { - return true - } - return pkg == g.pkg.Pkg -} - -func (g *Graph) see(obj interface{}) *Node { +func (ctx *context) see(obj interface{}) *Node { if isIrrelevant(obj) { return nil } assert(obj != nil) // add new node to graph - node, _ := g.node(obj) + node, _ := ctx.g.node(ctx, obj) return node } -func (g *Graph) use(used, by interface{}, kind edge) { +func (ctx *context) use(used, by interface{}, kind edge) { if isIrrelevant(used) { return } assert(used != nil) if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { - if !g.isInterestingPackage(obj.Pkg()) { + if !ctx.g.wholeProgram && obj.Pkg() != ctx.pkg.Pkg { return } } - usedNode, new := g.node(used) + usedNode, new := ctx.g.node(ctx, used) assert(!new) if by == nil { - g.Root.use(usedNode, kind) + ctx.g.Root.use(usedNode, kind) } else { - byNode, new := g.node(by) + byNode, new := ctx.g.node(ctx, by) assert(!new) byNode.use(usedNode, kind) } } -func (g *Graph) seeAndUse(used, by interface{}, kind edge) *Node { - node := g.see(used) - g.use(used, by, kind) +func (ctx *context) seeAndUse(used, by interface{}, kind edge) *Node { + node := ctx.see(used) + ctx.use(used, by, kind) return node } // trackExportedIdentifier reports whether obj should be considered // used due to being exported, checking various conditions that affect // the decision. 
-func (g *Graph) trackExportedIdentifier(obj types.Object) bool { +func (g *Graph) trackExportedIdentifier(ctx *context, obj types.Object) bool { if !obj.Exported() { // object isn't exported, the question is moot return false @@ -956,7 +979,7 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { return false } - if g.pkg.Pkg.Name() == "main" && !strings.HasSuffix(path, "_test.go") { + if ctx.pkg.Pkg.Name() == "main" && !strings.HasSuffix(path, "_test.go") { // exported identifiers in package main can't be imported. // However, test functions can be called, and xtest packages // even have access to exported identifiers. @@ -976,8 +999,19 @@ func (g *Graph) trackExportedIdentifier(obj types.Object) bool { } func (g *Graph) entry(pkg *pkg) { - // TODO rename Entry - g.pkg = pkg.SSA + no := atomic.AddUint64(&g.nodeOffset, 1) + ctx := &context{ + g: g, + pkg: pkg, + nodeCounter: no * 1e9, + seenFns: map[string]struct{}{}, + nodes: map[interface{}]*Node{}, + } + if g.wholeProgram { + ctx.seenTypes = &g.seenTypes + } else { + ctx.seenTypes = &typeutil.Map{} + } scopes := map[*types.Scope]*ssa.Function{} for _, fn := range pkg.SrcFuncs { @@ -1010,7 +1044,7 @@ func (g *Graph) entry(pkg *pkg) { panic(fmt.Sprintf("unhandled type: %T", m)) } assert(obj != nil) - g.seeAndUse(obj, nil, edgeLinkname) + ctx.seeAndUse(obj, nil, edgeLinkname) } } } @@ -1039,21 +1073,21 @@ func (g *Graph) entry(pkg *pkg) { case *types.TypeName: // types are being handled by walking the AST case *types.Const: - g.see(obj) + ctx.see(obj) fn := surroundingFunc(obj) - if fn == nil && g.trackExportedIdentifier(obj) { + if fn == nil && g.trackExportedIdentifier(ctx, obj) { // (1.4) packages use exported constants (unless in package main) - g.use(obj, nil, edgeExportedConstant) + ctx.use(obj, nil, edgeExportedConstant) } - g.typ(obj.Type()) - g.seeAndUse(obj.Type(), obj, edgeType) + g.typ(ctx, obj.Type()) + ctx.seeAndUse(obj.Type(), obj, edgeType) } } // Find constants being used inside functions, find sinks in tests for _, fn := range pkg.SrcFuncs { if fn.Object() != nil { - g.see(fn.Object()) + ctx.see(fn.Object()) } node := fn.Syntax() if node == nil { @@ -1068,7 +1102,7 @@ func (g *Graph) entry(pkg *pkg) { } switch obj := obj.(type) { case *types.Const: - g.seeAndUse(obj, owningObject(fn), edgeUsedConstant) + ctx.seeAndUse(obj, owningObject(fn), edgeUsedConstant) } case *ast.AssignStmt: for _, expr := range node.Lhs { @@ -1088,7 +1122,7 @@ func (g *Graph) entry(pkg *pkg) { // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) // (9.7) variable _reads_ use variables, writes do not, except in tests - g.seeAndUse(obj, owningObject(fn), edgeTestSink) + ctx.seeAndUse(obj, owningObject(fn), edgeTestSink) } } } @@ -1103,7 +1137,7 @@ func (g *Graph) entry(pkg *pkg) { if !ok { continue } - g.seeAndUse(obj, nil, edgeUsedConstant) + ctx.seeAndUse(obj, nil, edgeUsedConstant) } var fn *types.Func @@ -1112,7 +1146,7 @@ func (g *Graph) entry(pkg *pkg) { switch n := n.(type) { case *ast.FuncDecl: fn = pkg.TypesInfo.ObjectOf(n.Name).(*types.Func) - g.see(fn) + ctx.see(fn) case *ast.GenDecl: switch n.Tok { case token.CONST: @@ -1120,13 +1154,13 @@ func (g *Graph) entry(pkg *pkg) { for _, specs := range groups { if len(specs) > 1 { cg := &ConstGroup{} - g.see(cg) + ctx.see(cg) for _, spec := range specs { for _, name := range spec.(*ast.ValueSpec).Names { obj := pkg.TypesInfo.ObjectOf(name) // (10.1) const groups - g.seeAndUse(obj, cg, edgeConstGroup) - g.use(cg, obj, edgeConstGroup) + 
ctx.seeAndUse(obj, cg, edgeConstGroup) + ctx.use(cg, obj, edgeConstGroup) } } } @@ -1137,11 +1171,11 @@ func (g *Graph) entry(pkg *pkg) { for _, name := range v.Names { T := pkg.TypesInfo.TypeOf(name) if fn != nil { - g.seeAndUse(T, fn, edgeVarDecl) + ctx.seeAndUse(T, fn, edgeVarDecl) } else { - g.seeAndUse(T, nil, edgeVarDecl) + ctx.seeAndUse(T, nil, edgeVarDecl) } - g.typ(T) + g.typ(ctx, T) } } case token.TYPE: @@ -1155,11 +1189,11 @@ func (g *Graph) entry(pkg *pkg) { v := spec.(*ast.TypeSpec) T := pkg.TypesInfo.TypeOf(v.Type) obj := pkg.TypesInfo.ObjectOf(v.Name) - g.see(obj) - g.see(T) - g.use(T, obj, edgeType) - g.typ(obj.Type()) - g.typ(T) + ctx.see(obj) + ctx.see(T) + ctx.use(T, obj, edgeType) + g.typ(ctx, obj.Type()) + g.typ(ctx, T) if v.Assign != 0 { aliasFor := obj.(*types.TypeName).Type() @@ -1170,10 +1204,10 @@ func (g *Graph) entry(pkg *pkg) { // just mark the alias used. // // FIXME(dh): what about aliases declared inside functions? - g.use(obj, nil, edgeAlias) + ctx.use(obj, nil, edgeAlias) } else { - g.see(aliasFor) - g.seeAndUse(obj, aliasFor, edgeAlias) + ctx.see(aliasFor) + ctx.seeAndUse(obj, aliasFor, edgeAlias) } } } @@ -1183,22 +1217,22 @@ func (g *Graph) entry(pkg *pkg) { }) } - for _, m := range g.pkg.Members { + for _, m := range pkg.SSA.Members { switch m := m.(type) { case *ssa.NamedConst: // nothing to do, we collect all constants from Defs case *ssa.Global: if m.Object() != nil { - g.see(m.Object()) - if g.trackExportedIdentifier(m.Object()) { + ctx.see(m.Object()) + if g.trackExportedIdentifier(ctx, m.Object()) { // (1.3) packages use exported variables (unless in package main) - g.use(m.Object(), nil, edgeExportedVariable) + ctx.use(m.Object(), nil, edgeExportedVariable) } } case *ssa.Function: mObj := owningObject(m) if mObj != nil { - g.see(mObj) + ctx.see(mObj) } //lint:ignore SA9003 handled implicitly if m.Name() == "init" { @@ -1209,17 +1243,17 @@ func (g *Graph) entry(pkg *pkg) { // be owned by the package. } // This branch catches top-level functions, not methods. 
- if m.Object() != nil && g.trackExportedIdentifier(m.Object()) { + if m.Object() != nil && g.trackExportedIdentifier(ctx, m.Object()) { // (1.2) packages use exported functions (unless in package main) - g.use(mObj, nil, edgeExportedFunction) + ctx.use(mObj, nil, edgeExportedFunction) } - if m.Name() == "main" && g.pkg.Pkg.Name() == "main" { + if m.Name() == "main" && pkg.Pkg.Name() == "main" { // (1.7) packages use the main function iff in the main package - g.use(mObj, nil, edgeMainFunction) + ctx.use(mObj, nil, edgeMainFunction) } - if g.pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { + if pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { // (9.8) runtime functions that may be called from user code via the compiler - g.use(mObj, nil, edgeRuntimeFunction) + ctx.use(mObj, nil, edgeRuntimeFunction) } if m.Syntax() != nil { doc := m.Syntax().(*ast.FuncDecl).Doc @@ -1227,21 +1261,21 @@ func (g *Graph) entry(pkg *pkg) { for _, cmt := range doc.List { if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { // (1.6) packages use functions exported to cgo - g.use(mObj, nil, edgeCgoExported) + ctx.use(mObj, nil, edgeCgoExported) } } } } - g.function(m) + g.function(ctx, m) case *ssa.Type: if m.Object() != nil { - g.see(m.Object()) - if g.trackExportedIdentifier(m.Object()) { + ctx.see(m.Object()) + if g.trackExportedIdentifier(ctx, m.Object()) { // (1.1) packages use exported named types (unless in package main) - g.use(m.Object(), nil, edgeExportedType) + ctx.use(m.Object(), nil, edgeExportedType) } } - g.typ(m.Type()) + g.typ(ctx, m.Type()) default: panic(fmt.Sprintf("unreachable: %T", m)) } @@ -1259,7 +1293,7 @@ func (g *Graph) entry(pkg *pkg) { var ifaces []*types.Interface var notIfaces []types.Type - g.seenTypes.Iterate(func(t types.Type, _ interface{}) { + ctx.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: // OPT(dh): (8.1) we only need interfaces that have unexported methods @@ -1273,11 +1307,11 @@ func (g *Graph) entry(pkg *pkg) { // (8.0) handle interfaces for _, t := range notIfaces { - ms := g.msCache.MethodSet(t) + ms := ctx.msCache.MethodSet(t) for _, iface := range ifaces { if sels, ok := g.implements(t, iface, ms); ok { for _, sel := range sels { - g.useMethod(t, sel, t, edgeImplements) + g.useMethod(ctx, t, sel, t, edgeImplements) } } } @@ -1285,7 +1319,7 @@ func (g *Graph) entry(pkg *pkg) { } } -func (g *Graph) useMethod(t types.Type, sel *types.Selection, by interface{}, kind edge) { +func (g *Graph) useMethod(ctx *context, t types.Type, sel *types.Selection, by interface{}, kind edge) { obj := sel.Obj() path := sel.Index() assert(obj != nil) @@ -1294,12 +1328,12 @@ func (g *Graph) useMethod(t types.Type, sel *types.Selection, by interface{}, ki for _, idx := range path[:len(path)-1] { next := base.Field(idx) // (6.3) structs use embedded fields that help implement interfaces - g.see(base) - g.seeAndUse(next, base, edgeProvidesMethod) + ctx.see(base) + ctx.seeAndUse(next, base, edgeProvidesMethod) base, _ = lintdsl.Dereference(next.Type()).Underlying().(*types.Struct) } } - g.seeAndUse(obj, by, kind) + ctx.seeAndUse(obj, by, kind) } func owningObject(fn *ssa.Function) types.Object { @@ -1312,58 +1346,74 @@ func owningObject(fn *ssa.Function) types.Object { return nil } -func (g *Graph) function(fn *ssa.Function) { - if fn.Package() != nil && fn.Package() != g.pkg { +func (g *Graph) function(ctx *context, fn *ssa.Function) { + if fn.Package() != nil && fn.Package() != ctx.pkg.SSA { 
return } name := fn.RelString(nil) - if _, ok := g.seenFns[name]; ok { + if _, ok := ctx.seenFns[name]; ok { return } - g.seenFns[name] = struct{}{} + ctx.seenFns[name] = struct{}{} // (4.1) functions use all their arguments, return parameters and receivers - g.seeAndUse(fn.Signature, owningObject(fn), edgeFunctionSignature) - g.signature(fn.Signature) - g.instructions(fn) + ctx.seeAndUse(fn.Signature, owningObject(fn), edgeFunctionSignature) + g.signature(ctx, fn.Signature) + g.instructions(ctx, fn) for _, anon := range fn.AnonFuncs { // (4.2) functions use anonymous functions defined beneath them // // This fact is expressed implicitly. Anonymous functions have // no types.Object, so their owner is the surrounding // function. - g.function(anon) + g.function(ctx, anon) } } -func (g *Graph) typ(t types.Type) { - if g.seenTypes.At(t) != nil { +func (g *Graph) typ(ctx *context, t types.Type) { + if g.wholeProgram { + g.mu.Lock() + } + if ctx.seenTypes.At(t) != nil { + if g.wholeProgram { + g.mu.Unlock() + } return } + if g.wholeProgram { + g.mu.Unlock() + } if t, ok := t.(*types.Named); ok && t.Obj().Pkg() != nil { - if t.Obj().Pkg() != g.pkg.Pkg { + if t.Obj().Pkg() != ctx.pkg.Pkg { return } } - g.seenTypes.Set(t, struct{}{}) + + if g.wholeProgram { + g.mu.Lock() + } + ctx.seenTypes.Set(t, struct{}{}) + if g.wholeProgram { + g.mu.Unlock() + } if isIrrelevant(t) { return } - g.see(t) + ctx.see(t) switch t := t.(type) { case *types.Struct: for i := 0; i < t.NumFields(); i++ { - g.see(t.Field(i)) + ctx.see(t.Field(i)) if t.Field(i).Exported() { // (6.2) structs use exported fields - g.use(t.Field(i), t, edgeExportedField) + ctx.use(t.Field(i), t, edgeExportedField) } else if t.Field(i).Name() == "_" { - g.use(t.Field(i), t, edgeBlankField) + ctx.use(t.Field(i), t, edgeBlankField) } else if isNoCopyType(t.Field(i).Type()) { // (6.1) structs use fields of type NoCopy sentinel - g.use(t.Field(i), t, edgeNoCopySentinel) + ctx.use(t.Field(i), t, edgeNoCopySentinel) } if t.Field(i).Anonymous() { // (e3) exported identifiers aren't automatically used. @@ -1375,11 +1425,11 @@ func (g *Graph) typ(t types.Type) { // the pointer type to get the full method set T = types.NewPointer(T) } - ms := g.msCache.MethodSet(T) + ms := ctx.msCache.MethodSet(T) for j := 0; j < ms.Len(); j++ { if ms.At(j).Obj().Exported() { // (6.4) structs use embedded fields that have exported methods (recursively) - g.use(t.Field(i), t, edgeExtendsExportedMethodSet) + ctx.use(t.Field(i), t, edgeExtendsExportedMethodSet) break } } @@ -1410,114 +1460,114 @@ func (g *Graph) typ(t types.Type) { // does the embedded field contribute exported fields? 
if hasExportedField(t.Field(i).Type()) { // (6.5) structs use embedded structs that have exported fields (recursively) - g.use(t.Field(i), t, edgeExtendsExportedFields) + ctx.use(t.Field(i), t, edgeExtendsExportedFields) } } - g.variable(t.Field(i)) + g.variable(ctx, t.Field(i)) } case *types.Basic: // Nothing to do case *types.Named: // (9.3) types use their underlying and element types - g.seeAndUse(t.Underlying(), t, edgeUnderlyingType) - g.seeAndUse(t.Obj(), t, edgeTypeName) - g.seeAndUse(t, t.Obj(), edgeNamedType) + ctx.seeAndUse(t.Underlying(), t, edgeUnderlyingType) + ctx.seeAndUse(t.Obj(), t, edgeTypeName) + ctx.seeAndUse(t, t.Obj(), edgeNamedType) // (2.4) named types use the pointer type - g.seeAndUse(types.NewPointer(t), t, edgePointerType) + ctx.seeAndUse(types.NewPointer(t), t, edgePointerType) for i := 0; i < t.NumMethods(); i++ { - g.see(t.Method(i)) + ctx.see(t.Method(i)) // don't use trackExportedIdentifier here, we care about // all exported methods, even in package main or in tests. if t.Method(i).Exported() && !g.wholeProgram { // (2.1) named types use exported methods - g.use(t.Method(i), t, edgeExportedMethod) + ctx.use(t.Method(i), t, edgeExportedMethod) } - g.function(g.pkg.Prog.FuncValue(t.Method(i))) + g.function(ctx, ctx.pkg.SSA.Prog.FuncValue(t.Method(i))) } - g.typ(t.Underlying()) + g.typ(ctx, t.Underlying()) case *types.Slice: // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem()) + ctx.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(ctx, t.Elem()) case *types.Map: // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) + ctx.seeAndUse(t.Elem(), t, edgeElementType) // (9.3) types use their underlying and element types - g.seeAndUse(t.Key(), t, edgeKeyType) - g.typ(t.Elem()) - g.typ(t.Key()) + ctx.seeAndUse(t.Key(), t, edgeKeyType) + g.typ(ctx, t.Elem()) + g.typ(ctx, t.Key()) case *types.Signature: - g.signature(t) + g.signature(ctx, t) case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) // (8.3) All interface methods are marked as used - g.seeAndUse(m, t, edgeInterfaceMethod) - g.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) - g.signature(m.Type().(*types.Signature)) + ctx.seeAndUse(m, t, edgeInterfaceMethod) + ctx.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) + g.signature(ctx, m.Type().(*types.Signature)) } for i := 0; i < t.NumEmbeddeds(); i++ { tt := t.EmbeddedType(i) // (8.4) All embedded interfaces are marked as used - g.seeAndUse(tt, t, edgeEmbeddedInterface) + ctx.seeAndUse(tt, t, edgeEmbeddedInterface) } case *types.Array: // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem()) + ctx.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(ctx, t.Elem()) case *types.Pointer: // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem()) + ctx.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(ctx, t.Elem()) case *types.Chan: // (9.3) types use their underlying and element types - g.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(t.Elem()) + ctx.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(ctx, t.Elem()) case *types.Tuple: for i := 0; i < t.Len(); i++ { // (9.3) types use their underlying and element types - g.seeAndUse(t.At(i), t, edgeTupleElement) - g.variable(t.At(i)) + ctx.seeAndUse(t.At(i), t, edgeTupleElement) + g.variable(ctx, t.At(i)) } default: panic(fmt.Sprintf("unreachable: %T", t)) } } 
-func (g *Graph) variable(v *types.Var) { +func (g *Graph) variable(ctx *context, v *types.Var) { // (9.2) variables use their types - g.seeAndUse(v.Type(), v, edgeType) - g.typ(v.Type()) + ctx.seeAndUse(v.Type(), v, edgeType) + g.typ(ctx, v.Type()) } -func (g *Graph) signature(sig *types.Signature) { +func (g *Graph) signature(ctx *context, sig *types.Signature) { if sig.Recv() != nil { - if node := g.seeAndUse(sig.Recv(), sig, edgeReceiver); node != nil { + if node := ctx.seeAndUse(sig.Recv(), sig, edgeReceiver); node != nil { node.ignored = true } - g.variable(sig.Recv()) + g.variable(ctx, sig.Recv()) } for i := 0; i < sig.Params().Len(); i++ { param := sig.Params().At(i) - if node := g.seeAndUse(param, sig, edgeFunctionArgument); node != nil { + if node := ctx.seeAndUse(param, sig, edgeFunctionArgument); node != nil { node.ignored = true } - g.variable(param) + g.variable(ctx, param) } for i := 0; i < sig.Results().Len(); i++ { param := sig.Results().At(i) - if node := g.seeAndUse(param, sig, edgeFunctionResult); node != nil { + if node := ctx.seeAndUse(param, sig, edgeFunctionResult); node != nil { node.ignored = true } - g.variable(param) + g.variable(ctx, param) } } -func (g *Graph) instructions(fn *ssa.Function) { +func (g *Graph) instructions(ctx *context, fn *ssa.Function) { fnObj := owningObject(fn) for _, b := range fn.Blocks { for _, instr := range b.Instrs { @@ -1538,17 +1588,17 @@ func (g *Graph) instructions(fn *ssa.Function) { // (9.5) instructions use their operands // (4.4) functions use functions they return. we assume that someone else will call the returned function if owningObject(v) != nil { - g.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand) + ctx.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand) } - g.function(v) + g.function(ctx, v) case *ssa.Const: // (9.6) instructions use their operands' types - g.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(v.Type()) + ctx.seeAndUse(v.Type(), fnObj, edgeType) + g.typ(ctx, v.Type()) case *ssa.Global: if v.Object() != nil { // (9.5) instructions use their operands - g.seeAndUse(v.Object(), fnObj, edgeInstructionOperand) + ctx.seeAndUse(v.Object(), fnObj, edgeInstructionOperand) } } }) @@ -1559,8 +1609,8 @@ func (g *Graph) instructions(fn *ssa.Function) { // (4.8) instructions use their types // (9.4) conversions use the type they convert to - g.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(v.Type()) + ctx.seeAndUse(v.Type(), fnObj, edgeType) + g.typ(ctx, v.Type()) } } switch instr := instr.(type) { @@ -1568,12 +1618,12 @@ func (g *Graph) instructions(fn *ssa.Function) { st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access - g.seeAndUse(field, fnObj, edgeFieldAccess) + ctx.seeAndUse(field, fnObj, edgeFieldAccess) case *ssa.FieldAddr: st := lintdsl.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access - g.seeAndUse(field, fnObj, edgeFieldAccess) + ctx.seeAndUse(field, fnObj, edgeFieldAccess) case *ssa.Store: // nothing to do, handled generically by operands case *ssa.Call: @@ -1599,10 +1649,10 @@ func (g *Graph) instructions(fn *ssa.Function) { walkPhi(arg, func(v ssa.Value) { if v, ok := v.(*ssa.MakeInterface); ok { walkPhi(v.X, func(vv ssa.Value) { - ms := g.msCache.MethodSet(vv.Type()) + ms := ctx.msCache.MethodSet(vv.Type()) for i := 0; i < ms.Len(); i++ { if ms.At(i).Obj().Exported() { - g.useMethod(vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister) + g.useMethod(ctx, 
vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister) } } }) @@ -1612,7 +1662,7 @@ func (g *Graph) instructions(fn *ssa.Function) { } } else { // (4.5) functions use functions/interface methods they call - g.seeAndUse(c.Method, fnObj, edgeInterfaceCall) + ctx.seeAndUse(c.Method, fnObj, edgeInterfaceCall) } case *ssa.Return: // nothing to do, handled generically by operands @@ -1629,14 +1679,14 @@ func (g *Graph) instructions(fn *ssa.Function) { assert(s1.NumFields() == s2.NumFields()) for i := 0; i < s1.NumFields(); i++ { - g.see(s1.Field(i)) - g.see(s2.Field(i)) + ctx.see(s1.Field(i)) + ctx.see(s2.Field(i)) // (5.1) when converting between two equivalent structs, the fields in // either struct use each other. the fields are relevant for the // conversion, but only if the fields are also accessed outside the // conversion. - g.seeAndUse(s1.Field(i), s2.Field(i), edgeStructConversion) - g.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion) + ctx.seeAndUse(s1.Field(i), s2.Field(i), edgeStructConversion) + ctx.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion) } } case *ssa.MakeInterface: @@ -1652,7 +1702,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - g.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) + ctx.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) } } } @@ -1663,7 +1713,7 @@ func (g *Graph) instructions(fn *ssa.Function) { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - g.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) + ctx.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) } } } diff --git a/unused/unused_test.go b/unused/unused_test.go index 8cf5d8fb1..3035f2017 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -174,7 +174,7 @@ func check(t *testing.T, fset *token.FileSet, diagnostics []types.Object) { } func TestAll(t *testing.T) { - c := NewChecker() + c := NewChecker(false) var stats lint.Stats r, err := lint.NewRunner(&stats) if err != nil { From adcb8c51c01293bfdf531908c2c2d0a422a351a1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 May 2019 16:33:44 +0200 Subject: [PATCH 172/254] unused: use sync.Map for maps accessed concurrently --- unused/unused.go | 60 +++++++++++++++++++----------------------------- 1 file changed, 24 insertions(+), 36 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index d89add9f1..6252c3732 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -586,7 +586,6 @@ func (c *Checker) results(graph *Graph) []types.Object { ctx := &context{ g: graph, seenTypes: &graph.seenTypes, - nodes: map[interface{}]*Node{}, } // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages @@ -620,9 +619,10 @@ func (c *Checker) results(graph *Graph) []types.Object { c.debugf("digraph{\n") debugNode(graph.Root) - for _, node := range graph.Nodes { - debugNode(node) - } + graph.Nodes.Range(func(k, v interface{}) bool { + debugNode(v.(*Node)) + return true + }) graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { debugNode(value.(*Node)) }) @@ -636,9 +636,10 @@ func (c *Checker) results(graph *Graph) []types.Object { // don't flag its receiver. if a named type is unused, don't // flag its methods. 
- for _, node := range graph.Nodes { - graph.quieten(node) - } + graph.Nodes.Range(func(k, v interface{}) bool { + graph.quieten(v.(*Node)) + return true + }) graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { graph.quieten(value.(*Node)) }) @@ -672,9 +673,10 @@ func (c *Checker) results(graph *Graph) []types.Object { } c.debugf("n%d [color=gray];\n", node.id) } - for _, node := range graph.Nodes { - report(node) - } + graph.Nodes.Range(func(k, v interface{}) bool { + report(v.(*Node)) + return true + }) graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { report(value.(*Node)) }) @@ -716,15 +718,15 @@ type Graph struct { fset *token.FileSet Root *Node seenTypes typeutil.Map + Nodes sync.Map // map[interface{}]*Node + objNodes sync.Map // map[objNodeKey]*Node // read-only wholeProgram bool // need synchronisation mu sync.Mutex - Nodes map[interface{}]*Node TypeNodes typeutil.Map - objNodes map[objNodeKey]*Node // accessed atomically nodeOffset uint64 @@ -738,16 +740,12 @@ type context struct { nodeCounter uint64 msCache typeutil.MethodSetCache - // these act as local, lock-free caches for the maps in Graph. + // local cache for the map in Graph typeNodes typeutil.Map - nodes map[interface{}]*Node } func NewGraph() *Graph { - g := &Graph{ - Nodes: map[interface{}]*Node{}, - objNodes: map[objNodeKey]*Node{}, - } + g := &Graph{} g.Root = g.newNode(&context{}, nil) return g } @@ -787,10 +785,8 @@ type Node struct { } func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { - // never called concurrently - - if node, ok := g.Nodes[obj]; ok { - return node, true + if node, ok := g.Nodes.Load(obj); ok { + return node.(*Node), true } return nil, false } @@ -812,22 +808,15 @@ func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) { return node, true } - if node, ok := ctx.nodes[obj]; ok { - return node, false - } - - g.mu.Lock() - defer g.mu.Unlock() - - if node, ok := g.Nodes[obj]; ok { - return node, false + if node, ok := g.Nodes.Load(obj); ok { + return node.(*Node), false } node = g.newNode(ctx, obj) - g.Nodes[obj] = node - ctx.nodes[obj] = node + g.Nodes.Store(obj, node) if obj, ok := obj.(types.Object); ok { key := objNodeKeyFor(g.fset, obj) - if onode, ok := g.objNodes[key]; ok { + if o, ok := g.objNodes.Load(key); ok { + onode := o.(*Node) node.mu.Lock() onode.mu.Lock() node.used[onode] |= edgeSameObject @@ -835,7 +824,7 @@ func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) { node.mu.Unlock() onode.mu.Unlock() } else { - g.objNodes[key] = node + g.objNodes.Store(key, node) } } return node, true @@ -1005,7 +994,6 @@ func (g *Graph) entry(pkg *pkg) { pkg: pkg, nodeCounter: no * 1e9, seenFns: map[string]struct{}{}, - nodes: map[interface{}]*Node{}, } if g.wholeProgram { ctx.seenTypes = &g.seenTypes From 6b3a750f5897619804040509c450592300cf7a38 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 May 2019 19:32:07 +0200 Subject: [PATCH 173/254] unused: remove unnecessary argument --- unused/unused.go | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 6252c3732..fcf936470 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -508,7 +508,7 @@ func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Prob } func (c *Checker) Result() []types.Object { - out := c.results(c.graph) + out := c.results() out2 := make([]types.Object, 0, len(out)) for _, v := range out { @@ -555,8 +555,8 @@ func (graph *Graph) 
quieten(node *Node) { } } -func (c *Checker) results(graph *Graph) []types.Object { - if graph == nil { +func (c *Checker) results() []types.Object { + if c.graph == nil { // We never analyzed any packages return nil } @@ -568,7 +568,7 @@ func (c *Checker) results(graph *Graph) []types.Object { var notIfaces []types.Type // implement as many interfaces as possible - graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { + c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: ifaces = append(ifaces, t) @@ -584,17 +584,17 @@ func (c *Checker) results(graph *Graph) []types.Object { } ctx := &context{ - g: graph, - seenTypes: &graph.seenTypes, + g: c.graph, + seenTypes: &c.graph.seenTypes, } // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { ms := types.NewMethodSet(t) for _, iface := range ifaces { - if sels, ok := graph.implements(t, iface, ms); ok { + if sels, ok := c.graph.implements(t, iface, ms); ok { for _, sel := range sels { - graph.useMethod(ctx, t, sel, t, edgeImplements) + c.graph.useMethod(ctx, t, sel, t, edgeImplements) } } } @@ -618,30 +618,30 @@ func (c *Checker) results(graph *Graph) []types.Object { } c.debugf("digraph{\n") - debugNode(graph.Root) - graph.Nodes.Range(func(k, v interface{}) bool { + debugNode(c.graph.Root) + c.graph.Nodes.Range(func(k, v interface{}) bool { debugNode(v.(*Node)) return true }) - graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { + c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { debugNode(value.(*Node)) }) c.debugf("}\n") } - graph.color(graph.Root) + c.graph.color(c.graph.Root) // if a node is unused, don't report any of the node's // children as unused. for example, if a function is unused, // don't flag its receiver. if a named type is unused, don't // flag its methods. 
- graph.Nodes.Range(func(k, v interface{}) bool { - graph.quieten(v.(*Node)) + c.graph.Nodes.Range(func(k, v interface{}) bool { + c.graph.quieten(v.(*Node)) return true }) - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - graph.quieten(value.(*Node)) + c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + c.graph.quieten(value.(*Node)) }) report := func(node *Node) { @@ -673,11 +673,11 @@ func (c *Checker) results(graph *Graph) []types.Object { } c.debugf("n%d [color=gray];\n", node.id) } - graph.Nodes.Range(func(k, v interface{}) bool { + c.graph.Nodes.Range(func(k, v interface{}) bool { report(v.(*Node)) return true }) - graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { + c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { report(value.(*Node)) }) From c6fa781e62570f5844fcf7ee0a156d49c11a1560 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 May 2019 21:50:59 +0200 Subject: [PATCH 174/254] cmd/staticcheck: add flag for dumping 'unused' debug graph --- cmd/staticcheck/staticcheck.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/cmd/staticcheck/staticcheck.go b/cmd/staticcheck/staticcheck.go index 78534c8b3..4f504dc39 100644 --- a/cmd/staticcheck/staticcheck.go +++ b/cmd/staticcheck/staticcheck.go @@ -2,6 +2,7 @@ package main // import "honnef.co/go/tools/cmd/staticcheck" import ( + "log" "os" "golang.org/x/tools/go/analysis" @@ -16,6 +17,7 @@ import ( func main() { fs := lintutil.FlagSet("staticcheck") wholeProgram := fs.Bool("unused.whole-program", false, "Run unused in whole program mode") + debug := fs.String("debug.unused-graph", "", "Write unused's object graph to `file`") fs.Parse(os.Args[1:]) var cs []*analysis.Analyzer @@ -30,6 +32,13 @@ func main() { } u := unused.NewChecker(*wholeProgram) + if *debug != "" { + f, err := os.OpenFile(*debug, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666) + if err != nil { + log.Fatal(err) + } + u.Debug = f + } cums := []lint.CumulativeChecker{u} lintutil.ProcessFlagSet(cs, cums, fs) } From 15e17b731085ad034fe9ab7b528128f296772c6d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 May 2019 22:17:37 +0200 Subject: [PATCH 175/254] unused: don't deduplicate struct types --- go/types/typeutil/identical.go | 8 ++ go/types/typeutil/map.go | 6 +- unused/edge.go | 1 + unused/edge_string.go | 168 ++++++++++++----------- unused/testdata/src/fields/fields.go | 2 +- unused/testdata/src/type-dedup/dedup.go | 18 +++ unused/testdata/src/type-dedup2/dedup.go | 23 ++++ unused/testdata/src/type-dedup3/dedup.go | 23 ++++ unused/unused.go | 43 +++--- 9 files changed, 190 insertions(+), 102 deletions(-) create mode 100644 unused/testdata/src/type-dedup/dedup.go create mode 100644 unused/testdata/src/type-dedup2/dedup.go create mode 100644 unused/testdata/src/type-dedup3/dedup.go diff --git a/go/types/typeutil/identical.go b/go/types/typeutil/identical.go index 2236a953a..042682b82 100644 --- a/go/types/typeutil/identical.go +++ b/go/types/typeutil/identical.go @@ -7,12 +7,20 @@ import ( // Identical reports whether x and y are identical types. // Unlike types.Identical, receivers of Signature types are not ignored. // Unlike types.Identical, interfaces are compared via pointer equality. +// Unlike types.Identical, structs are compared via pointer equality. 
func Identical(x, y types.Type) (ret bool) { if !types.Identical(x, y) { return false } switch x := x.(type) { + case *types.Struct: + y, ok := y.(*types.Struct) + if !ok { + // should be impossible + return true + } + return x == y case *types.Interface: // The issue with interfaces, typeutil.Map and types.Identical // diff --git a/go/types/typeutil/map.go b/go/types/typeutil/map.go index 1b6a19804..f929353cc 100644 --- a/go/types/typeutil/map.go +++ b/go/types/typeutil/map.go @@ -24,8 +24,10 @@ import ( // Not thread-safe. // // This fork handles Signatures correctly, respecting method -// receivers. Furthermore, it differentiates between implicit and -// explicit methods in interfaces. +// receivers. Furthermore, it doesn't deduplicate interfaces or +// structs. Interfaces aren't deduplicated as not to conflate implicit +// and explicit methods. Structs aren't deduplicated because we track +// fields of each type separately. // type Map struct { hasher Hasher // shared by many Maps diff --git a/unused/edge.go b/unused/edge.go index b155b03ec..94110630d 100644 --- a/unused/edge.go +++ b/unused/edge.go @@ -10,6 +10,7 @@ func (e edge) is(o edge) bool { const ( edgeAlias edge = 1 << iota edgeBlankField + edgeAnonymousStruct edgeCgoExported edgeConstGroup edgeElementType diff --git a/unused/edge_string.go b/unused/edge_string.go index bde3ebb86..4f006fee3 100644 --- a/unused/edge_string.go +++ b/unused/edge_string.go @@ -10,95 +10,97 @@ func _() { var x [1]struct{} _ = x[edgeAlias-1] _ = x[edgeBlankField-2] - _ = x[edgeCgoExported-4] - _ = x[edgeConstGroup-8] - _ = x[edgeElementType-16] - _ = x[edgeEmbeddedInterface-32] - _ = x[edgeExportedConstant-64] - _ = x[edgeExportedField-128] - _ = x[edgeExportedFunction-256] - _ = x[edgeExportedMethod-512] - _ = x[edgeExportedType-1024] - _ = x[edgeExportedVariable-2048] - _ = x[edgeExtendsExportedFields-4096] - _ = x[edgeExtendsExportedMethodSet-8192] - _ = x[edgeFieldAccess-16384] - _ = x[edgeFunctionArgument-32768] - _ = x[edgeFunctionResult-65536] - _ = x[edgeFunctionSignature-131072] - _ = x[edgeImplements-262144] - _ = x[edgeInstructionOperand-524288] - _ = x[edgeInterfaceCall-1048576] - _ = x[edgeInterfaceMethod-2097152] - _ = x[edgeKeyType-4194304] - _ = x[edgeLinkname-8388608] - _ = x[edgeMainFunction-16777216] - _ = x[edgeNamedType-33554432] - _ = x[edgeNetRPCRegister-67108864] - _ = x[edgeNoCopySentinel-134217728] - _ = x[edgeProvidesMethod-268435456] - _ = x[edgeReceiver-536870912] - _ = x[edgeRuntimeFunction-1073741824] - _ = x[edgeSameObject-2147483648] - _ = x[edgeSignature-4294967296] - _ = x[edgeStructConversion-8589934592] - _ = x[edgeTestSink-17179869184] - _ = x[edgeTupleElement-34359738368] - _ = x[edgeType-68719476736] - _ = x[edgeTypeName-137438953472] - _ = x[edgeUnderlyingType-274877906944] - _ = x[edgePointerType-549755813888] - _ = x[edgeUnsafeConversion-1099511627776] - _ = x[edgeUsedConstant-2199023255552] - _ = x[edgeVarDecl-4398046511104] + _ = x[edgeAnonymousStruct-4] + _ = x[edgeCgoExported-8] + _ = x[edgeConstGroup-16] + _ = x[edgeElementType-32] + _ = x[edgeEmbeddedInterface-64] + _ = x[edgeExportedConstant-128] + _ = x[edgeExportedField-256] + _ = x[edgeExportedFunction-512] + _ = x[edgeExportedMethod-1024] + _ = x[edgeExportedType-2048] + _ = x[edgeExportedVariable-4096] + _ = x[edgeExtendsExportedFields-8192] + _ = x[edgeExtendsExportedMethodSet-16384] + _ = x[edgeFieldAccess-32768] + _ = x[edgeFunctionArgument-65536] + _ = x[edgeFunctionResult-131072] + _ = x[edgeFunctionSignature-262144] + _ = 
x[edgeImplements-524288] + _ = x[edgeInstructionOperand-1048576] + _ = x[edgeInterfaceCall-2097152] + _ = x[edgeInterfaceMethod-4194304] + _ = x[edgeKeyType-8388608] + _ = x[edgeLinkname-16777216] + _ = x[edgeMainFunction-33554432] + _ = x[edgeNamedType-67108864] + _ = x[edgeNetRPCRegister-134217728] + _ = x[edgeNoCopySentinel-268435456] + _ = x[edgeProvidesMethod-536870912] + _ = x[edgeReceiver-1073741824] + _ = x[edgeRuntimeFunction-2147483648] + _ = x[edgeSameObject-4294967296] + _ = x[edgeSignature-8589934592] + _ = x[edgeStructConversion-17179869184] + _ = x[edgeTestSink-34359738368] + _ = x[edgeTupleElement-68719476736] + _ = x[edgeType-137438953472] + _ = x[edgeTypeName-274877906944] + _ = x[edgeUnderlyingType-549755813888] + _ = x[edgePointerType-1099511627776] + _ = x[edgeUnsafeConversion-2199023255552] + _ = x[edgeUsedConstant-4398046511104] + _ = x[edgeVarDecl-8796093022208] } -const _edge_name = "edgeAliasedgeBlankFieldedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" +const _edge_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" var _edge_map = map[edge]string{ 1: _edge_name[0:9], 2: _edge_name[9:23], - 4: _edge_name[23:38], - 8: _edge_name[38:52], - 16: _edge_name[52:67], - 32: _edge_name[67:88], - 64: _edge_name[88:108], - 128: _edge_name[108:125], - 256: _edge_name[125:145], - 512: _edge_name[145:163], - 1024: _edge_name[163:179], - 2048: _edge_name[179:199], - 4096: _edge_name[199:224], - 8192: _edge_name[224:252], - 16384: _edge_name[252:267], - 32768: _edge_name[267:287], - 65536: _edge_name[287:305], - 131072: _edge_name[305:326], - 262144: _edge_name[326:340], - 524288: _edge_name[340:362], - 1048576: _edge_name[362:379], - 2097152: _edge_name[379:398], - 4194304: _edge_name[398:409], - 8388608: _edge_name[409:421], - 16777216: _edge_name[421:437], - 33554432: _edge_name[437:450], - 67108864: _edge_name[450:468], - 134217728: _edge_name[468:486], - 268435456: _edge_name[486:504], - 536870912: _edge_name[504:516], - 1073741824: _edge_name[516:535], - 2147483648: _edge_name[535:549], - 4294967296: _edge_name[549:562], - 8589934592: _edge_name[562:582], - 17179869184: _edge_name[582:594], - 34359738368: _edge_name[594:610], - 68719476736: _edge_name[610:618], - 
137438953472: _edge_name[618:630], - 274877906944: _edge_name[630:648], - 549755813888: _edge_name[648:663], - 1099511627776: _edge_name[663:683], - 2199023255552: _edge_name[683:699], - 4398046511104: _edge_name[699:710], + 4: _edge_name[23:42], + 8: _edge_name[42:57], + 16: _edge_name[57:71], + 32: _edge_name[71:86], + 64: _edge_name[86:107], + 128: _edge_name[107:127], + 256: _edge_name[127:144], + 512: _edge_name[144:164], + 1024: _edge_name[164:182], + 2048: _edge_name[182:198], + 4096: _edge_name[198:218], + 8192: _edge_name[218:243], + 16384: _edge_name[243:271], + 32768: _edge_name[271:286], + 65536: _edge_name[286:306], + 131072: _edge_name[306:324], + 262144: _edge_name[324:345], + 524288: _edge_name[345:359], + 1048576: _edge_name[359:381], + 2097152: _edge_name[381:398], + 4194304: _edge_name[398:417], + 8388608: _edge_name[417:428], + 16777216: _edge_name[428:440], + 33554432: _edge_name[440:456], + 67108864: _edge_name[456:469], + 134217728: _edge_name[469:487], + 268435456: _edge_name[487:505], + 536870912: _edge_name[505:523], + 1073741824: _edge_name[523:535], + 2147483648: _edge_name[535:554], + 4294967296: _edge_name[554:568], + 8589934592: _edge_name[568:581], + 17179869184: _edge_name[581:601], + 34359738368: _edge_name[601:613], + 68719476736: _edge_name[613:629], + 137438953472: _edge_name[629:637], + 274877906944: _edge_name[637:649], + 549755813888: _edge_name[649:667], + 1099511627776: _edge_name[667:682], + 2199023255552: _edge_name[682:702], + 4398046511104: _edge_name[702:718], + 8796093022208: _edge_name[718:729], } func (i edge) String() string { diff --git a/unused/testdata/src/fields/fields.go b/unused/testdata/src/fields/fields.go index 401acf4f1..8b07c8152 100644 --- a/unused/testdata/src/fields/fields.go +++ b/unused/testdata/src/fields/fields.go @@ -57,7 +57,7 @@ func foo() { _ = a1{{1}} _ = a2{0: {1}} _ = map[[1]t16]int{{{1}}: 1} - y := struct{ x int }{} // want `x` + y := struct{ x int }{} _ = y _ = t18{f181: 1} _ = []m2{{"a": {1}}} diff --git a/unused/testdata/src/type-dedup/dedup.go b/unused/testdata/src/type-dedup/dedup.go new file mode 100644 index 000000000..53cf2f989 --- /dev/null +++ b/unused/testdata/src/type-dedup/dedup.go @@ -0,0 +1,18 @@ +package pkg + +type t1 struct { + a int + b int // want `b` +} + +type t2 struct { + a int // want `a` + b int +} + +func Fn() { + x := t1{} + y := t2{} + println(x.a) + println(y.b) +} diff --git a/unused/testdata/src/type-dedup2/dedup.go b/unused/testdata/src/type-dedup2/dedup.go new file mode 100644 index 000000000..56c7dc951 --- /dev/null +++ b/unused/testdata/src/type-dedup2/dedup.go @@ -0,0 +1,23 @@ +package pkg + +func fn1(t struct { + a int + b int +}) { + println(t.a) + fn2(t) +} + +func fn2(t struct { + a int + b int +}) { + println(t.b) +} + +func Fn() { + fn1(struct { + a int + b int + }{}) +} diff --git a/unused/testdata/src/type-dedup3/dedup.go b/unused/testdata/src/type-dedup3/dedup.go new file mode 100644 index 000000000..095e95f86 --- /dev/null +++ b/unused/testdata/src/type-dedup3/dedup.go @@ -0,0 +1,23 @@ +package pkg + +func fn1(t struct { + a int + b int +}) { + fn2(t) +} + +func fn2(t struct { + a int + b int +}) { + println(t.a) + println(t.b) +} + +func Fn() { + fn1(struct { + a int + b int + }{1, 2}) +} diff --git a/unused/unused.go b/unused/unused.go index fcf936470..95d69f11b 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -121,6 +121,14 @@ import ( https://github.com/dominikh/go-tools/issues/365 +- (11.1) anonymous struct types use all 
their fields. we cannot + deduplicate struct types, as that leads to order-dependent + reportings. we can't not deduplicate struct types while still + tracking fields, because then each instance of the unnamed type in + the data flow chain will get its own fields, causing false + positives. Thus, we only accurately track fields of named struct + types, and assume that unnamed struct types use all their fields. + - Differences in whole program mode: - (e2) types aim to implement all exported interfaces from all packages @@ -1067,7 +1075,7 @@ func (g *Graph) entry(pkg *pkg) { // (1.4) packages use exported constants (unless in package main) ctx.use(obj, nil, edgeExportedConstant) } - g.typ(ctx, obj.Type()) + g.typ(ctx, obj.Type(), nil) ctx.seeAndUse(obj.Type(), obj, edgeType) } } @@ -1163,7 +1171,7 @@ func (g *Graph) entry(pkg *pkg) { } else { ctx.seeAndUse(T, nil, edgeVarDecl) } - g.typ(ctx, T) + g.typ(ctx, T, nil) } } case token.TYPE: @@ -1180,8 +1188,8 @@ func (g *Graph) entry(pkg *pkg) { ctx.see(obj) ctx.see(T) ctx.use(T, obj, edgeType) - g.typ(ctx, obj.Type()) - g.typ(ctx, T) + g.typ(ctx, obj.Type(), nil) + g.typ(ctx, T, nil) if v.Assign != 0 { aliasFor := obj.(*types.TypeName).Type() @@ -1263,7 +1271,7 @@ func (g *Graph) entry(pkg *pkg) { ctx.use(m.Object(), nil, edgeExportedType) } } - g.typ(ctx, m.Type()) + g.typ(ctx, m.Type(), nil) default: panic(fmt.Sprintf("unreachable: %T", m)) } @@ -1359,7 +1367,7 @@ func (g *Graph) function(ctx *context, fn *ssa.Function) { } } -func (g *Graph) typ(ctx *context, t types.Type) { +func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { if g.wholeProgram { g.mu.Lock() } @@ -1402,6 +1410,9 @@ func (g *Graph) typ(ctx *context, t types.Type) { } else if isNoCopyType(t.Field(i).Type()) { // (6.1) structs use fields of type NoCopy sentinel ctx.use(t.Field(i), t, edgeNoCopySentinel) + } else if parent == nil { + // (11.1) anonymous struct types use all their fields. + ctx.use(t.Field(i), t, edgeAnonymousStruct) } if t.Field(i).Anonymous() { // (e3) exported identifiers aren't automatically used. 
@@ -1476,18 +1487,18 @@ func (g *Graph) typ(ctx *context, t types.Type) { g.function(ctx, ctx.pkg.SSA.Prog.FuncValue(t.Method(i))) } - g.typ(ctx, t.Underlying()) + g.typ(ctx, t.Underlying(), t) case *types.Slice: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem()) + g.typ(ctx, t.Elem(), nil) case *types.Map: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) // (9.3) types use their underlying and element types ctx.seeAndUse(t.Key(), t, edgeKeyType) - g.typ(ctx, t.Elem()) - g.typ(ctx, t.Key()) + g.typ(ctx, t.Elem(), nil) + g.typ(ctx, t.Key(), nil) case *types.Signature: g.signature(ctx, t) case *types.Interface: @@ -1506,15 +1517,15 @@ func (g *Graph) typ(ctx *context, t types.Type) { case *types.Array: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem()) + g.typ(ctx, t.Elem(), nil) case *types.Pointer: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem()) + g.typ(ctx, t.Elem(), nil) case *types.Chan: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) - g.typ(ctx, t.Elem()) + g.typ(ctx, t.Elem(), nil) case *types.Tuple: for i := 0; i < t.Len(); i++ { // (9.3) types use their underlying and element types @@ -1529,7 +1540,7 @@ func (g *Graph) typ(ctx *context, t types.Type) { func (g *Graph) variable(ctx *context, v *types.Var) { // (9.2) variables use their types ctx.seeAndUse(v.Type(), v, edgeType) - g.typ(ctx, v.Type()) + g.typ(ctx, v.Type(), nil) } func (g *Graph) signature(ctx *context, sig *types.Signature) { @@ -1582,7 +1593,7 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) { case *ssa.Const: // (9.6) instructions use their operands' types ctx.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(ctx, v.Type()) + g.typ(ctx, v.Type(), nil) case *ssa.Global: if v.Object() != nil { // (9.5) instructions use their operands @@ -1598,7 +1609,7 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) { // (4.8) instructions use their types // (9.4) conversions use the type they convert to ctx.seeAndUse(v.Type(), fnObj, edgeType) - g.typ(ctx, v.Type()) + g.typ(ctx, v.Type(), nil) } } switch instr := instr.(type) { From 2192e04fe135f4f8a6aab7fc0c615e1c898a497c Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 13 May 2019 23:41:55 +0200 Subject: [PATCH 176/254] unused: collapse unnecessary nodes --- unused/unused.go | 51 ++++++++++++++++++++++++------------------------ 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 95d69f11b..1c3008153 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -18,6 +18,14 @@ import ( "honnef.co/go/tools/ssa" ) +// The graph we construct omits nodes along a path that do not +// contribute any new information to the solution. For example, the +// full graph for a function with a receiver would be Func -> +// Signature -> Var -> Type. However, since signatures cannot be +// unused, and receivers are always considered used, we can compact +// the graph down to Func -> Type. This makes the graph smaller, but +// harder to debug. + // TODO(dh): conversions between structs mark fields as used, but the // conversion itself isn't part of that subgraph. 
even if the function // containing the conversion is unused, the fields will be marked as @@ -660,10 +668,6 @@ func (c *Checker) results() []types.Object { c.debugf("n%d [color=purple];\n", node.id) return } - if node.ignored { - c.debugf("n%d [color=gray];\n", node.id) - return - } c.debugf("n%d [color=red];\n", node.id) switch obj := node.obj.(type) { @@ -781,9 +785,6 @@ type Node struct { mu sync.Mutex used map[*Node]edge - // even if unused, this specific node should never be reported. - // e.g. function receivers. - ignored bool // set during final graph walk if node is reachable seen bool @@ -1354,8 +1355,7 @@ func (g *Graph) function(ctx *context, fn *ssa.Function) { ctx.seenFns[name] = struct{}{} // (4.1) functions use all their arguments, return parameters and receivers - ctx.seeAndUse(fn.Signature, owningObject(fn), edgeFunctionSignature) - g.signature(ctx, fn.Signature) + g.signature(ctx, fn.Signature, owningObject(fn)) g.instructions(ctx, fn) for _, anon := range fn.AnonFuncs { // (4.2) functions use anonymous functions defined beneath them @@ -1500,14 +1500,14 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { g.typ(ctx, t.Elem(), nil) g.typ(ctx, t.Key(), nil) case *types.Signature: - g.signature(ctx, t) + g.signature(ctx, t, nil) case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) // (8.3) All interface methods are marked as used ctx.seeAndUse(m, t, edgeInterfaceMethod) ctx.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) - g.signature(ctx, m.Type().(*types.Signature)) + g.signature(ctx, m.Type().(*types.Signature), nil) } for i := 0; i < t.NumEmbeddeds(); i++ { tt := t.EmbeddedType(i) @@ -1529,8 +1529,8 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { case *types.Tuple: for i := 0; i < t.Len(); i++ { // (9.3) types use their underlying and element types - ctx.seeAndUse(t.At(i), t, edgeTupleElement) - g.variable(ctx, t.At(i)) + ctx.seeAndUse(t.At(i).Type(), t, edgeTupleElement|edgeType) + g.typ(ctx, t.At(i).Type(), nil) } default: panic(fmt.Sprintf("unreachable: %T", t)) @@ -1543,26 +1543,25 @@ func (g *Graph) variable(ctx *context, v *types.Var) { g.typ(ctx, v.Type(), nil) } -func (g *Graph) signature(ctx *context, sig *types.Signature) { +func (g *Graph) signature(ctx *context, sig *types.Signature, fn types.Object) { + var user interface{} = fn + if fn == nil { + user = sig + ctx.see(sig) + } if sig.Recv() != nil { - if node := ctx.seeAndUse(sig.Recv(), sig, edgeReceiver); node != nil { - node.ignored = true - } - g.variable(ctx, sig.Recv()) + ctx.seeAndUse(sig.Recv().Type(), user, edgeReceiver|edgeType) + g.typ(ctx, sig.Recv().Type(), nil) } for i := 0; i < sig.Params().Len(); i++ { param := sig.Params().At(i) - if node := ctx.seeAndUse(param, sig, edgeFunctionArgument); node != nil { - node.ignored = true - } - g.variable(ctx, param) + ctx.seeAndUse(param.Type(), user, edgeFunctionArgument|edgeType) + g.typ(ctx, param.Type(), nil) } for i := 0; i < sig.Results().Len(); i++ { param := sig.Results().At(i) - if node := ctx.seeAndUse(param, sig, edgeFunctionResult); node != nil { - node.ignored = true - } - g.variable(ctx, param) + ctx.seeAndUse(param.Type(), user, edgeFunctionResult|edgeType) + g.typ(ctx, param.Type(), nil) } } From ed8ee2ac4e529a4398b7dc36749324503f7497cf Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 14 May 2019 02:05:21 +0200 Subject: [PATCH 177/254] unused: more efficient graph representation --- unused/edge.go | 8 +-- unused/edge_string.go | 111 
-------------------------------------- unused/edgekind_string.go | 111 ++++++++++++++++++++++++++++++++++++++ unused/unused.go | 38 +++++++------ 4 files changed, 136 insertions(+), 132 deletions(-) delete mode 100644 unused/edge_string.go create mode 100644 unused/edgekind_string.go diff --git a/unused/edge.go b/unused/edge.go index 94110630d..14edc336c 100644 --- a/unused/edge.go +++ b/unused/edge.go @@ -1,14 +1,14 @@ package unused -//go:generate stringer -type edge -type edge uint64 +//go:generate stringer -type edgeKind +type edgeKind uint64 -func (e edge) is(o edge) bool { +func (e edgeKind) is(o edgeKind) bool { return e&o != 0 } const ( - edgeAlias edge = 1 << iota + edgeAlias edgeKind = 1 << iota edgeBlankField edgeAnonymousStruct edgeCgoExported diff --git a/unused/edge_string.go b/unused/edge_string.go deleted file mode 100644 index 4f006fee3..000000000 --- a/unused/edge_string.go +++ /dev/null @@ -1,111 +0,0 @@ -// Code generated by "stringer -type edge"; DO NOT EDIT. - -package unused - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[edgeAlias-1] - _ = x[edgeBlankField-2] - _ = x[edgeAnonymousStruct-4] - _ = x[edgeCgoExported-8] - _ = x[edgeConstGroup-16] - _ = x[edgeElementType-32] - _ = x[edgeEmbeddedInterface-64] - _ = x[edgeExportedConstant-128] - _ = x[edgeExportedField-256] - _ = x[edgeExportedFunction-512] - _ = x[edgeExportedMethod-1024] - _ = x[edgeExportedType-2048] - _ = x[edgeExportedVariable-4096] - _ = x[edgeExtendsExportedFields-8192] - _ = x[edgeExtendsExportedMethodSet-16384] - _ = x[edgeFieldAccess-32768] - _ = x[edgeFunctionArgument-65536] - _ = x[edgeFunctionResult-131072] - _ = x[edgeFunctionSignature-262144] - _ = x[edgeImplements-524288] - _ = x[edgeInstructionOperand-1048576] - _ = x[edgeInterfaceCall-2097152] - _ = x[edgeInterfaceMethod-4194304] - _ = x[edgeKeyType-8388608] - _ = x[edgeLinkname-16777216] - _ = x[edgeMainFunction-33554432] - _ = x[edgeNamedType-67108864] - _ = x[edgeNetRPCRegister-134217728] - _ = x[edgeNoCopySentinel-268435456] - _ = x[edgeProvidesMethod-536870912] - _ = x[edgeReceiver-1073741824] - _ = x[edgeRuntimeFunction-2147483648] - _ = x[edgeSameObject-4294967296] - _ = x[edgeSignature-8589934592] - _ = x[edgeStructConversion-17179869184] - _ = x[edgeTestSink-34359738368] - _ = x[edgeTupleElement-68719476736] - _ = x[edgeType-137438953472] - _ = x[edgeTypeName-274877906944] - _ = x[edgeUnderlyingType-549755813888] - _ = x[edgePointerType-1099511627776] - _ = x[edgeUnsafeConversion-2199023255552] - _ = x[edgeUsedConstant-4398046511104] - _ = x[edgeVarDecl-8796093022208] -} - -const _edge_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" - -var _edge_map = map[edge]string{ - 1: _edge_name[0:9], 
- 2: _edge_name[9:23], - 4: _edge_name[23:42], - 8: _edge_name[42:57], - 16: _edge_name[57:71], - 32: _edge_name[71:86], - 64: _edge_name[86:107], - 128: _edge_name[107:127], - 256: _edge_name[127:144], - 512: _edge_name[144:164], - 1024: _edge_name[164:182], - 2048: _edge_name[182:198], - 4096: _edge_name[198:218], - 8192: _edge_name[218:243], - 16384: _edge_name[243:271], - 32768: _edge_name[271:286], - 65536: _edge_name[286:306], - 131072: _edge_name[306:324], - 262144: _edge_name[324:345], - 524288: _edge_name[345:359], - 1048576: _edge_name[359:381], - 2097152: _edge_name[381:398], - 4194304: _edge_name[398:417], - 8388608: _edge_name[417:428], - 16777216: _edge_name[428:440], - 33554432: _edge_name[440:456], - 67108864: _edge_name[456:469], - 134217728: _edge_name[469:487], - 268435456: _edge_name[487:505], - 536870912: _edge_name[505:523], - 1073741824: _edge_name[523:535], - 2147483648: _edge_name[535:554], - 4294967296: _edge_name[554:568], - 8589934592: _edge_name[568:581], - 17179869184: _edge_name[581:601], - 34359738368: _edge_name[601:613], - 68719476736: _edge_name[613:629], - 137438953472: _edge_name[629:637], - 274877906944: _edge_name[637:649], - 549755813888: _edge_name[649:667], - 1099511627776: _edge_name[667:682], - 2199023255552: _edge_name[682:702], - 4398046511104: _edge_name[702:718], - 8796093022208: _edge_name[718:729], -} - -func (i edge) String() string { - if str, ok := _edge_map[i]; ok { - return str - } - return "edge(" + strconv.FormatInt(int64(i), 10) + ")" -} diff --git a/unused/edgekind_string.go b/unused/edgekind_string.go new file mode 100644 index 000000000..a01a4ad5a --- /dev/null +++ b/unused/edgekind_string.go @@ -0,0 +1,111 @@ +// Code generated by "stringer -type edgeKind"; DO NOT EDIT. + +package unused + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[edgeAlias-1] + _ = x[edgeBlankField-2] + _ = x[edgeAnonymousStruct-4] + _ = x[edgeCgoExported-8] + _ = x[edgeConstGroup-16] + _ = x[edgeElementType-32] + _ = x[edgeEmbeddedInterface-64] + _ = x[edgeExportedConstant-128] + _ = x[edgeExportedField-256] + _ = x[edgeExportedFunction-512] + _ = x[edgeExportedMethod-1024] + _ = x[edgeExportedType-2048] + _ = x[edgeExportedVariable-4096] + _ = x[edgeExtendsExportedFields-8192] + _ = x[edgeExtendsExportedMethodSet-16384] + _ = x[edgeFieldAccess-32768] + _ = x[edgeFunctionArgument-65536] + _ = x[edgeFunctionResult-131072] + _ = x[edgeFunctionSignature-262144] + _ = x[edgeImplements-524288] + _ = x[edgeInstructionOperand-1048576] + _ = x[edgeInterfaceCall-2097152] + _ = x[edgeInterfaceMethod-4194304] + _ = x[edgeKeyType-8388608] + _ = x[edgeLinkname-16777216] + _ = x[edgeMainFunction-33554432] + _ = x[edgeNamedType-67108864] + _ = x[edgeNetRPCRegister-134217728] + _ = x[edgeNoCopySentinel-268435456] + _ = x[edgeProvidesMethod-536870912] + _ = x[edgeReceiver-1073741824] + _ = x[edgeRuntimeFunction-2147483648] + _ = x[edgeSameObject-4294967296] + _ = x[edgeSignature-8589934592] + _ = x[edgeStructConversion-17179869184] + _ = x[edgeTestSink-34359738368] + _ = x[edgeTupleElement-68719476736] + _ = x[edgeType-137438953472] + _ = x[edgeTypeName-274877906944] + _ = x[edgeUnderlyingType-549755813888] + _ = x[edgePointerType-1099511627776] + _ = x[edgeUnsafeConversion-2199023255552] + _ = x[edgeUsedConstant-4398046511104] + _ = x[edgeVarDecl-8796093022208] +} + +const _edgeKind_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" + +var _edgeKind_map = map[edgeKind]string{ + 1: _edgeKind_name[0:9], + 2: _edgeKind_name[9:23], + 4: _edgeKind_name[23:42], + 8: _edgeKind_name[42:57], + 16: _edgeKind_name[57:71], + 32: _edgeKind_name[71:86], + 64: _edgeKind_name[86:107], + 128: _edgeKind_name[107:127], + 256: _edgeKind_name[127:144], + 512: _edgeKind_name[144:164], + 1024: _edgeKind_name[164:182], + 2048: _edgeKind_name[182:198], + 4096: _edgeKind_name[198:218], + 8192: _edgeKind_name[218:243], + 16384: _edgeKind_name[243:271], + 32768: _edgeKind_name[271:286], + 65536: _edgeKind_name[286:306], + 131072: _edgeKind_name[306:324], + 262144: _edgeKind_name[324:345], + 524288: _edgeKind_name[345:359], + 1048576: _edgeKind_name[359:381], + 2097152: _edgeKind_name[381:398], + 4194304: _edgeKind_name[398:417], + 8388608: _edgeKind_name[417:428], + 16777216: _edgeKind_name[428:440], + 33554432: _edgeKind_name[440:456], + 67108864: _edgeKind_name[456:469], + 134217728: _edgeKind_name[469:487], + 268435456: _edgeKind_name[487:505], + 536870912: _edgeKind_name[505:523], + 1073741824: _edgeKind_name[523:535], + 2147483648: _edgeKind_name[535:554], + 4294967296: _edgeKind_name[554:568], + 8589934592: _edgeKind_name[568:581], + 17179869184: _edgeKind_name[581:601], + 
34359738368: _edgeKind_name[601:613], + 68719476736: _edgeKind_name[613:629], + 137438953472: _edgeKind_name[629:637], + 274877906944: _edgeKind_name[637:649], + 549755813888: _edgeKind_name[649:667], + 1099511627776: _edgeKind_name[667:682], + 2199023255552: _edgeKind_name[682:702], + 4398046511104: _edgeKind_name[702:718], + 8796093022208: _edgeKind_name[718:729], +} + +func (i edgeKind) String() string { + if str, ok := _edgeKind_map[i]; ok { + return str + } + return "edgeKind(" + strconv.FormatInt(int64(i), 10) + ")" +} diff --git a/unused/unused.go b/unused/unused.go index 1c3008153..6f60317bd 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -624,10 +624,10 @@ func (c *Checker) results() []types.Object { } else { c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.obj, node.obj)) } - for used, e := range node.used { - for i := edge(1); i < 64; i++ { - if e.is(1 << i) { - c.debugf("n%d -> n%d [label=%q];\n", node.id, used.id, edge(1< n%d [label=%q];\n", node.id, e.node.id, edgeKind(1< Date: Tue, 14 May 2019 04:30:48 +0200 Subject: [PATCH 178/254] unused: don't strive to implement empty interfaces --- unused/unused.go | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 6f60317bd..c3c9ba02a 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -587,7 +587,9 @@ func (c *Checker) results() []types.Object { c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: - ifaces = append(ifaces, t) + if t.NumMethods() > 0 { + ifaces = append(ifaces, t) + } default: if _, ok := t.Underlying().(*types.Interface); !ok { notIfaces = append(notIfaces, t) @@ -596,7 +598,11 @@ func (c *Checker) results() []types.Object { }) for pkg := range c.allPackages { - ifaces = append(ifaces, interfacesFromExportData(pkg)...) 
+ for _, iface := range interfacesFromExportData(pkg) { + if iface.NumMethods() > 0 { + ifaces = append(ifaces, iface) + } + } } ctx := &context{ From 82166e4bcb42893ef70aa7f7623eda34b0cab58d Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 14 May 2019 20:37:36 +0200 Subject: [PATCH 179/254] unused: only truly empty interfaces are irrelevant --- unused/testdata/src/embedding/embedding.go | 5 +++++ unused/unused.go | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/unused/testdata/src/embedding/embedding.go b/unused/testdata/src/embedding/embedding.go index 03fb8dd2f..b907e2918 100644 --- a/unused/testdata/src/embedding/embedding.go +++ b/unused/testdata/src/embedding/embedding.go @@ -70,3 +70,8 @@ func (*t10) Foo() {} type t11 struct{ t10 } var _ = t11{} + +type i5 interface{} +type I6 interface { + i5 +} diff --git a/unused/unused.go b/unused/unused.go index c3c9ba02a..83552ecb5 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -921,7 +921,7 @@ func isIrrelevant(obj interface{}) bool { } return true case *types.Interface: - return T.NumMethods() == 0 + return T.NumMethods() == 0 && T.NumEmbeddeds() == 0 default: return false } From c104c7b0a34edb4519fef0f60f84b40079ce49b1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 14 May 2019 20:45:42 +0200 Subject: [PATCH 180/254] unused: don't create pointer types for interfaces --- unused/unused.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/unused/unused.go b/unused/unused.go index 83552ecb5..008187a15 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1484,7 +1484,9 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { ctx.seeAndUse(t, t.Obj(), edgeNamedType) // (2.4) named types use the pointer type - ctx.seeAndUse(types.NewPointer(t), t, edgePointerType) + if _, ok := t.Underlying().(*types.Interface); !ok { + ctx.seeAndUse(types.NewPointer(t), t, edgePointerType) + } for i := 0; i < t.NumMethods(); i++ { ctx.see(t.Method(i)) From 564e228f122b088938c830425f934f8031a40c63 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 14 May 2019 20:45:56 +0200 Subject: [PATCH 181/254] unused: deduplicate empty interface --- go/types/typeutil/identical.go | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/go/types/typeutil/identical.go b/go/types/typeutil/identical.go index 042682b82..c0ca441c3 100644 --- a/go/types/typeutil/identical.go +++ b/go/types/typeutil/identical.go @@ -6,7 +6,7 @@ import ( // Identical reports whether x and y are identical types. // Unlike types.Identical, receivers of Signature types are not ignored. -// Unlike types.Identical, interfaces are compared via pointer equality. +// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). // Unlike types.Identical, structs are compared via pointer equality. 
func Identical(x, y types.Type) (ret bool) { if !types.Identical(x, y) { @@ -48,6 +48,13 @@ func Identical(x, y types.Type) (ret bool) { // should be impossible return true } + if x.NumEmbeddeds() == 0 && + y.NumEmbeddeds() == 0 && + x.NumMethods() == 0 && + y.NumMethods() == 0 { + // all truly empty interfaces are the same + return true + } return x == y case *types.Signature: y, ok := y.(*types.Signature) From 9e50c21463db82a19b2e5625976b6c41b0cf80d6 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 14 May 2019 20:58:07 +0200 Subject: [PATCH 182/254] unused: don't create pointer types if named type has no methods --- unused/unused.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/unused/unused.go b/unused/unused.go index 008187a15..9606f9185 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1484,7 +1484,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { ctx.seeAndUse(t, t.Obj(), edgeNamedType) // (2.4) named types use the pointer type - if _, ok := t.Underlying().(*types.Interface); !ok { + if _, ok := t.Underlying().(*types.Interface); !ok && t.NumMethods() > 0 { ctx.seeAndUse(types.NewPointer(t), t, edgePointerType) } From ec62690e0345599f3a50d3276800ac723ef7fec0 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 14 May 2019 22:40:27 +0200 Subject: [PATCH 183/254] unused: omit more irrelevant types --- unused/unused.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/unused/unused.go b/unused/unused.go index 9606f9185..e71710c15 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -922,6 +922,14 @@ func isIrrelevant(obj interface{}) bool { return true case *types.Interface: return T.NumMethods() == 0 && T.NumEmbeddeds() == 0 + case *types.Pointer: + return isIrrelevant(T.Elem()) + case *types.Map: + return isIrrelevant(T.Key()) && isIrrelevant(T.Elem()) + case *types.Struct: + return T.NumFields() == 0 + case *types.Chan: + return isIrrelevant(T.Elem()) default: return false } From e5bf23c29cf35e9046c6f44f7d6533644fb5ab0b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 15 May 2019 01:07:39 +0200 Subject: [PATCH 184/254] unused: merge identical objects We've represented identical objects as multiple nodes with edges between them. This made the graph more explicit and helped with debugging. However, it wastes memory and isn't useful for anything but debugging. 
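For illustration, a minimal sketch of the idea (not code from this patch; objKey, node and nodeFor are invented names, whereas the real implementation keys on objNodeKey and stores nodes in a sync.Map, as the diff below shows): identical objects resolve to one canonical node looked up by key, rather than to separate nodes joined by an edgeSameObject edge.

package main

import "fmt"

// objKey and node are simplified stand-ins for the real objNodeKey and
// Node types; the field names are invented for this sketch.
type objKey struct {
	file string
	line int
	name string
}

type node struct {
	id  int
	obj string
}

type graph struct {
	nextID int
	nodes  map[objKey]*node
}

// nodeFor returns the canonical node for key, creating it on first use.
// Identical objects (same key) share one node instead of being linked
// by an extra "same object" edge.
func (g *graph) nodeFor(key objKey, obj string) (n *node, created bool) {
	if n, ok := g.nodes[key]; ok {
		return n, false // reuse the existing node
	}
	g.nextID++
	n = &node{id: g.nextID, obj: obj}
	g.nodes[key] = n
	return n, true
}

func main() {
	g := &graph{nodes: map[objKey]*node{}}
	k := objKey{file: "foo.go", line: 10, name: "Fn"}
	a, createdA := g.nodeFor(k, "func Fn")
	b, createdB := g.nodeFor(k, "func Fn") // e.g. the test variant of the same object
	fmt.Println(a == b, createdA, createdB) // true true false
}

Because the lookup hands back the existing node, later uses attach their edges to the canonical node directly, which is why the same-object edge kind can be dropped entirely.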
--- unused/edge.go | 1 - unused/edgekind_string.go | 48 +++++++++++++++++++-------------------- unused/unused.go | 20 ++++++++-------- 3 files changed, 33 insertions(+), 36 deletions(-) diff --git a/unused/edge.go b/unused/edge.go index 14edc336c..02e0d09cf 100644 --- a/unused/edge.go +++ b/unused/edge.go @@ -40,7 +40,6 @@ const ( edgeProvidesMethod edgeReceiver edgeRuntimeFunction - edgeSameObject edgeSignature edgeStructConversion edgeTestSink diff --git a/unused/edgekind_string.go b/unused/edgekind_string.go index a01a4ad5a..7629636cf 100644 --- a/unused/edgekind_string.go +++ b/unused/edgekind_string.go @@ -40,21 +40,20 @@ func _() { _ = x[edgeProvidesMethod-536870912] _ = x[edgeReceiver-1073741824] _ = x[edgeRuntimeFunction-2147483648] - _ = x[edgeSameObject-4294967296] - _ = x[edgeSignature-8589934592] - _ = x[edgeStructConversion-17179869184] - _ = x[edgeTestSink-34359738368] - _ = x[edgeTupleElement-68719476736] - _ = x[edgeType-137438953472] - _ = x[edgeTypeName-274877906944] - _ = x[edgeUnderlyingType-549755813888] - _ = x[edgePointerType-1099511627776] - _ = x[edgeUnsafeConversion-2199023255552] - _ = x[edgeUsedConstant-4398046511104] - _ = x[edgeVarDecl-8796093022208] + _ = x[edgeSignature-4294967296] + _ = x[edgeStructConversion-8589934592] + _ = x[edgeTestSink-17179869184] + _ = x[edgeTupleElement-34359738368] + _ = x[edgeType-68719476736] + _ = x[edgeTypeName-137438953472] + _ = x[edgeUnderlyingType-274877906944] + _ = x[edgePointerType-549755813888] + _ = x[edgeUnsafeConversion-1099511627776] + _ = x[edgeUsedConstant-2199023255552] + _ = x[edgeVarDecl-4398046511104] } -const _edgeKind_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSameObjectedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" +const _edgeKind_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" var _edgeKind_map = map[edgeKind]string{ 1: _edgeKind_name[0:9], @@ -89,18 +88,17 @@ var _edgeKind_map = map[edgeKind]string{ 536870912: _edgeKind_name[505:523], 1073741824: _edgeKind_name[523:535], 2147483648: _edgeKind_name[535:554], - 4294967296: _edgeKind_name[554:568], - 8589934592: _edgeKind_name[568:581], - 17179869184: _edgeKind_name[581:601], - 34359738368: _edgeKind_name[601:613], - 68719476736: _edgeKind_name[613:629], - 
137438953472: _edgeKind_name[629:637], - 274877906944: _edgeKind_name[637:649], - 549755813888: _edgeKind_name[649:667], - 1099511627776: _edgeKind_name[667:682], - 2199023255552: _edgeKind_name[682:702], - 4398046511104: _edgeKind_name[702:718], - 8796093022208: _edgeKind_name[718:729], + 4294967296: _edgeKind_name[554:567], + 8589934592: _edgeKind_name[567:587], + 17179869184: _edgeKind_name[587:599], + 34359738368: _edgeKind_name[599:615], + 68719476736: _edgeKind_name[615:623], + 137438953472: _edgeKind_name[623:635], + 274877906944: _edgeKind_name[635:653], + 549755813888: _edgeKind_name[653:668], + 1099511627776: _edgeKind_name[668:688], + 2199023255552: _edgeKind_name[688:704], + 4398046511104: _edgeKind_name[704:715], } func (i edgeKind) String() string { diff --git a/unused/unused.go b/unused/unused.go index e71710c15..d1d4a695e 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -831,22 +831,22 @@ func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) { if node, ok := g.Nodes.Load(obj); ok { return node.(*Node), false } - node = g.newNode(ctx, obj) - g.Nodes.Store(obj, node) + if obj, ok := obj.(types.Object); ok { key := objNodeKeyFor(g.fset, obj) if o, ok := g.objNodes.Load(key); ok { onode := o.(*Node) - node.mu.Lock() - onode.mu.Lock() - node.used = append(node.used, edge{node: onode, kind: edgeSameObject}) - onode.used = append(onode.used, edge{node: node, kind: edgeSameObject}) - node.mu.Unlock() - onode.mu.Unlock() - } else { - g.objNodes.Store(key, node) + return onode, false } + + node = g.newNode(ctx, obj) + g.Nodes.Store(obj, node) + g.objNodes.Store(key, node) + return node, true } + + node = g.newNode(ctx, obj) + g.Nodes.Store(obj, node) return node, true } From ad1db131e23ea62304831bae2bba3b54573580d6 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 16 May 2019 04:54:46 +0200 Subject: [PATCH 185/254] lint: Package.Imports doesn't need to be a map --- lint/runner.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 01e20e1a0..3023b692f 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -60,7 +60,7 @@ const sanityCheck = true type Package struct { *packages.Package - Imports map[string]*Package + Imports []*Package initial bool fromSource bool hash string @@ -478,7 +478,6 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy packages.Visit(initialPkgs, nil, func(l *packages.Package) { m[l] = &Package{ Package: l, - Imports: map[string]*Package{}, results: make([]*result, len(r.analyzerIDs.m)), facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)), pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)), @@ -491,8 +490,8 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy for _, err := range l.Errors { m[l].errs = append(m[l].errs, err) } - for k, v := range l.Imports { - m[l].Imports[k] = m[v] + for _, v := range l.Imports { + m[l].Imports = append(m[l].Imports, m[v]) } m[l].hash, err = packageHash(m[l]) From 3aa670edc92a03bfc6d057fcaccb35ae4770b4e8 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 16 May 2019 08:50:54 +0200 Subject: [PATCH 186/254] all: reuse method set cache of SSA --- simple/analysis.go | 2 +- simple/lint.go | 9 ++++++--- staticcheck/lint.go | 18 ++++++++++++------ unused/unused.go | 9 +++++---- 4 files changed, 24 insertions(+), 14 deletions(-) diff --git a/simple/analysis.go b/simple/analysis.go index d2a5959b2..a3dd37f7d 100644 --- a/simple/analysis.go +++ 
b/simple/analysis.go @@ -168,7 +168,7 @@ var Analyzers = map[string]*analysis.Analyzer{ Name: "S1025", Run: LintRedundantSprintf, Doc: docS1025, - Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, + Requires: []*analysis.Analyzer{buildssa.Analyzer, inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1028": { diff --git a/simple/lint.go b/simple/lint.go index 183adfc9f..b1fcf988c 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -14,7 +14,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" . "honnef.co/go/tools/arg" + "honnef.co/go/tools/internal/passes/buildssa" "honnef.co/go/tools/internal/sharedcheck" "honnef.co/go/tools/lint" . "honnef.co/go/tools/lint/lintdsl" @@ -1470,8 +1472,8 @@ func LintRedundantBreak(pass *analysis.Pass) (interface{}, error) { return nil, nil } -func isStringer(T types.Type) bool { - ms := types.NewMethodSet(T) +func isStringer(T types.Type, msCache *typeutil.MethodSetCache) bool { + ms := msCache.MethodSet(T) sel := ms.Lookup(nil, "String") if sel == nil { return false @@ -1509,7 +1511,8 @@ func LintRedundantSprintf(pass *analysis.Pass) (interface{}, error) { arg := call.Args[Arg("fmt.Sprintf.a[0]")] typ := pass.TypesInfo.TypeOf(arg) - if isStringer(typ) { + ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg + if isStringer(typ, &ssapkg.Prog.MethodSets) { pass.Reportf(call.Pos(), "should use String() instead of fmt.Sprintf") return } diff --git a/staticcheck/lint.go b/staticcheck/lint.go index eac5532b1..24d59cc61 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -35,6 +35,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" ) func validRegexp(call *Call) { @@ -314,6 +315,11 @@ var verbs = [...]verbFlag{ } func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { + var msCache *typeutil.MethodSetCache + if f.Parent() != nil { + msCache = &f.Parent().Prog.MethodSets + } + elem := func(T types.Type, verb rune) ([]types.Type, bool) { if verbs[verb]&noRecurse != 0 { return []types.Type{T}, false @@ -438,7 +444,7 @@ func checkPrintfCallImpl(call *Call, f ssa.Value, args []ssa.Value) { return true } - ms := types.NewMethodSet(T) + ms := msCache.MethodSet(T) if isFormatter(T, ms) { // the value is responsible for formatting itself return true @@ -671,7 +677,7 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { } } // OPT(dh): we could use a method set cache here - ms := types.NewMethodSet(T) + ms := call.Instr.Parent().Prog.MethodSets.MethodSet(T) // TODO(dh): we're not checking the signature, which can cause false negatives. // This isn't a huge problem, however, since vet complains about incorrect signatures. for _, meth := range meths { @@ -686,14 +692,15 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallCheck { // TODO(dh): flag slices and maps of unsupported types return func(call *Call) { + msCache := call.Instr.Parent().Prog.MethodSets + arg := call.Args[argN] T := arg.Value.Value.Type() Ts, ok := Dereference(T).Underlying().(*types.Struct) if !ok { return } - // OPT(dh): we could use a method set cache here - ms := types.NewMethodSet(T) + ms := msCache.MethodSet(T) // TODO(dh): we're not checking the signature, which can cause false negatives. 
// This isn't a huge problem, however, since vet complains about incorrect signatures. for _, meth := range meths { @@ -709,8 +716,7 @@ func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallChec if reflect.StructTag(field.Tag).Get(tag) == "-" { continue } - // OPT(dh): we could use a method set cache here - ms := types.NewMethodSet(field.Var.Type()) + ms := msCache.MethodSet(field.Var.Type()) // TODO(dh): we're not checking the signature, which can cause false negatives. // This isn't a huge problem, however, since vet complains about incorrect signatures. for _, meth := range meths { diff --git a/unused/unused.go b/unused/unused.go index d1d4a695e..3dfeeeb05 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -612,6 +612,8 @@ func (c *Checker) results() []types.Object { // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { + // OPT(dh): it is unfortunate that we do not have access + // to a populated method set at this point. ms := types.NewMethodSet(t) for _, iface := range ifaces { if sels, ok := c.graph.implements(t, iface, ms); ok { @@ -756,7 +758,6 @@ type context struct { seenFns map[string]struct{} seenTypes *typeutil.Map nodeCounter uint64 - msCache typeutil.MethodSetCache // local cache for the map in Graph typeNodes typeutil.Map @@ -1322,7 +1323,7 @@ func (g *Graph) entry(pkg *pkg) { // (8.0) handle interfaces for _, t := range notIfaces { - ms := ctx.msCache.MethodSet(t) + ms := pkg.SSA.Prog.MethodSets.MethodSet(t) for _, iface := range ifaces { if sels, ok := g.implements(t, iface, ms); ok { for _, sel := range sels { @@ -1442,7 +1443,7 @@ func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { // the pointer type to get the full method set T = types.NewPointer(T) } - ms := ctx.msCache.MethodSet(T) + ms := ctx.pkg.SSA.Prog.MethodSets.MethodSet(T) for j := 0; j < ms.Len(); j++ { if ms.At(j).Obj().Exported() { // (6.4) structs use embedded fields that have exported methods (recursively) @@ -1667,7 +1668,7 @@ func (g *Graph) instructions(ctx *context, fn *ssa.Function) { walkPhi(arg, func(v ssa.Value) { if v, ok := v.(*ssa.MakeInterface); ok { walkPhi(v.X, func(vv ssa.Value) { - ms := ctx.msCache.MethodSet(vv.Type()) + ms := ctx.pkg.SSA.Prog.MethodSets.MethodSet(vv.Type()) for i := 0; i < ms.Len(); i++ { if ms.At(i).Obj().Exported() { g.useMethod(ctx, vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister) From 1dc4b547d7f9ef247c11686bf0d20c45867b52ad Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 17 May 2019 00:00:19 +0200 Subject: [PATCH 187/254] unused: compute more efficient key for object deduplication --- unused/unused.go | 47 +++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 45 insertions(+), 2 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index 3dfeeeb05..b3cb167ec 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -712,15 +712,51 @@ func (c *Checker) processPkg(graph *Graph, pkg *pkg) { } func objNodeKeyFor(fset *token.FileSet, obj types.Object) objNodeKey { + var kind objType + switch obj.(type) { + case *types.PkgName: + kind = otPkgName + case *types.Const: + kind = otConst + case *types.TypeName: + kind = otTypeName + case *types.Var: + kind = otVar + case *types.Func: + kind = otFunc + case *types.Label: + kind = otLabel + case *types.Builtin: + kind = otBuiltin + case *types.Nil: + kind = otNil + default: + panic(fmt.Sprintf("unreachable: %T", obj)) + } + position := fset.PositionFor(obj.Pos(), false) 
position.Column = 0 position.Offset = 0 return objNodeKey{ position: position, - str: fmt.Sprint(obj), + kind: kind, + name: obj.Name(), } } +type objType uint8 + +const ( + otPkgName objType = iota + otConst + otTypeName + otVar + otFunc + otLabel + otBuiltin + otNil +) + // An objNodeKey describes a types.Object node in the graph. // // Due to test variants we may end up with multiple instances of the @@ -728,9 +764,16 @@ func objNodeKeyFor(fset *token.FileSet, obj types.Object) objNodeKey { // source position. And because export data lacks column information, // we also have to incorporate the object's string representation in // the key. +// +// Previously we used the object's full string representation +// (types.ObjectString), but that causes a significant amount of +// allocations. Currently we're using the object's type and name, in +// the hope that it is impossible for two objects to have the same +// type, name and file position. type objNodeKey struct { position token.Position - str string + kind objType + name string } type Graph struct { From 8885ed769131661592af0461cab15213e5a79ac9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 18 May 2019 08:12:47 +0200 Subject: [PATCH 188/254] lint: code cleanups --- lint/runner.go | 47 +++++++++++++---------------------------------- 1 file changed, 13 insertions(+), 34 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 3023b692f..e38a602b4 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -67,7 +67,8 @@ type Package struct { done chan struct{} resultsMu sync.Mutex - results []*result + // results maps analyzer IDs to analyzer results + results []*result cfg *config.Config gen map[string]bool @@ -115,17 +116,12 @@ type Fact struct { Fact analysis.Fact } -type newFact struct { - obj types.Object - fact analysis.Fact -} - type analysisAction struct { - analyzer *analysis.Analyzer - analyzerID int - pkg *Package - newFacts []newFact - problems []Problem + analyzer *analysis.Analyzer + analyzerID int + pkg *Package + newPackageFacts []analysis.Fact + problems []Problem pkgFacts map[*types.Package][]analysis.Fact } @@ -194,7 +190,7 @@ func (ac *analysisAction) exportPackageFact(fact analysis.Fact) { panic("analysis doesn't export any facts") } ac.pkgFacts[ac.pkg.Types] = append(ac.pkgFacts[ac.pkg.Types], fact) - ac.newFacts = append(ac.newFacts, newFact{nil, fact}) + ac.newPackageFacts = append(ac.newPackageFacts, fact) } func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { @@ -226,11 +222,6 @@ func (r *Runner) runAnalysis(ac *analysisAction) (ret interface{}, err error) { close(res.ready) }() - // Package may be a dependency or a package the user requested - // Facts for a dependency may be cached or not - // Diagnostics for a user package may be cached or not (not yet) - // When we have to analyze a package, we have to analyze it with all dependencies. - pass := new(analysis.Pass) *pass = analysis.Pass{ Analyzer: ac.analyzer, @@ -373,24 +364,12 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter } if len(ac.analyzer.FactTypes) > 0 { - // Merge new facts into the package. - for _, fact := range ac.newFacts { - if fact.obj == nil { - id := r.analyzerIDs.get(ac.analyzer) - ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact.fact) - } else { - panic("unexpected new object fact") - } - } - - // Persist facts to cache + // Merge new facts into the package and persist them. 
var facts []Fact - for _, fact := range ac.newFacts { - if fact.obj == nil { - facts = append(facts, Fact{"", fact.fact}) - } else { - panic("unexpected object fact") - } + for _, fact := range ac.newPackageFacts { + id := r.analyzerIDs.get(ac.analyzer) + ac.pkg.pkgFacts[id] = append(ac.pkg.pkgFacts[id], fact) + facts = append(facts, Fact{"", fact}) } for obj, afacts := range ac.pkg.facts[ac.analyzerID] { if obj.Pkg() != ac.pkg.Package.Types { From 42be3572544ceaefd08a21ca9cf0fbf46051f801 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 18 May 2019 08:23:21 +0200 Subject: [PATCH 189/254] lint: add docs for some functions --- lint/runner.go | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/lint/runner.go b/lint/runner.go index e38a602b4..df1f7ef1f 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -328,6 +328,9 @@ func (r *Runner) makeAnalysisAction(a *analysis.Analyzer, pkg *Package) *analysi return ac } +// analyzes that we always want to run, even if they're not being run +// explicitly or as dependencies. these are necessary for the inner +// workings of the runner. var injectedAnalyses = []*analysis.Analyzer{facts.Generated, config.Analyzer} func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (interface{}, error) { @@ -413,6 +416,13 @@ func NewRunner(stats *Stats) (*Runner, error) { }, nil } +// Run loads packages corresponding to patterns and analyses them with +// analyzers. It returns the loaded packages, which contain reported +// diagnostics as well as extracted ignore directives. +// +// Note that diagnostics have not been filtered at this point yet, to +// accomodate cumulative analyzes that require additional steps to +// produce diagnostics. func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer) ([]*Package, error) { r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}} id := 0 @@ -666,6 +676,9 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { <-imp.done if len(imp.errs) > 0 { if imp.initial { + // Don't print the error of the dependency since it's + // an initial package and we're already printing the + // error. pkg.errs = append(pkg.errs, fmt.Errorf("could not analyze dependency %s of %s", imp, pkg)) } else { var s string @@ -774,6 +787,9 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { // from processPkg. } +// hasFacts reports whether an analysis exports any facts. An analysis +// that has a transitive dependency that exports facts is considered +// to be exporting facts. func (r *Runner) hasFacts(a *analysis.Analyzer) bool { ret := false seen := make([]bool, len(r.analyzerIDs.m)) @@ -806,6 +822,8 @@ func parseDirective(s string) (cmd string, args []string) { return fields[0], fields[1:] } +// parseDirectives extracts all linter directives from the source +// files of the package. Malformed directives are returned as problems. 
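+//
+// A directive is a comment of one of two forms, where the first field
+// names the check(s) being ignored and the rest of the line is the
+// required reason, for example:
+//
+//    //lint:ignore SA4006 reason why the check is ignored at this position
+//    //lint:file-ignore SA1019 reason why the check is ignored in the whole file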
func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { var ignores []Ignore var problems []Problem @@ -835,7 +853,6 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { switch cmd { case "ignore", "file-ignore": if len(args) < 2 { - // FIXME(dh): this causes duplicated warnings when using megacheck p := Problem{ Pos: DisplayPosition(pkg.Fset, c.Pos()), Message: "malformed linter directive; missing the required reason field?", @@ -875,6 +892,9 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) { return ignores, problems } +// packageHash computes a package's hash. The hash is based on all Go +// files that make up the package, as well as the hashes of imported +// packages. func packageHash(pkg *Package) (string, error) { key := cache.NewHash("package hash") fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) @@ -903,6 +923,7 @@ func packageHash(pkg *Package) (string, error) { return hex.EncodeToString(h[:]), nil } +// passActionID computes an ActionID for an analysis pass. func passActionID(pkg *Package, analyzer *analysis.Analyzer) (cache.ActionID, error) { key := cache.NewHash("action ID") fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) From e5fb910e30106cf707011b306912a525a936eedf Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 18 May 2019 09:13:37 +0200 Subject: [PATCH 190/254] lint: only inject our internal analyzers once per package --- lint/runner.go | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index df1f7ef1f..9d23c3bdc 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -126,6 +126,10 @@ type analysisAction struct { pkgFacts map[*types.Package][]analysis.Fact } +func (ac *analysisAction) String() string { + return fmt.Sprintf("%s @ %s", ac.analyzer, ac.pkg) +} + func (ac *analysisAction) allObjectFacts() []analysis.ObjectFact { out := make([]analysis.ObjectFact, 0, len(ac.pkg.facts[ac.analyzerID])) for obj, facts := range ac.pkg.facts[ac.analyzerID] { @@ -340,16 +344,7 @@ func (r *Runner) runAnalysisUser(pass *analysis.Pass, ac *analysisAction) (inter // User-provided package, analyse it // First analyze it with dependencies - var req []*analysis.Analyzer - req = append(req, ac.analyzer.Requires...) - if pass.Analyzer != facts.Generated && pass.Analyzer != config.Analyzer { - // Ensure all packages have the generated map and config. This is - // required by interna of the runner. Analyses that themselves - // make use of either have an explicit dependency so that other - // runners work correctly, too. - req = append(req, injectedAnalyses...) - } - for _, req := range req { + for _, req := range ac.analyzer.Requires { acReq := r.makeAnalysisAction(req, ac.pkg) ret, err := r.runAnalysis(acReq) if err != nil { @@ -668,6 +663,12 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { close(pkg.done) }() + // Ensure all packages have the generated map and config. This is + // required by interna of the runner. Analyses that themselves + // make use of either have an explicit dependency so that other + // runners work correctly, too. + analyzers = append(analyzers[0:len(analyzers):len(analyzers)], injectedAnalyses...) + if len(pkg.errs) != 0 { return } @@ -774,10 +775,15 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { for _, ac := range acs { pkg.problems = append(pkg.problems, ac.problems...) 
} - if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil { - pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config) + + if pkg.initial { + // Only initial packages have these analyzers run, and only + // initial packages need these. + if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil { + pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config) + } + pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]bool) } - pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]bool) // In a previous version of the code, we would throw away all type // information and reload it from export data. That was From 615c8f1e501f049000fe232e8ce38639a4cafcec Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 18 May 2019 11:34:57 +0200 Subject: [PATCH 191/254] lint: delete unused type StringSliceVar --- lint/lint.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 681bcc329..3b18aad8a 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -386,18 +386,3 @@ func writePackage(buf *bytes.Buffer, pkg *types.Package) { buf.WriteByte('.') } } - -type StringSliceVar []string - -func (v StringSliceVar) String() string { - return strings.Join(v, ",") -} - -func (v *StringSliceVar) Set(s string) error { - *v = StringSliceVar(strings.Split(s, ",")) - return nil -} - -func (v *StringSliceVar) Get() interface{} { - return []string(*v) -} From eede67cb7576dd20f4739f9b73c43e99abd91c70 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 18 May 2019 11:37:51 +0200 Subject: [PATCH 192/254] lint: simplify slice copy --- lint/runner.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/lint/runner.go b/lint/runner.go index 9d23c3bdc..d826dc2f3 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -911,10 +911,9 @@ func packageHash(pkg *Package) (string, error) { } fmt.Fprintf(key, "file %s %x\n", f, h) } - imps := make([]*Package, 0, len(pkg.Imports)) - for _, v := range pkg.Imports { - imps = append(imps, v) - } + + imps := make([]*Package, len(pkg.Imports)) + copy(imps, pkg.Imports) sort.Slice(imps, func(i, j int) bool { return imps[i].PkgPath < imps[j].PkgPath }) From 0ded025fc598722149bb79f3b9951251ab3cec94 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 18 May 2019 11:38:22 +0200 Subject: [PATCH 193/254] unused: delete unused type seenKey --- unused/unused.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/unused/unused.go b/unused/unused.go index b3cb167ec..6af85a77f 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -423,11 +423,6 @@ type pkg struct { SrcFuncs []*ssa.Function } -type seenKey struct { - s string - pos token.Position -} - type Checker struct { WholeProgram bool Debug io.Writer From 313ae6da0100419c55568eef8f5a75fb4ead1980 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 03:35:15 +0200 Subject: [PATCH 194/254] lint/lintutil: make sure we exit even if no errors were found --- lint/lintutil/util.go | 1 + 1 file changed, 1 insertion(+) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 165815695..bbcdea68d 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -259,6 +259,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * if errors > 0 { exit(1) } + exit(0) } type Options struct { From 020dd3d6d5370db51ce320746b152a23d0777fed Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 04:46:48 +0200 Subject: [PATCH 195/254] staticcheck: 
don't copy MethodSet Copying MethodSet copies a mutex, causing a race condition. --- staticcheck/lint.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 24d59cc61..c8da6c842 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -692,7 +692,7 @@ func checkNoopMarshalImpl(argN int, meths ...string) CallCheck { func checkUnsupportedMarshalImpl(argN int, tag string, meths ...string) CallCheck { // TODO(dh): flag slices and maps of unsupported types return func(call *Call) { - msCache := call.Instr.Parent().Prog.MethodSets + msCache := &call.Instr.Parent().Prog.MethodSets arg := call.Args[argN] T := arg.Value.Value.Type() From 61b943c6ee2fae3152803ed726b54a5b1f896685 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 11:49:25 +0200 Subject: [PATCH 196/254] staticcheck: simplify/optimize some instances of AST walking --- staticcheck/lint.go | 88 ++++++++++++++++++++++++--------------------- 1 file changed, 48 insertions(+), 40 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index c8da6c842..a50297dcb 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -940,6 +940,7 @@ func CheckDeferInInfiniteLoop(pass *analysis.Pass) (interface{}, error) { switch stmt := node.(type) { case *ast.ReturnStmt: mightExit = true + return false case *ast.BranchStmt: // TODO(dominikh): if this sees a break in a switch or // select, it doesn't check if it breaks the loop or @@ -947,6 +948,7 @@ func CheckDeferInInfiniteLoop(pass *analysis.Pass) (interface{}, error) { // negatives. if stmt.Tok == token.BREAK { mightExit = true + return false } case *ast.DeferStmt: defers = append(defers, stmt) @@ -993,19 +995,43 @@ func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, erro } func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { - fn := func(node ast.Node) { - if !isTestMain(pass, node) { - return + var ( + fnmain ast.Node + callsExit bool + callsRun bool + arg types.Object + ) + fn := func(node ast.Node, push bool) bool { + if !push { + if fnmain != nil && node == fnmain { + if !callsExit && callsRun { + pass.Reportf(fnmain.Pos(), "TestMain should call os.Exit to set exit code") + } + fnmain = nil + callsExit = false + callsRun = false + arg = nil + } + return true } - arg := pass.TypesInfo.ObjectOf(node.(*ast.FuncDecl).Type.Params.List[0].Names[0]) - callsRun := false - fn2 := func(node ast.Node) bool { - call, ok := node.(*ast.CallExpr) - if !ok { + switch node := node.(type) { + case *ast.FuncDecl: + if fnmain != nil { return true } - sel, ok := call.Fun.(*ast.SelectorExpr) + if !isTestMain(pass, node) { + return false + } + fnmain = node + arg = pass.TypesInfo.ObjectOf(node.Type.Params.List[0].Names[0]) + return true + case *ast.CallExpr: + if IsCallToAST(pass, node, "os.Exit") { + callsExit = true + return false + } + sel, ok := node.Fun.(*ast.SelectorExpr) if !ok { return true } @@ -1021,31 +1047,16 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { return false } return true - } - ast.Inspect(node.(*ast.FuncDecl).Body, fn2) - - callsExit := false - fn3 := func(node ast.Node) bool { - if IsCallToAST(pass, node, "os.Exit") { - callsExit = true - return false - } + default: + // unreachable return true } - ast.Inspect(node.(*ast.FuncDecl).Body, fn3) - if !callsExit && callsRun { - pass.Reportf(node.Pos(), "TestMain should call os.Exit to set exit code") - } } - pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(nil, fn) + 
pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.FuncDecl)(nil), (*ast.CallExpr)(nil)}, fn) return nil, nil } -func isTestMain(pass *analysis.Pass, node ast.Node) bool { - decl, ok := node.(*ast.FuncDecl) - if !ok { - return false - } +func isTestMain(pass *analysis.Pass, decl *ast.FuncDecl) bool { if decl.Name.Name != "TestMain" { return false } @@ -2668,20 +2679,17 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { for _, imp := range pass.Pkg.Imports() { imps[imp.Path()] = imp } - for _, f := range pass.Files { - ast.Inspect(f, func(node ast.Node) bool { - if node, ok := node.(*ast.ImportSpec); ok { - p := node.Path.Value - path := p[1 : len(p)-1] - imp := imps[path] - if depr, ok := deprs.Packages[imp]; ok { - pass.Reportf(node.Pos(), "Package %s is deprecated: %s", path, depr.Msg) - } - } - return true - }) + fn2 := func(node ast.Node) { + spec := node.(*ast.ImportSpec) + p := spec.Path.Value + path := p[1 : len(p)-1] + imp := imps[path] + if depr, ok := deprs.Packages[imp]; ok { + pass.Reportf(spec.Pos(), "Package %s is deprecated: %s", path, depr.Msg) + } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes(nil, fn) + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, fn2) return nil, nil } From c4bd12e9b14878ab0f44790cc27070c4a8691d26 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 12:07:03 +0200 Subject: [PATCH 197/254] all: ignore various minor issues so that staticcheck passes Primarily we're ignoring uses of ssautil.CreateProgram in tests of packages we forked. --- callgraph/cha/cha_test.go | 4 +++- callgraph/rta/rta_test.go | 4 +++- callgraph/static/static_test.go | 4 +++- cmd/keyify/keyify.go | 1 + internal/cache/cache.go | 1 + ssa/builder_test.go | 2 ++ ssa/source_test.go | 2 ++ ssa/stdlib_test.go | 2 ++ ssa/testmain_test.go | 2 ++ 9 files changed, 19 insertions(+), 3 deletions(-) diff --git a/callgraph/cha/cha_test.go b/callgraph/cha/cha_test.go index 54da13640..f210938b3 100644 --- a/callgraph/cha/cha_test.go +++ b/callgraph/cha/cha_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//lint:file-ignore SA1019 go/callgraph's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + // No testdata on Android. // +build !android @@ -20,9 +22,9 @@ import ( "strings" "testing" + "golang.org/x/tools/go/loader" "honnef.co/go/tools/callgraph" "honnef.co/go/tools/callgraph/cha" - "golang.org/x/tools/go/loader" "honnef.co/go/tools/ssa/ssautil" ) diff --git a/callgraph/rta/rta_test.go b/callgraph/rta/rta_test.go index d2857b5a4..a77dfdb4e 100644 --- a/callgraph/rta/rta_test.go +++ b/callgraph/rta/rta_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//lint:file-ignore SA1019 go/callgraph's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + // No testdata on Android. 
// +build !android @@ -20,9 +22,9 @@ import ( "strings" "testing" + "golang.org/x/tools/go/loader" "honnef.co/go/tools/callgraph" "honnef.co/go/tools/callgraph/rta" - "golang.org/x/tools/go/loader" "honnef.co/go/tools/ssa" "honnef.co/go/tools/ssa/ssautil" ) diff --git a/callgraph/static/static_test.go b/callgraph/static/static_test.go index 200b047ae..e3f77b09f 100644 --- a/callgraph/static/static_test.go +++ b/callgraph/static/static_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//lint:file-ignore SA1019 go/callgraph's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + package static_test import ( @@ -11,9 +13,9 @@ import ( "sort" "testing" + "golang.org/x/tools/go/loader" "honnef.co/go/tools/callgraph" "honnef.co/go/tools/callgraph/static" - "golang.org/x/tools/go/loader" "honnef.co/go/tools/ssa/ssautil" ) diff --git a/cmd/keyify/keyify.go b/cmd/keyify/keyify.go index a552d6f54..c4a56e1ea 100644 --- a/cmd/keyify/keyify.go +++ b/cmd/keyify/keyify.go @@ -191,6 +191,7 @@ func keyify( // what were we intending to do here? var lines int numLines += lines + //lint:ignore SA4006 See FIXME above. val, lines = keyify(pkg, val2) } } diff --git a/internal/cache/cache.go b/internal/cache/cache.go index 508877ce7..2b33ca106 100644 --- a/internal/cache/cache.go +++ b/internal/cache/cache.go @@ -155,6 +155,7 @@ func (c *Cache) get(id ActionID) (Entry, error) { eid, entry := entry[3:3+hexSize], entry[3+hexSize:] eout, entry := entry[1:1+hexSize], entry[1+hexSize:] esize, entry := entry[1:1+20], entry[1+20:] + //lint:ignore SA4006 See https://github.com/dominikh/go-tools/issues/465 etime, entry := entry[1:1+20], entry[1+20:] var buf [HashSize]byte if _, err := hex.Decode(buf[:], eid); err != nil || buf != id { diff --git a/ssa/builder_test.go b/ssa/builder_test.go index a824d4e2b..f3564ec6c 100644 --- a/ssa/builder_test.go +++ b/ssa/builder_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//lint:file-ignore SA1019 go/ssa's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + package ssa_test import ( diff --git a/ssa/source_test.go b/ssa/source_test.go index 512be2322..7f2f9e3e3 100644 --- a/ssa/source_test.go +++ b/ssa/source_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//lint:file-ignore SA1019 go/ssa's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + package ssa_test // This file defines tests of source-level debugging utilities. diff --git a/ssa/stdlib_test.go b/ssa/stdlib_test.go index 4850a3135..a4874abfa 100644 --- a/ssa/stdlib_test.go +++ b/ssa/stdlib_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. +//lint:file-ignore SA1019 go/ssa's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + // Incomplete source tree on Android. // +build !android diff --git a/ssa/testmain_test.go b/ssa/testmain_test.go index d2b2f6bd7..e05109768 100644 --- a/ssa/testmain_test.go +++ b/ssa/testmain_test.go @@ -2,6 +2,8 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
+//lint:file-ignore SA1019 go/ssa's test suite is built around the deprecated go/loader. We'll leave fixing that to upstream. + package ssa_test // Tests of FindTests. CreateTestMainPackage is tested via the interpreter. From 93a008eb089f6d57e151cfe3530d3888e6108dcb Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 13:14:18 +0200 Subject: [PATCH 198/254] lint: pass "go" version flag through to analyzers --- lint/lint.go | 1 + lint/runner.go | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/lint/lint.go b/lint/lint.go index 3b18aad8a..f53d6d946 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -116,6 +116,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error if err != nil { return nil, err } + r.goVersion = l.GoVersion pkgs, err := r.Run(cfg, patterns, analyzers) if err != nil { diff --git a/lint/runner.go b/lint/runner.go index d826dc2f3..0b22eff30 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -96,7 +96,8 @@ type Runner struct { // limits parallelism of loading packages loadSem chan struct{} - stats *Stats + goVersion int + stats *Stats } type analyzerIDs struct { @@ -438,6 +439,9 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy } } for _, a := range analyzers { + if v := a.Flags.Lookup("go"); v != nil { + v.Value.Set(fmt.Sprintf("1.%d", r.goVersion)) + } dfs(a) } for _, a := range injectedAnalyses { From 49bbb7a7483c8d355e7619531dc01b4b3f4bd8e1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 13:15:06 +0200 Subject: [PATCH 199/254] go/types/typeutil: don't use importer.ForCompiler yet ForCompiler was added in Go 1.12, but we want to support Go 1.11, too. --- go/types/typeutil/callee_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/types/typeutil/callee_test.go b/go/types/typeutil/callee_test.go index 2201eee71..6875d699f 100644 --- a/go/types/typeutil/callee_test.go +++ b/go/types/typeutil/callee_test.go @@ -63,7 +63,7 @@ func noncalls() { Uses: make(map[*ast.Ident]types.Object), Selections: make(map[*ast.SelectorExpr]*types.Selection), } - cfg := &types.Config{Importer: importer.ForCompiler(fset, "source", nil)} + cfg := &types.Config{Importer: importer.For("source", nil)} if _, err := cfg.Check("p", fset, []*ast.File{f}, info); err != nil { t.Fatal(err) } From 6be3e97684639f8c47ab9534dea055c57481cf6a Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 13:41:45 +0200 Subject: [PATCH 200/254] config: try harder at finding the configuration In Go <1.13, generated files didn't happen to be at the front of the list of files. In Go 1.13 they do, so we have to stop being lazy. Find the first file that looks like it isn't in the build cache. 
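Concretely, the file list of such a package may now start with entries
like these (hypothetical paths):

    /home/user/.cache/go-build/4f/4f0d3c...-d   (generated file in the build cache)
    /home/user/src/proj/foo/foo.go              (ordinary source file)

Previously we derived the configuration directory from the first entry
and could therefore miss the project's staticcheck.conf. Now we skip
files whose path has os.UserCacheDir() as a prefix and use the
directory of the first remaining file; if every file lives in the
build cache, we fall back to the default configuration.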
--- config/config.go | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/config/config.go b/config/config.go index 3ffbcb2dc..690a2f4d7 100644 --- a/config/config.go +++ b/config/config.go @@ -5,6 +5,7 @@ import ( "os" "path/filepath" "reflect" + "strings" "github.com/BurntSushi/toml" "golang.org/x/tools/go/analysis" @@ -18,8 +19,29 @@ var Analyzer = &analysis.Analyzer{ cfg := DefaultConfig return &cfg, nil } - // FIXME(dh): this may yield the wrong path for generated files in the build cache - path := pass.Fset.PositionFor(pass.Files[0].Pos(), true).Filename + cache, err := os.UserCacheDir() + if err != nil { + cache = "" + } + var path string + for _, f := range pass.Files { + p := pass.Fset.PositionFor(f.Pos(), true).Filename + // FIXME(dh): using strings.HasPrefix isn't technically + // correct, but it should be good enough for now. + if cache != "" && strings.HasPrefix(p, cache) { + // File in the build cache of the standard Go build system + continue + } + path = p + break + } + + if path == "" { + // The package only consists of generated files. + cfg := DefaultConfig + return &cfg, nil + } + dir := filepath.Dir(path) cfg, err := Load(dir) if err != nil { From 2d84c8307c19de0d28a2d4b59d9f3b456a8c40f0 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 14:00:15 +0200 Subject: [PATCH 201/254] staticcheck: update CheckDeprecated tests due to stdlib wording changes --- .../testdata/src/CheckDeprecated_go14/CheckDeprecated.go | 2 +- .../testdata/src/CheckDeprecated_go18/CheckDeprecated.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go index 1e8c272ab..21aa784e2 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go14/CheckDeprecated.go @@ -12,7 +12,7 @@ var _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` func fn1(err error) { var r *http.Request - _ = r.Cancel // want `Use the Context and WithContext methods` + _ = r.Cancel // want `If a Request's Cancel field and context are both` _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` _ = os.SEEK_SET if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` diff --git a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go index 470d46bad..e67a9b1f7 100644 --- a/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go +++ b/staticcheck/testdata/src/CheckDeprecated_go18/CheckDeprecated.go @@ -12,7 +12,7 @@ var _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` func fn1(err error) { var r *http.Request - _ = r.Cancel // want `Use the Context and WithContext methods` + _ = r.Cancel // want `If a Request's Cancel field and context are both` _ = syscall.StringByteSlice("") // want `Use ByteSliceFromString instead` _ = os.SEEK_SET // want `Use io\.SeekStart, io\.SeekCurrent, and io\.SeekEnd` if err == http.ErrWriteAfterFlush { // want `ErrWriteAfterFlush is no longer` From 0219a42ae052b8611c50a4c56854b941c94fbc61 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 14:49:44 +0200 Subject: [PATCH 202/254] staticcheck: only use SIGSTOP in test on platforms that have it --- .../CheckUntrappableSignal.go | 3 --- .../CheckUntrappableSignal_unix.go | 16 ++++++++++++++++ 2 files 
changed, 16 insertions(+), 3 deletions(-) create mode 100644 staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go index 3e0a64dfe..d2e8d7ada 100644 --- a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go +++ b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal.go @@ -16,7 +16,4 @@ func fn() { signal.Ignore(syscall.SIGKILL) // want `cannot be trapped` signal.Notify(c, syscall.SIGKILL) // want `cannot be trapped` signal.Reset(syscall.SIGKILL) // want `cannot be trapped` - signal.Ignore(syscall.SIGSTOP) // want `cannot be trapped` - signal.Notify(c, syscall.SIGSTOP) // want `cannot be trapped` - signal.Reset(syscall.SIGSTOP) // want `cannot be trapped` } diff --git a/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go new file mode 100644 index 000000000..faef5e1c3 --- /dev/null +++ b/staticcheck/testdata/src/CheckUntrappableSignal/CheckUntrappableSignal_unix.go @@ -0,0 +1,16 @@ +// +build android darwin dragonfly freebsd linux netbsd openbsd solaris + +package main + +import ( + "os" + "os/signal" + "syscall" +) + +func fn2() { + c := make(chan os.Signal, 1) + signal.Ignore(syscall.SIGSTOP) // want `cannot be trapped` + signal.Notify(c, syscall.SIGSTOP) // want `cannot be trapped` + signal.Reset(syscall.SIGSTOP) // want `cannot be trapped` +} From 68fd1a1c6d03f3756075d864d5529ae754625d56 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 20 May 2019 05:49:52 +0200 Subject: [PATCH 203/254] all: make sure files are gofmted --- callgraph/cha/cha.go | 2 +- callgraph/rta/rta.go | 2 +- unused/testdata/src/tests/tests.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/callgraph/cha/cha.go b/callgraph/cha/cha.go index 2a96b9089..5fb1b868e 100644 --- a/callgraph/cha/cha.go +++ b/callgraph/cha/cha.go @@ -26,10 +26,10 @@ package cha // import "honnef.co/go/tools/callgraph/cha" import ( "go/types" + "golang.org/x/tools/go/types/typeutil" "honnef.co/go/tools/callgraph" "honnef.co/go/tools/ssa" "honnef.co/go/tools/ssa/ssautil" - "golang.org/x/tools/go/types/typeutil" ) // CallGraph computes the call graph of the specified program using the diff --git a/callgraph/rta/rta.go b/callgraph/rta/rta.go index eb7262922..7d7403813 100644 --- a/callgraph/rta/rta.go +++ b/callgraph/rta/rta.go @@ -50,9 +50,9 @@ import ( "fmt" "go/types" + "golang.org/x/tools/go/types/typeutil" "honnef.co/go/tools/callgraph" "honnef.co/go/tools/ssa" - "golang.org/x/tools/go/types/typeutil" ) // A Result holds the results of Rapid Type Analysis, which includes the diff --git a/unused/testdata/src/tests/tests.go b/unused/testdata/src/tests/tests.go index 253de73aa..ca2d5b3cd 100644 --- a/unused/testdata/src/tests/tests.go +++ b/unused/testdata/src/tests/tests.go @@ -1,3 +1,3 @@ package pkg -func fn(){} +func fn() {} From d36f521c5329f8706eb916689bc156af2ad7dbe4 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 21 May 2019 08:35:02 +0200 Subject: [PATCH 204/254] staticcheck: simplify CheckWaitgroupAdd --- staticcheck/lint.go | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index a50297dcb..83834ef7e 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -863,20 +863,8 @@ func 
CheckWaitgroupAdd(pass *analysis.Pass) (interface{}, error) { if !ok { return } - call, ok := stmt.X.(*ast.CallExpr) - if !ok { - return - } - sel, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - return - } - fn, ok := pass.TypesInfo.ObjectOf(sel.Sel).(*types.Func) - if !ok { - return - } - if lint.FuncName(fn) == "(*sync.WaitGroup).Add" { - pass.Reportf(sel.Pos(), "should call %s before starting the goroutine to avoid a race", + if IsCallToAST(pass, stmt.X, "(*sync.WaitGroup).Add") { + pass.Reportf(stmt.Pos(), "should call %s before starting the goroutine to avoid a race", Render(pass, stmt)) } } From 7ccbd68c83c5ad5138c6f501f5275cc418d923f2 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Tue, 21 May 2019 09:02:38 +0200 Subject: [PATCH 205/254] all: make use of subtests and parallel tests, move shared code into helper package --- lint/testutil/util.go | 35 ++++++++ simple/lint_test.go | 96 ++++++++------------- staticcheck/lint_test.go | 174 ++++++++++++++++----------------------- stylecheck/lint_test.go | 51 ++++-------- 4 files changed, 156 insertions(+), 200 deletions(-) create mode 100644 lint/testutil/util.go diff --git a/lint/testutil/util.go b/lint/testutil/util.go new file mode 100644 index 000000000..3acc1086f --- /dev/null +++ b/lint/testutil/util.go @@ -0,0 +1,35 @@ +package testutil + +import ( + "testing" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/analysistest" +) + +type Test struct { + Dir string + Version string +} + +type Analyzer struct { + Analyzer *analysis.Analyzer + Tests []Test +} + +func Run(t *testing.T, analyzers []Analyzer) { + for _, a := range analyzers { + a := a + t.Run(a.Analyzer.Name, func(t *testing.T) { + t.Parallel() + for _, test := range a.Tests { + if test.Version != "" { + if err := a.Analyzer.Flags.Lookup("go").Value.Set(test.Version); err != nil { + t.Fatal(err) + } + } + analysistest.Run(t, analysistest.TestData(), a.Analyzer, test.Dir) + } + }) + } +} diff --git a/simple/lint_test.go b/simple/lint_test.go index f8fa32e5b..4bbd3208c 100644 --- a/simple/lint_test.go +++ b/simple/lint_test.go @@ -3,71 +3,41 @@ package simple import ( "testing" - "golang.org/x/tools/go/analysis/analysistest" + "honnef.co/go/tools/lint/testutil" ) func TestAll(t *testing.T) { - checks := map[string][]struct { - dir string - version string - }{ - "S1000": {{dir: "single-case-select"}}, - "S1001": {{dir: "copy"}}, - "S1002": {{dir: "bool-cmp"}}, - "S1003": {{dir: "contains"}}, - "S1004": {{dir: "compare"}}, - "S1005": { - {dir: "LintBlankOK"}, - {dir: "receive-blank"}, - {dir: "range_go13", version: "1.3"}, - {dir: "range_go14", version: "1.4"}, - }, - "S1006": { - {dir: "for-true"}, - {dir: "generated"}, - }, - "S1007": {{dir: "regexp-raw"}}, - "S1008": {{dir: "if-return"}}, - "S1009": {{dir: "nil-len"}}, - "S1010": {{dir: "slicing"}}, - "S1011": {{dir: "loop-append"}}, - "S1012": {{dir: "time-since"}}, - "S1016": { - {dir: "convert"}, - {dir: "convert_go17", version: "1.7"}, - {dir: "convert_go18", version: "1.8"}, - }, - "S1017": {{dir: "trim"}}, - "S1018": {{dir: "LintLoopSlide"}}, - "S1019": {{dir: "LintMakeLenCap"}}, - "S1020": {{dir: "LintAssertNotNil"}}, - "S1021": {{dir: "LintDeclareAssign"}}, - "S1023": { - {dir: "LintRedundantBreak"}, - {dir: "LintRedundantReturn"}, - }, - "S1024": { - {dir: "LimeTimeUntil_go17", version: "1.7"}, - {dir: "LimeTimeUntil_go18", version: "1.8"}, - }, - "S1025": {{dir: "LintRedundantSprintf"}}, - "S1028": {{dir: "LintErrorsNewSprintf"}}, - "S1029": {{dir: "LintRangeStringRunes"}}, - "S1030": 
{{dir: "LintBytesBufferConversions"}}, - "S1031": {{dir: "LintNilCheckAroundRange"}}, - "S1032": {{dir: "LintSortHelpers"}}, - "S1033": {{dir: "LintGuardedDelete"}}, - "S1034": {{dir: "LintSimplifyTypeSwitch"}}, - } - for check, dirs := range checks { - a := Analyzers[check] - for _, dir := range dirs { - if dir.version != "" { - if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { - t.Fatal(err) - } - } - analysistest.Run(t, analysistest.TestData(), a, dir.dir) - } + checks := []testutil.Analyzer{ + {Analyzer: Analyzers["S1000"], Tests: []testutil.Test{{Dir: "single-case-select"}}}, + {Analyzer: Analyzers["S1001"], Tests: []testutil.Test{{Dir: "copy"}}}, + {Analyzer: Analyzers["S1002"], Tests: []testutil.Test{{Dir: "bool-cmp"}}}, + {Analyzer: Analyzers["S1003"], Tests: []testutil.Test{{Dir: "contains"}}}, + {Analyzer: Analyzers["S1004"], Tests: []testutil.Test{{Dir: "compare"}}}, + {Analyzer: Analyzers["S1005"], Tests: []testutil.Test{{Dir: "LintBlankOK"}, {Dir: "receive-blank"}, {Dir: "range_go13", Version: "1.3"}, {Dir: "range_go14", Version: "1.4"}}}, + {Analyzer: Analyzers["S1006"], Tests: []testutil.Test{{Dir: "for-true"}, {Dir: "generated"}}}, + {Analyzer: Analyzers["S1007"], Tests: []testutil.Test{{Dir: "regexp-raw"}}}, + {Analyzer: Analyzers["S1008"], Tests: []testutil.Test{{Dir: "if-return"}}}, + {Analyzer: Analyzers["S1009"], Tests: []testutil.Test{{Dir: "nil-len"}}}, + {Analyzer: Analyzers["S1010"], Tests: []testutil.Test{{Dir: "slicing"}}}, + {Analyzer: Analyzers["S1011"], Tests: []testutil.Test{{Dir: "loop-append"}}}, + {Analyzer: Analyzers["S1012"], Tests: []testutil.Test{{Dir: "time-since"}}}, + {Analyzer: Analyzers["S1016"], Tests: []testutil.Test{{Dir: "convert"}, {Dir: "convert_go17", Version: "1.7"}, {Dir: "convert_go18", Version: "1.8"}}}, + {Analyzer: Analyzers["S1017"], Tests: []testutil.Test{{Dir: "trim"}}}, + {Analyzer: Analyzers["S1018"], Tests: []testutil.Test{{Dir: "LintLoopSlide"}}}, + {Analyzer: Analyzers["S1019"], Tests: []testutil.Test{{Dir: "LintMakeLenCap"}}}, + {Analyzer: Analyzers["S1020"], Tests: []testutil.Test{{Dir: "LintAssertNotNil"}}}, + {Analyzer: Analyzers["S1021"], Tests: []testutil.Test{{Dir: "LintDeclareAssign"}}}, + {Analyzer: Analyzers["S1023"], Tests: []testutil.Test{{Dir: "LintRedundantBreak"}, {Dir: "LintRedundantReturn"}}}, + {Analyzer: Analyzers["S1024"], Tests: []testutil.Test{{Dir: "LimeTimeUntil_go17", Version: "1.7"}, {Dir: "LimeTimeUntil_go18", Version: "1.8"}}}, + {Analyzer: Analyzers["S1025"], Tests: []testutil.Test{{Dir: "LintRedundantSprintf"}}}, + {Analyzer: Analyzers["S1028"], Tests: []testutil.Test{{Dir: "LintErrorsNewSprintf"}}}, + {Analyzer: Analyzers["S1029"], Tests: []testutil.Test{{Dir: "LintRangeStringRunes"}}}, + {Analyzer: Analyzers["S1030"], Tests: []testutil.Test{{Dir: "LintBytesBufferConversions"}}}, + {Analyzer: Analyzers["S1031"], Tests: []testutil.Test{{Dir: "LintNilCheckAroundRange"}}}, + {Analyzer: Analyzers["S1032"], Tests: []testutil.Test{{Dir: "LintSortHelpers"}}}, + {Analyzer: Analyzers["S1033"], Tests: []testutil.Test{{Dir: "LintGuardedDelete"}}}, + {Analyzer: Analyzers["S1034"], Tests: []testutil.Test{{Dir: "LintSimplifyTypeSwitch"}}}, } + + testutil.Run(t, checks) } diff --git a/staticcheck/lint_test.go b/staticcheck/lint_test.go index 2b129b53e..6511b75e6 100644 --- a/staticcheck/lint_test.go +++ b/staticcheck/lint_test.go @@ -3,111 +3,81 @@ package staticcheck import ( "testing" - "golang.org/x/tools/go/analysis/analysistest" + "honnef.co/go/tools/lint/testutil" ) func TestAll(t 
*testing.T) { - checks := map[string][]struct { - dir string - version string - }{ - "SA1000": {{dir: "CheckRegexps"}}, - "SA1001": {{dir: "CheckTemplate"}}, - "SA1002": {{dir: "CheckTimeParse"}}, - "SA1003": { - {dir: "CheckEncodingBinary"}, - {dir: "CheckEncodingBinary_go17", version: "1.7"}, - {dir: "CheckEncodingBinary_go18", version: "1.8"}, - }, - "SA1004": {{dir: "CheckTimeSleepConstant"}}, - "SA1005": {{dir: "CheckExec"}}, - "SA1006": {{dir: "CheckUnsafePrintf"}}, - "SA1007": {{dir: "CheckURLs"}}, - "SA1008": {{dir: "CheckCanonicalHeaderKey"}}, - "SA1010": {{dir: "checkStdlibUsageRegexpFindAll"}}, - "SA1011": {{dir: "checkStdlibUsageUTF8Cutset"}}, - "SA1012": {{dir: "checkStdlibUsageNilContext"}}, - "SA1013": {{dir: "checkStdlibUsageSeeker"}}, - "SA1014": {{dir: "CheckUnmarshalPointer"}}, - "SA1015": { - {dir: "CheckLeakyTimeTick"}, - {dir: "CheckLeakyTimeTick-main"}, - }, - "SA1016": {{dir: "CheckUntrappableSignal"}}, - "SA1017": {{dir: "CheckUnbufferedSignalChan"}}, - "SA1018": {{dir: "CheckStringsReplaceZero"}}, - "SA1019": { - {dir: "CheckDeprecated"}, - {dir: "CheckDeprecated_go14", version: "1.4"}, - {dir: "CheckDeprecated_go18", version: "1.8"}, - }, - "SA1020": {{dir: "CheckListenAddress"}}, - "SA1021": {{dir: "CheckBytesEqualIP"}}, - "SA1023": {{dir: "CheckWriterBufferModified"}}, - "SA1024": {{dir: "CheckNonUniqueCutset"}}, - "SA1025": {{dir: "CheckTimerResetReturnValue"}}, - "SA1026": {{dir: "CheckUnsupportedMarshal"}}, - "SA2000": {{dir: "CheckWaitgroupAdd"}}, - "SA2001": {{dir: "CheckEmptyCriticalSection"}}, - "SA2002": {{dir: "CheckConcurrentTesting"}}, - "SA2003": {{dir: "CheckDeferLock"}}, - "SA3000": { - {dir: "CheckTestMainExit-1"}, - {dir: "CheckTestMainExit-2"}, - {dir: "CheckTestMainExit-3"}, - {dir: "CheckTestMainExit-4"}, - {dir: "CheckTestMainExit-5"}, - }, - "SA3001": {{dir: "CheckBenchmarkN"}}, - "SA4000": {{dir: "CheckLhsRhsIdentical"}}, - "SA4001": {{dir: "CheckIneffectiveCopy"}}, - "SA4002": {{dir: "CheckDiffSizeComparison"}}, - "SA4003": {{dir: "CheckExtremeComparison"}}, - "SA4004": {{dir: "CheckIneffectiveLoop"}}, - "SA4006": {{dir: "CheckUnreadVariableValues"}}, - "SA4008": {{dir: "CheckLoopCondition"}}, - "SA4009": {{dir: "CheckArgOverwritten"}}, - "SA4010": {{dir: "CheckIneffectiveAppend"}}, - "SA4011": {{dir: "CheckScopedBreak"}}, - "SA4012": {{dir: "CheckNaNComparison"}}, - "SA4013": {{dir: "CheckDoubleNegation"}}, - "SA4014": {{dir: "CheckRepeatedIfElse"}}, - "SA4015": {{dir: "CheckMathInt"}}, - "SA4016": {{dir: "CheckSillyBitwiseOps"}}, - "SA4017": {{dir: "CheckPureFunctions"}}, - "SA4018": {{dir: "CheckSelfAssignment"}}, - "SA4019": {{dir: "CheckDuplicateBuildConstraints"}}, - "SA4020": {{dir: "CheckUnreachableTypeCases"}}, - "SA4021": {{dir: "CheckSingleArgAppend"}}, - "SA5000": {{dir: "CheckNilMaps"}}, - "SA5001": {{dir: "CheckEarlyDefer"}}, - "SA5002": {{dir: "CheckInfiniteEmptyLoop"}}, - "SA5003": {{dir: "CheckDeferInInfiniteLoop"}}, - "SA5004": {{dir: "CheckLoopEmptyDefault"}}, - "SA5005": {{dir: "CheckCyclicFinalizer"}}, - "SA5007": {{dir: "CheckInfiniteRecursion"}}, - "SA5008": {{dir: "CheckStructTags"}}, - "SA5009": {{dir: "CheckPrintf"}}, - "SA6000": {{dir: "CheckRegexpMatchLoop"}}, - "SA6002": {{dir: "CheckSyncPoolValue"}}, - "SA6003": {{dir: "CheckRangeStringRunes"}}, - "SA6005": {{dir: "CheckToLowerToUpperComparison"}}, - "SA9001": {{dir: "CheckDubiousDeferInChannelRangeLoop"}}, - "SA9002": {{dir: "CheckNonOctalFileMode"}}, - "SA9003": {{dir: "CheckEmptyBranch"}}, - "SA9004": {{dir: "CheckMissingEnumTypesInDeclaration"}}, - 
"SA9005": {{dir: "CheckNoopMarshal"}}, + checks := []testutil.Analyzer{ + {Analyzer: Analyzers["SA1000"], Tests: []testutil.Test{{Dir: "CheckRegexps"}}}, + {Analyzer: Analyzers["SA1001"], Tests: []testutil.Test{{Dir: "CheckTemplate"}}}, + {Analyzer: Analyzers["SA1002"], Tests: []testutil.Test{{Dir: "CheckTimeParse"}}}, + {Analyzer: Analyzers["SA1003"], Tests: []testutil.Test{{Dir: "CheckEncodingBinary"}, {Dir: "CheckEncodingBinary_go17", Version: "1.7"}, {Dir: "CheckEncodingBinary_go18", Version: "1.8"}}}, + {Analyzer: Analyzers["SA1004"], Tests: []testutil.Test{{Dir: "CheckTimeSleepConstant"}}}, + {Analyzer: Analyzers["SA1005"], Tests: []testutil.Test{{Dir: "CheckExec"}}}, + {Analyzer: Analyzers["SA1006"], Tests: []testutil.Test{{Dir: "CheckUnsafePrintf"}}}, + {Analyzer: Analyzers["SA1007"], Tests: []testutil.Test{{Dir: "CheckURLs"}}}, + {Analyzer: Analyzers["SA1008"], Tests: []testutil.Test{{Dir: "CheckCanonicalHeaderKey"}}}, + {Analyzer: Analyzers["SA1010"], Tests: []testutil.Test{{Dir: "checkStdlibUsageRegexpFindAll"}}}, + {Analyzer: Analyzers["SA1011"], Tests: []testutil.Test{{Dir: "checkStdlibUsageUTF8Cutset"}}}, + {Analyzer: Analyzers["SA1012"], Tests: []testutil.Test{{Dir: "checkStdlibUsageNilContext"}}}, + {Analyzer: Analyzers["SA1013"], Tests: []testutil.Test{{Dir: "checkStdlibUsageSeeker"}}}, + {Analyzer: Analyzers["SA1014"], Tests: []testutil.Test{{Dir: "CheckUnmarshalPointer"}}}, + {Analyzer: Analyzers["SA1015"], Tests: []testutil.Test{{Dir: "CheckLeakyTimeTick"}, {Dir: "CheckLeakyTimeTick-main"}}}, + {Analyzer: Analyzers["SA1016"], Tests: []testutil.Test{{Dir: "CheckUntrappableSignal"}}}, + {Analyzer: Analyzers["SA1017"], Tests: []testutil.Test{{Dir: "CheckUnbufferedSignalChan"}}}, + {Analyzer: Analyzers["SA1018"], Tests: []testutil.Test{{Dir: "CheckStringsReplaceZero"}}}, + {Analyzer: Analyzers["SA1019"], Tests: []testutil.Test{{Dir: "CheckDeprecated"}, {Dir: "CheckDeprecated_go14", Version: "1.4"}, {Dir: "CheckDeprecated_go18", Version: "1.8"}}}, + {Analyzer: Analyzers["SA1020"], Tests: []testutil.Test{{Dir: "CheckListenAddress"}}}, + {Analyzer: Analyzers["SA1021"], Tests: []testutil.Test{{Dir: "CheckBytesEqualIP"}}}, + {Analyzer: Analyzers["SA1023"], Tests: []testutil.Test{{Dir: "CheckWriterBufferModified"}}}, + {Analyzer: Analyzers["SA1024"], Tests: []testutil.Test{{Dir: "CheckNonUniqueCutset"}}}, + {Analyzer: Analyzers["SA1025"], Tests: []testutil.Test{{Dir: "CheckTimerResetReturnValue"}}}, + {Analyzer: Analyzers["SA1026"], Tests: []testutil.Test{{Dir: "CheckUnsupportedMarshal"}}}, + {Analyzer: Analyzers["SA2000"], Tests: []testutil.Test{{Dir: "CheckWaitgroupAdd"}}}, + {Analyzer: Analyzers["SA2001"], Tests: []testutil.Test{{Dir: "CheckEmptyCriticalSection"}}}, + {Analyzer: Analyzers["SA2002"], Tests: []testutil.Test{{Dir: "CheckConcurrentTesting"}}}, + {Analyzer: Analyzers["SA2003"], Tests: []testutil.Test{{Dir: "CheckDeferLock"}}}, + {Analyzer: Analyzers["SA3000"], Tests: []testutil.Test{{Dir: "CheckTestMainExit-1"}, {Dir: "CheckTestMainExit-2"}, {Dir: "CheckTestMainExit-3"}, {Dir: "CheckTestMainExit-4"}, {Dir: "CheckTestMainExit-5"}}}, + {Analyzer: Analyzers["SA3001"], Tests: []testutil.Test{{Dir: "CheckBenchmarkN"}}}, + {Analyzer: Analyzers["SA4000"], Tests: []testutil.Test{{Dir: "CheckLhsRhsIdentical"}}}, + {Analyzer: Analyzers["SA4001"], Tests: []testutil.Test{{Dir: "CheckIneffectiveCopy"}}}, + {Analyzer: Analyzers["SA4002"], Tests: []testutil.Test{{Dir: "CheckDiffSizeComparison"}}}, + {Analyzer: Analyzers["SA4003"], Tests: []testutil.Test{{Dir: 
"CheckExtremeComparison"}}}, + {Analyzer: Analyzers["SA4004"], Tests: []testutil.Test{{Dir: "CheckIneffectiveLoop"}}}, + {Analyzer: Analyzers["SA4006"], Tests: []testutil.Test{{Dir: "CheckUnreadVariableValues"}}}, + {Analyzer: Analyzers["SA4008"], Tests: []testutil.Test{{Dir: "CheckLoopCondition"}}}, + {Analyzer: Analyzers["SA4009"], Tests: []testutil.Test{{Dir: "CheckArgOverwritten"}}}, + {Analyzer: Analyzers["SA4010"], Tests: []testutil.Test{{Dir: "CheckIneffectiveAppend"}}}, + {Analyzer: Analyzers["SA4011"], Tests: []testutil.Test{{Dir: "CheckScopedBreak"}}}, + {Analyzer: Analyzers["SA4012"], Tests: []testutil.Test{{Dir: "CheckNaNComparison"}}}, + {Analyzer: Analyzers["SA4013"], Tests: []testutil.Test{{Dir: "CheckDoubleNegation"}}}, + {Analyzer: Analyzers["SA4014"], Tests: []testutil.Test{{Dir: "CheckRepeatedIfElse"}}}, + {Analyzer: Analyzers["SA4015"], Tests: []testutil.Test{{Dir: "CheckMathInt"}}}, + {Analyzer: Analyzers["SA4016"], Tests: []testutil.Test{{Dir: "CheckSillyBitwiseOps"}}}, + {Analyzer: Analyzers["SA4017"], Tests: []testutil.Test{{Dir: "CheckPureFunctions"}}}, + {Analyzer: Analyzers["SA4018"], Tests: []testutil.Test{{Dir: "CheckSelfAssignment"}}}, + {Analyzer: Analyzers["SA4019"], Tests: []testutil.Test{{Dir: "CheckDuplicateBuildConstraints"}}}, + {Analyzer: Analyzers["SA4020"], Tests: []testutil.Test{{Dir: "CheckUnreachableTypeCases"}}}, + {Analyzer: Analyzers["SA4021"], Tests: []testutil.Test{{Dir: "CheckSingleArgAppend"}}}, + {Analyzer: Analyzers["SA5000"], Tests: []testutil.Test{{Dir: "CheckNilMaps"}}}, + {Analyzer: Analyzers["SA5001"], Tests: []testutil.Test{{Dir: "CheckEarlyDefer"}}}, + {Analyzer: Analyzers["SA5002"], Tests: []testutil.Test{{Dir: "CheckInfiniteEmptyLoop"}}}, + {Analyzer: Analyzers["SA5003"], Tests: []testutil.Test{{Dir: "CheckDeferInInfiniteLoop"}}}, + {Analyzer: Analyzers["SA5004"], Tests: []testutil.Test{{Dir: "CheckLoopEmptyDefault"}}}, + {Analyzer: Analyzers["SA5005"], Tests: []testutil.Test{{Dir: "CheckCyclicFinalizer"}}}, + {Analyzer: Analyzers["SA5007"], Tests: []testutil.Test{{Dir: "CheckInfiniteRecursion"}}}, + {Analyzer: Analyzers["SA5008"], Tests: []testutil.Test{{Dir: "CheckStructTags"}}}, + {Analyzer: Analyzers["SA5009"], Tests: []testutil.Test{{Dir: "CheckPrintf"}}}, + {Analyzer: Analyzers["SA6000"], Tests: []testutil.Test{{Dir: "CheckRegexpMatchLoop"}}}, + {Analyzer: Analyzers["SA6002"], Tests: []testutil.Test{{Dir: "CheckSyncPoolValue"}}}, + {Analyzer: Analyzers["SA6003"], Tests: []testutil.Test{{Dir: "CheckRangeStringRunes"}}}, + {Analyzer: Analyzers["SA6005"], Tests: []testutil.Test{{Dir: "CheckToLowerToUpperComparison"}}}, + {Analyzer: Analyzers["SA9001"], Tests: []testutil.Test{{Dir: "CheckDubiousDeferInChannelRangeLoop"}}}, + {Analyzer: Analyzers["SA9002"], Tests: []testutil.Test{{Dir: "CheckNonOctalFileMode"}}}, + {Analyzer: Analyzers["SA9003"], Tests: []testutil.Test{{Dir: "CheckEmptyBranch"}}}, + {Analyzer: Analyzers["SA9004"], Tests: []testutil.Test{{Dir: "CheckMissingEnumTypesInDeclaration"}}}, + {Analyzer: Analyzers["SA9005"], Tests: []testutil.Test{{Dir: "CheckNoopMarshal"}}}, } - for check, dirs := range checks { - a := Analyzers[check] - for _, dir := range dirs { - if dir.version != "" { - if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { - t.Fatal(err) - } - } - analysistest.Run(t, analysistest.TestData(), a, dir.dir) - } - } + testutil.Run(t, checks) } diff --git a/stylecheck/lint_test.go b/stylecheck/lint_test.go index 7b3dd3cc6..fa6b8ddb2 100644 --- a/stylecheck/lint_test.go +++ 
b/stylecheck/lint_test.go @@ -3,44 +3,25 @@ package stylecheck import ( "testing" - "golang.org/x/tools/go/analysis/analysistest" + "honnef.co/go/tools/lint/testutil" ) func TestAll(t *testing.T) { - checks := map[string][]struct { - dir string - version string - }{ - "ST1000": { - {dir: "CheckPackageComment-1"}, - {dir: "CheckPackageComment-2"}, - }, - "ST1001": {{dir: "CheckDotImports"}}, - "ST1003": { - {dir: "CheckNames"}, - {dir: "CheckNames_generated"}, - }, - "ST1005": {{dir: "CheckErrorStrings"}}, - "ST1006": {{dir: "CheckReceiverNames"}}, - "ST1008": {{dir: "CheckErrorReturn"}}, - "ST1011": {{dir: "CheckTimeNames"}}, - "ST1012": {{dir: "CheckErrorVarNames"}}, - "ST1013": {{dir: "CheckHTTPStatusCodes"}}, - "ST1015": {{dir: "CheckDefaultCaseOrder"}}, - "ST1016": {{dir: "CheckReceiverNamesIdentical"}}, - "ST1017": {{dir: "CheckYodaConditions"}}, - "ST1018": {{dir: "CheckInvisibleCharacters"}}, + checks := []testutil.Analyzer{ + {Analyzer: Analyzers["ST1000"], Tests: []testutil.Test{{Dir: "CheckPackageComment-1"}, {Dir: "CheckPackageComment-2"}}}, + {Analyzer: Analyzers["ST1001"], Tests: []testutil.Test{{Dir: "CheckDotImports"}}}, + {Analyzer: Analyzers["ST1003"], Tests: []testutil.Test{{Dir: "CheckNames"}, {Dir: "CheckNames_generated"}}}, + {Analyzer: Analyzers["ST1005"], Tests: []testutil.Test{{Dir: "CheckErrorStrings"}}}, + {Analyzer: Analyzers["ST1006"], Tests: []testutil.Test{{Dir: "CheckReceiverNames"}}}, + {Analyzer: Analyzers["ST1008"], Tests: []testutil.Test{{Dir: "CheckErrorReturn"}}}, + {Analyzer: Analyzers["ST1011"], Tests: []testutil.Test{{Dir: "CheckTimeNames"}}}, + {Analyzer: Analyzers["ST1012"], Tests: []testutil.Test{{Dir: "CheckErrorVarNames"}}}, + {Analyzer: Analyzers["ST1013"], Tests: []testutil.Test{{Dir: "CheckHTTPStatusCodes"}}}, + {Analyzer: Analyzers["ST1015"], Tests: []testutil.Test{{Dir: "CheckDefaultCaseOrder"}}}, + {Analyzer: Analyzers["ST1016"], Tests: []testutil.Test{{Dir: "CheckReceiverNamesIdentical"}}}, + {Analyzer: Analyzers["ST1017"], Tests: []testutil.Test{{Dir: "CheckYodaConditions"}}}, + {Analyzer: Analyzers["ST1018"], Tests: []testutil.Test{{Dir: "CheckInvisibleCharacters"}}}, } - for check, dirs := range checks { - a := Analyzers[check] - for _, dir := range dirs { - if dir.version != "" { - if err := a.Flags.Lookup("go").Value.Set(dir.version); err != nil { - t.Fatal(err) - } - } - analysistest.Run(t, analysistest.TestData(), a, dir.dir) - } - } + testutil.Run(t, checks) } From c85582a6916c1f75a6e0707a5a469e6f86783999 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 25 May 2019 09:11:38 +0200 Subject: [PATCH 206/254] cmd/go-module-query: prototype utility for analysing Go modules Right now, go-module-query just incrementally processes index.golang.org/index, fetching go.mod files for all known modules. In the future, it will allow answering queries on the module graph. 
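The index is consumed as a stream of JSON objects, decoded one at a
time until EOF; an entry looks roughly like this (illustrative values):

    {"Path":"example.com/some/module","Version":"v1.2.3","Timestamp":"2019-05-24T10:15:30.123456Z"}

Entries newer than the last recorded timestamp are requested via the
?since= parameter, and for every new module version the matching
go.mod is downloaded from
https://proxy.golang.org/<escaped module path>/@v/<version>.mod and
stored under the user cache directory, so subsequent runs only fetch
what has changed since the previous run.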
--- cmd/go-module-query/main.go | 155 ++++++++++++++++++++++++++++++++++++ 1 file changed, 155 insertions(+) create mode 100644 cmd/go-module-query/main.go diff --git a/cmd/go-module-query/main.go b/cmd/go-module-query/main.go new file mode 100644 index 000000000..cf49be18a --- /dev/null +++ b/cmd/go-module-query/main.go @@ -0,0 +1,155 @@ +package main + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "log" + "net/http" + "os" + "path" + "path/filepath" + "sync" + "sync/atomic" + "time" + + "github.com/google/renameio" + "golang.org/x/mod/module" +) + +/* +Q: which versions of our module are being used +A: find the latest version of every Go module, find the dependency on our module + +Q: what modules have stopped using our module +A: find every module where a version [0..N) uses us, but version N doesn't. +*/ + +func Fetch(since time.Time) ([]module.Version, time.Time, error) { + var out []module.Version + for { + out2, since2, err := fetch(since, out) + if err != nil { + return nil, since, err + } + if len(out) == len(out2) { + break + } + out = out2 + since = since2 + } + return out, since, nil +} + +func fetch(since time.Time, out []module.Version) ([]module.Version, time.Time, error) { + // +1µs because of bug in index.golang.org that returns results + // >=since instead of >since + ts := since.Add(1 * time.Microsecond) + u := `https://index.golang.org/index?since=` + ts.Format(time.RFC3339Nano) + resp, err := http.Get(u) + if err != nil { + return nil, since, err + } + defer resp.Body.Close() + dec := json.NewDecoder(resp.Body) + + var entry struct { + module.Version + Timestamp time.Time + } + for { + if err := dec.Decode(&entry); err != nil { + if err == io.EOF { + break + } + return out, since, err + } + + out = append(out, entry.Version) + since = entry.Timestamp + } + + return out, since, nil +} + +func main() { + cache, err := os.UserCacheDir() + if err != nil { + log.Fatal(err) + } + + var since time.Time + b, err := ioutil.ReadFile(filepath.Join(cache, "go-module-query", "last")) + if err == nil { + t, err := time.Parse(time.RFC3339Nano, string(b)) + if err != nil { + log.Fatal(err) + } + since = t + log.Println("Resuming at", since) + } else if !os.IsNotExist(err) { + log.Fatal(err) + } + + out, since, err := Fetch(since) + if err != nil { + log.Fatal(err) + } + + sem := make(chan struct{}, 8) + var wg sync.WaitGroup + var errs uint64 + for _, v := range out { + mpath, _ := module.EscapePath(v.Path) + p := filepath.Join(cache, "go-module-query", mpath, "@v", v.Version+".mod") + // XXX is this atomic? 
+ if err := os.MkdirAll(filepath.Join(cache, "go-module-query", mpath, "@v"), 0777); err != nil { + log.Println(err) + continue + } + if _, err := os.Stat(p); os.IsNotExist(err) { + fmt.Println("Fetching", v) + sem <- struct{}{} + wg.Add(1) + go func(p string, v module.Version) { + defer wg.Done() + defer func() { <-sem }() + resp, err := http.Get("https://proxy.golang.org/" + path.Join(mpath, "@v", v.Version+".mod")) + if err != nil { + atomic.AddUint64(&errs, 1) + log.Println(err) + return + } + defer resp.Body.Close() + pf, err := renameio.TempFile("", p) + if err != nil { + atomic.AddUint64(&errs, 1) + log.Println(err) + return + } + defer pf.Cleanup() + if _, err := io.Copy(pf, resp.Body); err != nil { + atomic.AddUint64(&errs, 1) + log.Println(err) + return + } + if err := pf.CloseAtomicallyReplace(); err != nil { + atomic.AddUint64(&errs, 1) + log.Println("Couldn't store go.mod:", err) + } + }(p, v) + } + } + + wg.Wait() + + if errs > 0 { + log.Println("Couldn't download all go.mod, not storing timestamp") + return + } + + if err := renameio.WriteFile(filepath.Join(cache, "go-module-query", "last"), []byte(since.Format(time.RFC3339Nano)), 0666); err != nil { + log.Println("Couldn't store timestamp:", err) + } +} From ced03d0c33247b8f0a9a9ba1f9b9ad3857303e9e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 25 May 2019 09:14:02 +0200 Subject: [PATCH 207/254] Add go.mod and go.sum --- go.mod | 11 +++++++++++ go.sum | 23 +++++++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 go.mod create mode 100644 go.sum diff --git a/go.mod b/go.mod new file mode 100644 index 000000000..139dd2093 --- /dev/null +++ b/go.mod @@ -0,0 +1,11 @@ +module honnef.co/go/tools + +go 1.11 + +require ( + github.com/BurntSushi/toml v0.3.1 + github.com/google/renameio v0.1.0 + github.com/kisielk/gotool v1.0.0 + golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e + golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd +) diff --git a/go.sum b/go.sum new file mode 100644 index 000000000..f406139cb --- /dev/null +++ b/go.sum @@ -0,0 +1,23 @@ +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529 h1:iMGN4xG0cnqj3t+zOM8wUB0BiPKHEwSxEZCvzcbZuvk= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e h1:JgcxKXxCjrA2tyDP/aNU9K0Ck5Czfk6C7e2tMw7+bSI= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58 
h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d h1:+R4KGOnez64A81RvjARKc4UT5/tI9ujCIVX+P5KiHuI= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd h1:7E3PabyysDSEjnaANKBgums/hyvMI/HoHQ50qZEzTrg= +golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= From 7d8e77bd9e8d3feaaf92895223306b253983f924 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 25 May 2019 10:54:01 +0200 Subject: [PATCH 208/254] version: add verbose version information --- lint/lintutil/util.go | 7 +++++++ version/buildinfo.go | 32 ++++++++++++++++++++++++++++++ version/buildinfo111.go | 6 ++++++ version/version.go | 44 +++++++++++++++++++++++++++++++++++++++-- 4 files changed, 87 insertions(+), 2 deletions(-) create mode 100644 version/buildinfo.go create mode 100644 version/buildinfo111.go diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index bbcdea68d..1eb61c944 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -112,6 +112,7 @@ func FlagSet(name string) *flag.FlagSet { flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") flags.String("debug.memprofile", "", "Write memory profile to `file`") + flags.Bool("debug.version", false, "Print detailed version information about this program") checks := list{"inherit"} fail := list{"all"} @@ -150,6 +151,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) + debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool) cfg := config.Config{} cfg.Checks = *fs.Lookup("checks").Value.(*list) @@ -176,6 +178,11 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * pprof.StartCPUProfile(f) } + if debugVersion { + version.Verbose() + exit(0) + } + if printVersion { version.Print() exit(0) diff --git a/version/buildinfo.go b/version/buildinfo.go new file mode 100644 index 000000000..2119743bc --- /dev/null +++ b/version/buildinfo.go @@ -0,0 +1,32 @@ +// +build go1.12 + +package version + +import ( + "fmt" + "runtime/debug" +) + +func printBuildInfo() { + if info, ok := debug.ReadBuildInfo(); ok { + fmt.Println("Main module:") + printModule(&info.Main) + fmt.Println("Dependencies:") + for _, dep := range info.Deps { + printModule(dep) + } + } else { + fmt.Println("Built without Go modules") + } +} + +func buildInfoVersion() (string, bool) { + info, ok := debug.ReadBuildInfo() + if !ok { + return "", false + } + if info.Main.Version == "(devel)" { + return "", false + } + return info.Main.Version, true +} diff --git a/version/buildinfo111.go b/version/buildinfo111.go new file mode 100644 index 000000000..06aae1e65 --- /dev/null +++ b/version/buildinfo111.go @@ -0,0 +1,6 @@ +// +build !go1.12 + +package version + +func printBuildInfo() {} +func buildInfoVersion() (string, bool) { return "", false } diff --git a/version/version.go 
b/version/version.go index 232cf7e74..b01fc43d7 100644 --- a/version/version.go +++ b/version/version.go @@ -4,14 +4,54 @@ import ( "fmt" "os" "path/filepath" + "runtime" + "runtime/debug" ) const Version = "devel" +// version returns a version descriptor and reports whether the +// version is a known release. +func version() (string, bool) { + if Version != "devel" { + return Version, true + } + v, ok := buildInfoVersion() + if ok { + return v, false + } + return "devel", false +} + func Print() { - if Version == "devel" { + v, release := version() + + if release { + fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), v) + } else if v == "devel" { fmt.Printf("%s (no version)\n", filepath.Base(os.Args[0])) } else { - fmt.Printf("%s %s\n", filepath.Base(os.Args[0]), Version) + fmt.Printf("%s (devel, %s)\n", filepath.Base(os.Args[0]), v) + } +} + +func Verbose() { + Print() + fmt.Println() + fmt.Println("Compiled with Go version:", runtime.Version()) + printBuildInfo() +} + +func printModule(m *debug.Module) { + fmt.Printf("\t%s", m.Path) + if m.Version != "(devel)" { + fmt.Printf("@%s", m.Version) + } + if m.Sum != "" { + fmt.Printf(" (sum: %s)", m.Sum) + } + if m.Replace != nil { + fmt.Printf(" (replace: %s)", m.Replace.Path) } + fmt.Println() } From 1da3061645b400d55be4855f104d8a68ca0a61e5 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sat, 25 May 2019 20:38:15 +0200 Subject: [PATCH 209/254] cmd/go-module-query: print digraph --- cmd/go-module-query/main.go | 34 ++++++++++++++++++++++++++++++++++ go.mod | 1 + go.sum | 7 +++++++ 3 files changed, 42 insertions(+) diff --git a/cmd/go-module-query/main.go b/cmd/go-module-query/main.go index cf49be18a..aa59ba09e 100644 --- a/cmd/go-module-query/main.go +++ b/cmd/go-module-query/main.go @@ -10,11 +10,13 @@ import ( "os" "path" "path/filepath" + "strings" "sync" "sync/atomic" "time" "github.com/google/renameio" + "github.com/rogpeppe/go-internal/modfile" "golang.org/x/mod/module" ) @@ -122,6 +124,7 @@ func main() { return } defer resp.Body.Close() + // XXX handle response code pf, err := renameio.TempFile("", p) if err != nil { atomic.AddUint64(&errs, 1) @@ -153,3 +156,34 @@ func main() { log.Println("Couldn't store timestamp:", err) } } + +func printGraph() { + cache, err := os.UserCacheDir() + if err != nil { + log.Fatal(err) + } + filepath.Walk(filepath.Join(cache, "go-module-query"), func(path string, info os.FileInfo, err error) error { + if err != nil { + return nil + } + if strings.HasSuffix(path, ".mod") { + name := filepath.Base(path) + name = name[:len(name)-4] + b, err := ioutil.ReadFile(path) + if err != nil { + log.Println(err) + return nil + } + f, err := modfile.Parse(path, b, nil) + if err != nil { + log.Println(err) + return nil + } + f.Module.Mod.Version = name + for _, dep := range f.Require { + fmt.Printf("%s@%s %s@%s\n", f.Module.Mod.Path, f.Module.Mod.Version, dep.Mod.Path, dep.Mod.Version) + } + } + return nil + }) +} diff --git a/go.mod b/go.mod index 139dd2093..6cf738645 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/BurntSushi/toml v0.3.1 github.com/google/renameio v0.1.0 github.com/kisielk/gotool v1.0.0 + github.com/rogpeppe/go-internal v1.3.0 golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd ) diff --git a/go.sum b/go.sum index f406139cb..85d349605 100644 --- a/go.sum +++ b/go.sum @@ -4,6 +4,11 @@ github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA github.com/google/renameio v0.1.0/go.mod 
h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/rogpeppe/go-internal v1.3.0 h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529 h1:iMGN4xG0cnqj3t+zOM8wUB0BiPKHEwSxEZCvzcbZuvk= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -21,3 +26,5 @@ golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd h1:7E3PabyysDSEjnaANKBgums/hyvMI/HoHQ50qZEzTrg= golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= From e221f64b6cb64fa3538a8a8c37af17bbb9c2b602 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 30 May 2019 16:34:33 +0200 Subject: [PATCH 210/254] lint/lintutil: add debug flag for hiding compile errors --- lint/lintutil/util.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 1eb61c944..0d79e7fb5 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -113,6 +113,7 @@ func FlagSet(name string) *flag.FlagSet { flags.String("debug.cpuprofile", "", "Write CPU profile to `file`") flags.String("debug.memprofile", "", "Write memory profile to `file`") flags.Bool("debug.version", false, "Print detailed version information about this program") + flags.Bool("debug.no-compile-errors", false, "Don't print compile errors") checks := list{"inherit"} fail := list{"all"} @@ -152,6 +153,7 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * cpuProfile := fs.Lookup("debug.cpuprofile").Value.(flag.Getter).Get().(string) memProfile := fs.Lookup("debug.memprofile").Value.(flag.Getter).Get().(string) debugVersion := fs.Lookup("debug.version").Value.(flag.Getter).Get().(bool) + debugNoCompile := fs.Lookup("debug.no-compile-errors").Value.(flag.Getter).Get().(bool) cfg := config.Config{} cfg.Checks = *fs.Lookup("checks").Value.(*list) @@ -249,6 +251,9 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * total = len(ps) for _, p := range ps { + if p.Check == "compile" && debugNoCompile { + continue + } if p.Severity == lint.Ignored && !showIgnored { continue } From a1efa522b896190dac280da996652c3394f89ff7 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 30 May 2019 19:00:28 +0200 Subject: [PATCH 211/254] version: don't use debug.Module in Go 1.11 --- version/buildinfo.go | 14 ++++++++++++++ version/version.go | 15 --------------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/version/buildinfo.go b/version/buildinfo.go index 
2119743bc..b6034bb7d 100644 --- a/version/buildinfo.go +++ b/version/buildinfo.go @@ -30,3 +30,17 @@ func buildInfoVersion() { } return info.Main.Version, true } + +func printModule(m *debug.Module) { + fmt.Printf("\t%s", m.Path) + if m.Version != "(devel)" { + fmt.Printf("@%s", m.Version) + } + if m.Sum != "" { + fmt.Printf(" (sum: %s)", m.Sum) + } + if m.Replace != nil { + fmt.Printf(" (replace: %s)", m.Replace.Path) + } + fmt.Println() +} diff --git a/version/version.go b/version/version.go index b01fc43d7..a12f70fb4 100644 --- a/version/version.go +++ b/version/version.go @@ -5,7 +5,6 @@ import ( "os" "path/filepath" "runtime" - "runtime/debug" ) const Version = "devel" @@ -41,17 +40,3 @@ func Verbose() { fmt.Println("Compiled with Go version:", runtime.Version()) printBuildInfo() } - -func printModule(m *debug.Module) { - fmt.Printf("\t%s", m.Path) - if m.Version != "(devel)" { - fmt.Printf("@%s", m.Version) - } - if m.Sum != "" { - fmt.Printf(" (sum: %s)", m.Sum) - } - if m.Replace != nil { - fmt.Printf(" (replace: %s)", m.Replace.Path) - } - fmt.Println() -} From 42df64e2171aa2a244fed527cdbe487268fb9a76 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 31 May 2019 17:14:00 +0200 Subject: [PATCH 212/254] lint: improve error reporting Work around peculiarities in go/packages and go list to produce more user-friendly errors. --- lint/lint.go | 49 ++++++++-- lint/lint_test.go | 103 ++++++++++++++++++++ lint/runner.go | 10 +- lint/testdata/src/broken_dep/pkg.go | 3 + lint/testdata/src/broken_parse/pkg.go | 3 + lint/testdata/src/broken_pkgerror/broken.go | 1 + lint/testdata/src/broken_typeerror/pkg.go | 6 ++ 7 files changed, 160 insertions(+), 15 deletions(-) create mode 100644 lint/lint_test.go create mode 100644 lint/testdata/src/broken_dep/pkg.go create mode 100644 lint/testdata/src/broken_parse/pkg.go create mode 100644 lint/testdata/src/broken_pkgerror/broken.go create mode 100644 lint/testdata/src/broken_typeerror/pkg.go diff --git a/lint/lint.go b/lint/lint.go index f53d6d946..4306e70b9 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -127,29 +127,58 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error for _, pkg := range pkgs { tpkgToPkg[pkg.Types] = pkg - for _, err := range pkg.errs { - switch err := err.(type) { + for _, e := range pkg.errs { + switch e := e.(type) { case types.Error: p := Problem{ - Pos: err.Fset.PositionFor(err.Pos, false), - Message: err.Msg, + Pos: e.Fset.PositionFor(e.Pos, false), + Message: e.Msg, Severity: Error, Check: "compile", } pkg.problems = append(pkg.problems, p) case packages.Error: + msg := e.Msg + if len(msg) != 0 && msg[0] == '\n' { + // TODO(dh): See https://github.com/golang/go/issues/32363 + msg = msg[1:] + } + + var pos token.Position + if e.Pos == "" { + // Under certain conditions (malformed package + // declarations, multiple packages in the same + // directory), go list emits an error on stderr + // instead of JSON. Those errors do not have + // associated position information in + // go/packages.Error, even though the output on + // stderr may contain it.
+ if p, n, err := parsePos(msg); err == nil { + if abs, err := filepath.Abs(p.Filename); err == nil { + p.Filename = abs + } + pos = p + msg = msg[n+2:] + } + } else { + var err error + pos, _, err = parsePos(e.Pos) + if err != nil { + panic(fmt.Sprintf("internal error: %s", e)) + } + } p := Problem{ - Pos: parsePos(err.Pos), - Message: err.Msg, + Pos: pos, + Message: msg, Severity: Error, Check: "compile", } pkg.problems = append(pkg.problems, p) case scanner.ErrorList: - for _, err := range err { + for _, e := range e { p := Problem{ - Pos: err.Pos, - Message: err.Msg, + Pos: e.Pos, + Message: e.Msg, Severity: Error, Check: "compile", } @@ -158,7 +187,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error case error: p := Problem{ Pos: token.Position{}, - Message: err.Error(), + Message: e.Error(), Severity: Error, Check: "compile", } diff --git a/lint/lint_test.go b/lint/lint_test.go new file mode 100644 index 000000000..37d2cba96 --- /dev/null +++ b/lint/lint_test.go @@ -0,0 +1,103 @@ +package lint + +import ( + "go/token" + "log" + "os" + "path/filepath" + "strings" + "testing" + + "golang.org/x/tools/go/packages" +) + +func testdata() string { + testdata, err := filepath.Abs("testdata") + if err != nil { + log.Fatal(err) + } + return testdata +} + +func lintPackage(t *testing.T, name string) []Problem { + l := Linter{} + cfg := &packages.Config{ + Env: append(os.Environ(), "GOPATH="+testdata(), "GO111MODULE=off"), + } + ps, err := l.Lint(cfg, []string{name}) + if err != nil { + t.Fatal(err) + } + return ps +} + +func trimPosition(pos *token.Position) { + idx := strings.Index(pos.Filename, "/testdata/src/") + if idx >= 0 { + pos.Filename = pos.Filename[idx+len("/testdata/src/"):] + } +} + +func TestErrors(t *testing.T) { + t.Run("invalid package declaration", func(t *testing.T) { + ps := lintPackage(t, "broken_pkgerror") + if len(ps) != 1 { + t.Fatalf("got %d problems, want 1", len(ps)) + } + if want := "expected 'package', found pckage"; ps[0].Message != want { + t.Errorf("got message %q, want %q", ps[0].Message, want) + } + if ps[0].Pos.Filename == "" { + t.Errorf("didn't get useful position") + } + }) + + t.Run("type error", func(t *testing.T) { + ps := lintPackage(t, "broken_typeerror") + if len(ps) != 1 { + t.Fatalf("got %d problems, want 1", len(ps)) + } + trimPosition(&ps[0].Pos) + want := Problem{ + Pos: token.Position{ + Filename: "broken_typeerror/pkg.go", + Offset: 42, + Line: 5, + Column: 10, + }, + Message: "cannot convert \"\" (untyped string constant) to int", + Check: "compile", + Severity: 0, + } + if ps[0] != want { + t.Errorf("got %#v, want %#v", ps[0], want) + } + }) + + t.Run("missing dep", func(t *testing.T) { + t.Skip("Go 1.12 behaves incorrectly for missing packages") + }) + + t.Run("parse error", func(t *testing.T) { + ps := lintPackage(t, "broken_parse") + if len(ps) != 1 { + t.Fatalf("got %d problems, want 1", len(ps)) + } + + trimPosition(&ps[0].Pos) + want := Problem{ + Pos: token.Position{ + Filename: "broken_parse/pkg.go", + Offset: 13, + Line: 3, + Column: 1, + }, + Message: "expected declaration, found asd", + Check: "compile", + Severity: 0, + } + if ps[0] != want { + t.Errorf("got %#v, want %#v", ps[0], want) + } + }) +} diff --git a/lint/runner.go b/lint/runner.go index 0b22eff30..d0211c210 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -518,15 +518,15 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy return pkgs, nil } -var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?$`) 
+var posRe = regexp.MustCompile(`^(.+?):(\d+)(?::(\d+)?)?`) -func parsePos(pos string) token.Position { +func parsePos(pos string) (token.Position, int, error) { if pos == "-" || pos == "" { - return token.Position{} + return token.Position{}, 0, nil } parts := posRe.FindStringSubmatch(pos) if parts == nil { - panic(fmt.Sprintf("internal error: malformed position %q", pos)) + return token.Position{}, 0, fmt.Errorf("malformed position %q", pos) } file := parts[1] line, _ := strconv.Atoi(parts[2]) @@ -535,7 +535,7 @@ Filename: file, Line: line, Column: col, - } + }, len(parts[0]), nil } // loadPkg loads a Go package. If the package is in the set of initial diff --git a/lint/testdata/src/broken_dep/pkg.go b/lint/testdata/src/broken_dep/pkg.go new file mode 100644 index 000000000..9909eea81 --- /dev/null +++ b/lint/testdata/src/broken_dep/pkg.go @@ -0,0 +1,3 @@ +package pkg + +import _ "unknown_package" diff --git a/lint/testdata/src/broken_parse/pkg.go b/lint/testdata/src/broken_parse/pkg.go new file mode 100644 index 000000000..e165ed120 --- /dev/null +++ b/lint/testdata/src/broken_parse/pkg.go @@ -0,0 +1,3 @@ +package pkg + +asd diff --git a/lint/testdata/src/broken_pkgerror/broken.go b/lint/testdata/src/broken_pkgerror/broken.go new file mode 100644 index 000000000..e5ef3456b --- /dev/null +++ b/lint/testdata/src/broken_pkgerror/broken.go @@ -0,0 +1 @@ +pckage pkg diff --git a/lint/testdata/src/broken_typeerror/pkg.go b/lint/testdata/src/broken_typeerror/pkg.go new file mode 100644 index 000000000..4a897b075 --- /dev/null +++ b/lint/testdata/src/broken_typeerror/pkg.go @@ -0,0 +1,6 @@ +package pkg + +func fn() { + x := 1 + _ = x + "" +} From dd204386772794f89bf44f7e2a274017d1e78d87 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 01:14:21 +0200 Subject: [PATCH 213/254] stylecheck: document ST1017 --- stylecheck/doc.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/stylecheck/doc.go b/stylecheck/doc.go index efc65092e..10a28e257 100644 --- a/stylecheck/doc.go +++ b/stylecheck/doc.go @@ -159,6 +159,12 @@ Available since var docST1017 = `Don't use Yoda conditions +Yoda conditions are conditions of the kind 'if 42 == x', where the +literal is on the left side of the comparison. These are a common +idiom in languages in which assignment is an expression, to avoid bugs +of the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of +bug, we prefer the more idiomatic 'if x == 42'. + Available since Unreleased ` From 133a2c477a70326d1ff714d34bb4b4fc635f4938 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 14:15:47 +0200 Subject: [PATCH 214/254] all: emit range information Use the new go/analysis.Diagnostic.End field to provide range information where possible. Currently we can only do this for AST-based checks, as SSA form doesn't directly provide range information. In the future we may make the effort of mapping from SSA back to AST nodes.
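To make the new reporting pattern concrete, a minimal sketch of an AST-based check that emits a ranged diagnostic is shown below. The reportNodef helper mirrors the lintdsl.ReportNodef function added in the diff that follows; the package name, the Analyzer stub, and the toy run function are illustrative assumptions, not code from this patch.

package sketch

import (
	"fmt"
	"go/ast"
	"go/token"

	"golang.org/x/tools/go/analysis"
)

var Analyzer = &analysis.Analyzer{
	Name: "rangesketch",
	Doc:  "toy check demonstrating ranged diagnostics",
	Run:  run,
}

// reportNodef mirrors lintdsl.ReportNodef: the diagnostic spans the node's
// full extent (Pos to End) instead of a single position.
func reportNodef(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
	msg := fmt.Sprintf(format, args...)
	pass.Report(analysis.Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg})
}

// run flags *&x expressions and highlights the whole expression rather than
// just its first token.
func run(pass *analysis.Pass) (interface{}, error) {
	for _, f := range pass.Files {
		ast.Inspect(f, func(n ast.Node) bool {
			if star, ok := n.(*ast.StarExpr); ok {
				if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND {
					reportNodef(pass, star, "*&x will be simplified to x. It will not copy x.")
				}
			}
			return true
		})
	}
	return nil, nil
}

SSA-based checks still report a single position, so Problem.End can be the zero value; the JSON formatter change below simply serializes whatever End it is given.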
--- go.mod | 2 +- go.sum | 3 + lint/lint.go | 6 +- lint/lintdsl/lintdsl.go | 14 +++++ lint/lintutil/format/format.go | 6 ++ lint/runner.go | 1 + simple/lint.go | 80 ++++++++++++------------ staticcheck/lint.go | 108 ++++++++++++++++----------------- stylecheck/lint.go | 24 ++++---- 9 files changed, 136 insertions(+), 108 deletions(-) diff --git a/go.mod b/go.mod index 6cf738645..3bbe16aa8 100644 --- a/go.mod +++ b/go.mod @@ -8,5 +8,5 @@ require ( github.com/kisielk/gotool v1.0.0 github.com/rogpeppe/go-internal v1.3.0 golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e - golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd + golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb ) diff --git a/go.sum b/go.sum index 85d349605..e56784c91 100644 --- a/go.sum +++ b/go.sum @@ -26,5 +26,8 @@ golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd h1:7E3PabyysDSEjnaANKBgums/hyvMI/HoHQ50qZEzTrg= golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb h1:mnQlcVx8Qq8L70HV0DxUGuiuAtiEHTwF1gYJE/EL9nU= +golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190601110225-0abef6e9ecb8 h1:KFgOV120pDm8h0MBnt26wwMmwdhSXE+K+G9jg1ZjxbE= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= diff --git a/lint/lint.go b/lint/lint.go index 4306e70b9..535ba43e4 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -81,6 +81,7 @@ const ( // Problem represents a problem in some source code. type Problem struct { Pos token.Position + End token.Position Message string Check string Severity Severity @@ -357,10 +358,13 @@ type Positioner interface { } func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { + if p == token.NoPos { + return token.Position{} + } + // Only use the adjusted position if it points to another Go file. // This means we'll point to the original file for cgo files, but // we won't point to a YACC grammar file. - pos := fset.PositionFor(p, false) adjPos := fset.PositionFor(p, true) diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go index 4eb2b40fb..df802cb41 100644 --- a/lint/lintdsl/lintdsl.go +++ b/lint/lintdsl/lintdsl.go @@ -376,3 +376,17 @@ func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{} } pass.Reportf(pos, f, args...) } + +func ReportNodef(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) { + msg := fmt.Sprintf(format, args...) + pass.Report(analysis.Diagnostic{Pos: node.Pos(), End: node.End(), Message: msg}) +} + +func ReportNodefFG(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) { + file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename + m := pass.ResultOf[facts.Generated].(map[string]bool) + if m[file] { + return + } + ReportNodef(pass, node, format, args...) 
+} diff --git a/lint/lintutil/format/format.go b/lint/lintutil/format/format.go index e4c3fd315..9385431f8 100644 --- a/lint/lintutil/format/format.go +++ b/lint/lintutil/format/format.go @@ -80,6 +80,7 @@ func (o JSON) Format(p lint.Problem) { Code string `json:"code"` Severity string `json:"severity,omitempty"` Location location `json:"location"` + End location `json:"end"` Message string `json:"message"` }{ Code: p.Check, @@ -89,6 +90,11 @@ func (o JSON) Format(p lint.Problem) { Line: p.Pos.Line, Column: p.Pos.Column, }, + End: location{ + File: p.End.Filename, + Line: p.End.Line, + Column: p.End.Column, + }, Message: p.Message, } _ = json.NewEncoder(o.W).Encode(jp) diff --git a/lint/runner.go b/lint/runner.go index d0211c210..e55fb5437 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -201,6 +201,7 @@ func (ac *analysisAction) exportPackageFact(fact analysis.Fact) { func (ac *analysisAction) report(pass *analysis.Pass, d analysis.Diagnostic) { p := Problem{ Pos: DisplayPosition(pass.Fset, d.Pos), + End: DisplayPosition(pass.Fset, d.End), Message: d.Message, Check: pass.Analyzer.Name, } diff --git a/simple/lint.go b/simple/lint.go index b1fcf988c..45e824608 100644 --- a/simple/lint.go +++ b/simple/lint.go @@ -46,7 +46,7 @@ func LintSingleCaseSelect(pass *analysis.Pass) (interface{}, error) { return } seen[v.Body.List[0]] = struct{}{} - ReportfFG(pass, node.Pos(), "should use for range instead of for { select {} }") + ReportNodefFG(pass, node, "should use for range instead of for { select {} }") case *ast.SelectStmt: if _, ok := seen[v]; ok { return @@ -54,7 +54,7 @@ func LintSingleCaseSelect(pass *analysis.Pass) (interface{}, error) { if !isSingleSelect(v) { return } - ReportfFG(pass, node.Pos(), "should use a simple channel send/receive instead of select with a single case") + ReportNodefFG(pass, node, "should use a simple channel send/receive instead of select with a single case") } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.SelectStmt)(nil)}, fn) @@ -131,7 +131,7 @@ func LintLoopCopy(pass *analysis.Pass) (interface{}, error) { } else { return } - ReportfFG(pass, loop.Pos(), "should use copy() instead of a loop") + ReportNodefFG(pass, loop, "should use copy() instead of a loop") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) return nil, nil @@ -174,7 +174,7 @@ func LintIfBoolCmp(pass *analysis.Pass) (interface{}, error) { if IsInTest(pass, node) { return } - ReportfFG(pass, expr.Pos(), "should omit comparison to bool constant, can be simplified to %s", r) + ReportNodefFG(pass, expr, "should omit comparison to bool constant, can be simplified to %s", r) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) return nil, nil @@ -198,9 +198,9 @@ func LintBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { typ := pass.TypesInfo.TypeOf(call.Fun) if typ == types.Universe.Lookup("string").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).Bytes") { - ReportfFG(pass, call.Pos(), "should use %v.String() instead of %v", Render(pass, sel.X), Render(pass, call)) + ReportNodefFG(pass, call, "should use %v.String() instead of %v", Render(pass, sel.X), Render(pass, call)) } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && IsCallToAST(pass, call.Args[0], "(*bytes.Buffer).String") { - ReportfFG(pass, call.Pos(), "should use %v.Bytes() instead of %v", 
Render(pass, sel.X), Render(pass, call)) + ReportNodefFG(pass, call, "should use %v.Bytes() instead of %v", Render(pass, sel.X), Render(pass, call)) } } @@ -268,7 +268,7 @@ func LintStringsContains(pass *analysis.Pass) (interface{}, error) { if !b { prefix = "!" } - ReportfFG(pass, node.Pos(), "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(pass, call.Args)) + ReportNodefFG(pass, node, "should use %s%s.%s(%s) instead", prefix, pkgIdent.Name, newFunc, RenderArgs(pass, call.Args)) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) return nil, nil @@ -296,7 +296,7 @@ func LintBytesCompare(pass *analysis.Pass) (interface{}, error) { if expr.Op == token.NEQ { prefix = "!" } - ReportfFG(pass, node.Pos(), "should use %sbytes.Equal(%s) instead", prefix, args) + ReportNodefFG(pass, node, "should use %sbytes.Equal(%s) instead", prefix, args) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) return nil, nil @@ -311,7 +311,7 @@ func LintForTrue(pass *analysis.Pass) (interface{}, error) { if !IsBoolConst(pass, loop.Cond) || !BoolConst(pass, loop.Cond) { return } - ReportfFG(pass, loop.Pos(), "should use for {} instead of for true {}") + ReportNodefFG(pass, loop, "should use for {} instead of for true {}") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) return nil, nil @@ -369,7 +369,7 @@ func LintRegexpRaw(pass *analysis.Pass) (interface{}, error) { } } - ReportfFG(pass, call.Pos(), "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) + ReportNodefFG(pass, call, "should use raw string (`...`) with regexp.%s to avoid having to escape twice", sel.Sel.Name) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) return nil, nil @@ -431,7 +431,7 @@ func LintIfReturn(pass *analysis.Pass) (interface{}, error) { if !IsBoolConst(pass, ret2.Results[0]) { return } - ReportfFG(pass, n1.Pos(), "should use 'return ' instead of 'if { return }; return '") + ReportNodefFG(pass, n1, "should use 'return ' instead of 'if { return }; return '") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) return nil, nil @@ -558,7 +558,7 @@ func LintRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { default: return } - ReportfFG(pass, expr.Pos(), "should omit nil check; len() for %s is defined as zero", nilType) + ReportNodefFG(pass, expr, "should omit nil check; len() for %s is defined as zero", nilType) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) return nil, nil @@ -589,7 +589,7 @@ func LintSlicing(pass *analysis.Pass) (interface{}, error) { if !ok || arg.Obj != s.Obj { return } - ReportfFG(pass, n.Pos(), "should omit second index in slice, s[a:len(s)] is identical to s[a:]") + ReportNodefFG(pass, n, "should omit second index in slice, s[a:len(s)] is identical to s[a:]") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.SliceExpr)(nil)}, fn) return nil, nil @@ -674,7 +674,7 @@ func LintLoopAppend(pass *analysis.Pass) (interface{}, error) { if pass.TypesInfo.ObjectOf(val) != pass.TypesInfo.ObjectOf(el) { return } - ReportfFG(pass, loop.Pos(), "should replace loop with %s = append(%s, %s...)", + ReportNodefFG(pass, loop, "should replace loop with %s = append(%s, %s...)", Render(pass, 
stmt.Lhs[0]), Render(pass, call.Args[Arg("append.slice")]), Render(pass, loop.X)) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.RangeStmt)(nil)}, fn) @@ -694,7 +694,7 @@ func LintTimeSince(pass *analysis.Pass) (interface{}, error) { if sel.Sel.Name != "Sub" { return } - ReportfFG(pass, call.Pos(), "should use time.Since instead of time.Now().Sub") + ReportNodefFG(pass, call, "should use time.Since instead of time.Now().Sub") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) return nil, nil @@ -712,7 +712,7 @@ func LintTimeUntil(pass *analysis.Pass) (interface{}, error) { if !IsCallToAST(pass, call.Args[Arg("(time.Time).Sub.u")], "time.Now") { return } - ReportfFG(pass, call.Pos(), "should use time.Until instead of t.Sub(time.Now())") + ReportNodefFG(pass, call, "should use time.Until instead of t.Sub(time.Now())") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) return nil, nil @@ -743,7 +743,7 @@ func LintUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { } cp := *assign cp.Lhs = cp.Lhs[0:1] - ReportfFG(pass, assign.Pos(), "should write %s instead of %s", Render(pass, &cp), Render(pass, assign)) + ReportNodefFG(pass, assign, "should write %s instead of %s", Render(pass, &cp), Render(pass, assign)) } fn2 := func(node ast.Node) { @@ -763,7 +763,7 @@ func LintUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { if expr.Op != token.ARROW { continue } - ReportfFG(pass, lh.Pos(), "'_ = <-ch' can be simplified to '<-ch'") + ReportNodefFG(pass, lh, "'_ = <-ch' can be simplified to '<-ch'") } } @@ -772,11 +772,11 @@ func LintUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { // for x, _ if !IsBlank(rs.Key) && IsBlank(rs.Value) { - ReportfFG(pass, rs.Value.Pos(), "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(pass, rs.Key), rs.Tok) + ReportNodefFG(pass, rs.Value, "should omit value from range; this loop is equivalent to `for %s %s range ...`", Render(pass, rs.Key), rs.Tok) } // for _, _ || for _ if IsBlank(rs.Key) && (IsBlank(rs.Value) || rs.Value == nil) { - ReportfFG(pass, rs.Key.Pos(), "should omit values from range; this loop is equivalent to `for range ...`") + ReportNodefFG(pass, rs.Key, "should omit values from range; this loop is equivalent to `for range ...`") } } @@ -904,7 +904,7 @@ func LintSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { return } } - ReportfFG(pass, node.Pos(), "should convert %s (type %s) to %s instead of using struct literal", + ReportNodefFG(pass, node, "should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil), (*ast.CompositeLit)(nil)}, fn) @@ -1008,7 +1008,7 @@ func LintTrim(pass *analysis.Pass) (interface{}, error) { IsCallToAST(pass, condCall, "bytes.HasPrefix") && IsCallToAST(pass, rhs, "bytes.TrimPrefix") || IsCallToAST(pass, condCall, "bytes.HasSuffix") && IsCallToAST(pass, rhs, "bytes.TrimSuffix") || IsCallToAST(pass, condCall, "bytes.Contains") && IsCallToAST(pass, rhs, "bytes.Replace") { - ReportfFG(pass, ifstmt.Pos(), "should replace this if statement with an unconditional %s", CallNameAST(pass, rhs)) + ReportNodefFG(pass, ifstmt, "should replace this if statement with an unconditional %s", CallNameAST(pass, rhs)) } return case *ast.SliceExpr: @@ -1106,7 +1106,7 @@ 
func LintTrim(pass *analysis.Pass) (interface{}, error) { case "HasSuffix": replacement = "TrimSuffix" } - ReportfFG(pass, ifstmt.Pos(), "should replace this if statement with an unconditional %s.%s", pkg, replacement) + ReportNodefFG(pass, ifstmt, "should replace this if statement with an unconditional %s.%s", pkg, replacement) } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) @@ -1210,7 +1210,7 @@ func LintLoopSlide(pass *analysis.Pass) (interface{}, error) { return } - ReportfFG(pass, loop.Pos(), "should use copy(%s[:%s], %s[%s:]) instead", Render(pass, bs1), Render(pass, biny), Render(pass, bs1), Render(pass, add1)) + ReportNodefFG(pass, loop, "should use copy(%s[:%s], %s[%s:]) instead", Render(pass, bs1), Render(pass, biny), Render(pass, bs1), Render(pass, add1)) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) return nil, nil @@ -1230,12 +1230,12 @@ func LintMakeLenCap(pass *analysis.Pass) (interface{}, error) { break } if IsZero(call.Args[Arg("make.size[0]")]) { - ReportfFG(pass, call.Args[Arg("make.size[0]")].Pos(), "should use make(%s) instead", Render(pass, call.Args[Arg("make.t")])) + ReportNodefFG(pass, call.Args[Arg("make.size[0]")], "should use make(%s) instead", Render(pass, call.Args[Arg("make.t")])) } case 3: // make(T, len, cap) if Render(pass, call.Args[Arg("make.size[0]")]) == Render(pass, call.Args[Arg("make.size[1]")]) { - ReportfFG(pass, call.Args[Arg("make.size[0]")].Pos(), + ReportNodefFG(pass, call.Args[Arg("make.size[0]")], "should use make(%s, %s) instead", Render(pass, call.Args[Arg("make.t")]), Render(pass, call.Args[Arg("make.size[0]")])) } @@ -1293,7 +1293,7 @@ func LintAssertNotNil(pass *analysis.Pass) (interface{}, error) { !(isNilCheck(assertIdent, binop.Y) && isOKCheck(assignIdent, binop.X)) { return } - ReportfFG(pass, ifstmt.Pos(), "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) + ReportNodefFG(pass, ifstmt, "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) } fn2 := func(node ast.Node) { // Check that outer ifstmt is an 'if x != nil {}' @@ -1349,7 +1349,7 @@ func LintAssertNotNil(pass *analysis.Pass) (interface{}, error) { if !isOKCheck(assignIdent, ifstmt.Cond) { return } - ReportfFG(pass, ifstmt.Pos(), "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) + ReportNodefFG(pass, ifstmt, "when %s is true, %s can't be nil", Render(pass, assignIdent), Render(pass, assertIdent)) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn1) pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn2) @@ -1421,7 +1421,7 @@ func LintDeclareAssign(pass *analysis.Pass) (interface{}, error) { continue } - ReportfFG(pass, decl.Pos(), "should merge variable declaration with assignment on next line") + ReportNodefFG(pass, decl, "should merge variable declaration with assignment on next line") } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) @@ -1438,7 +1438,7 @@ func LintRedundantBreak(pass *analysis.Pass) (interface{}, error) { if !ok || branch.Tok != token.BREAK || branch.Label != nil { return } - ReportfFG(pass, branch.Pos(), "redundant break statement") + ReportNodefFG(pass, branch, "redundant break statement") } fn2 := func(node ast.Node) { var ret *ast.FieldList @@ -1465,7 +1465,7 @@ func 
LintRedundantBreak(pass *analysis.Pass) (interface{}, error) { } // we don't need to check rst.Results as we already // checked x.Type.Results to be nil. - ReportfFG(pass, rst.Pos(), "redundant return statement") + ReportNodefFG(pass, rst, "redundant return statement") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CaseClause)(nil)}, fn1) pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)}, fn2) @@ -1513,15 +1513,15 @@ func LintRedundantSprintf(pass *analysis.Pass) (interface{}, error) { ssapkg := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).Pkg if isStringer(typ, &ssapkg.Prog.MethodSets) { - pass.Reportf(call.Pos(), "should use String() instead of fmt.Sprintf") + ReportNodef(pass, call, "should use String() instead of fmt.Sprintf") return } if typ.Underlying() == types.Universe.Lookup("string").Type() { if typ == types.Universe.Lookup("string").Type() { - ReportfFG(pass, call.Pos(), "the argument is already a string, there's no need to use fmt.Sprintf") + ReportNodefFG(pass, call, "the argument is already a string, there's no need to use fmt.Sprintf") } else { - ReportfFG(pass, call.Pos(), "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") + ReportNodefFG(pass, call, "the argument's underlying type is a string, should use a simple conversion instead of fmt.Sprintf") } } } @@ -1538,7 +1538,7 @@ func LintErrorsNewSprintf(pass *analysis.Pass) (interface{}, error) { if !IsCallToAST(pass, call.Args[Arg("errors.New.text")], "fmt.Sprintf") { return } - ReportfFG(pass, node.Pos(), "should use fmt.Errorf(...) instead of errors.New(fmt.Sprintf(...))") + ReportNodefFG(pass, node, "should use fmt.Errorf(...) 
instead of errors.New(fmt.Sprintf(...))") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) return nil, nil @@ -1577,7 +1577,7 @@ func LintNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) { } switch pass.TypesInfo.TypeOf(rangeXIdent).(type) { case *types.Slice, *types.Map: - ReportfFG(pass, node.Pos(), "unnecessary nil check around range") + ReportNodefFG(pass, node, "unnecessary nil check around range") } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) @@ -1607,7 +1607,7 @@ func isPermissibleSort(pass *analysis.Pass, node ast.Node) bool { func LintSortHelpers(pass *analysis.Pass) (interface{}, error) { type Error struct { - node lint.Positioner + node ast.Node msg string } var allErrors []Error @@ -1670,7 +1670,7 @@ func LintSortHelpers(pass *analysis.Pass) (interface{}, error) { continue } prev = err.node.Pos() - ReportfFG(pass, err.node.Pos(), "%s", err.msg) + ReportNodefFG(pass, err.node, "%s", err.msg) } return nil, nil } @@ -1732,7 +1732,7 @@ func LintGuardedDelete(pass *analysis.Pass) (interface{}, error) { if Render(pass, call.Args[0]) != Render(pass, m) || Render(pass, call.Args[1]) != Render(pass, key) { return } - ReportfFG(pass, stmt.Pos(), "unnecessary guard around call to delete") + ReportNodefFG(pass, stmt, "unnecessary guard around call to delete") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.IfStmt)(nil)}, fn) return nil, nil @@ -1801,7 +1801,7 @@ func LintSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) { pos := lint.DisplayPosition(pass.Fset, offender.Pos()) at += "\n\t" + pos.String() } - ReportfFG(pass, expr.Pos(), "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(pass, ident), Render(pass, ident), at) + ReportNodefFG(pass, expr, "assigning the result of this type assertion to a variable (switch %s := %s.(type)) could eliminate the following type assertions:%s", Render(pass, ident), Render(pass, ident), at) } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.TypeSwitchStmt)(nil)}, fn) diff --git a/staticcheck/lint.go b/staticcheck/lint.go index 83834ef7e..fee2d7860 100644 --- a/staticcheck/lint.go +++ b/staticcheck/lint.go @@ -765,10 +765,10 @@ func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { } if isName(pass, arg, "os.Kill") || isName(pass, arg, "syscall.SIGKILL") { - pass.Reportf(arg.Pos(), "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(pass, arg)) + ReportNodef(pass, arg, "%s cannot be trapped (did you mean syscall.SIGTERM?)", Render(pass, arg)) } if isName(pass, arg, "syscall.SIGSTOP") { - pass.Reportf(arg.Pos(), "%s signal cannot be trapped", Render(pass, arg)) + ReportNodef(pass, arg, "%s signal cannot be trapped", Render(pass, arg)) } } } @@ -810,7 +810,7 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) { if err != nil { // TODO(dominikh): whitelist other parse errors, if any if strings.Contains(err.Error(), "unexpected") { - pass.Reportf(call.Args[Arg("(*text/template.Template).Parse.text")].Pos(), "%s", err) + ReportNodef(pass, call.Args[Arg("(*text/template.Template).Parse.text")], "%s", err) } } } @@ -842,7 +842,7 @@ func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { if n != 1 { recommendation = fmt.Sprintf("time.Sleep(%d * time.Nanosecond)", n) } - 
pass.Reportf(call.Args[Arg("time.Sleep.d")].Pos(), + ReportNodef(pass, call.Args[Arg("time.Sleep.d")], "sleeping for %d nanoseconds is probably a bug. Be explicit if it isn't: %s", n, recommendation) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) @@ -864,7 +864,7 @@ func CheckWaitgroupAdd(pass *analysis.Pass) (interface{}, error) { return } if IsCallToAST(pass, stmt.X, "(*sync.WaitGroup).Add") { - pass.Reportf(stmt.Pos(), "should call %s before starting the goroutine to avoid a race", + ReportNodef(pass, stmt, "should call %s before starting the goroutine to avoid a race", Render(pass, stmt)) } } @@ -908,9 +908,9 @@ func CheckInfiniteEmptyLoop(pass *analysis.Pass) (interface{}, error) { } } } - pass.Reportf(loop.Pos(), "loop condition never changes or has a race condition") + ReportNodef(pass, loop, "loop condition never changes or has a race condition") } - pass.Reportf(loop.Pos(), "this loop will spin, using 100%% CPU") + ReportNodef(pass, loop, "this loop will spin, using 100%% CPU") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) return nil, nil @@ -951,7 +951,7 @@ func CheckDeferInInfiniteLoop(pass *analysis.Pass) (interface{}, error) { return } for _, stmt := range defers { - pass.Reportf(stmt.Pos(), "defers in this infinite loop will never run") + ReportNodef(pass, stmt, "defers in this infinite loop will never run") } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.ForStmt)(nil)}, fn) @@ -969,7 +969,7 @@ func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, erro fn2 := func(node ast.Node) bool { switch stmt := node.(type) { case *ast.DeferStmt: - pass.Reportf(stmt.Pos(), "defers in this range loop won't run unless the channel gets closed") + ReportNodef(pass, stmt, "defers in this range loop won't run unless the channel gets closed") case *ast.FuncLit: // Don't look into function bodies return false @@ -993,7 +993,7 @@ func CheckTestMainExit(pass *analysis.Pass) (interface{}, error) { if !push { if fnmain != nil && node == fnmain { if !callsExit && callsRun { - pass.Reportf(fnmain.Pos(), "TestMain should call os.Exit to set exit code") + ReportNodef(pass, fnmain, "TestMain should call os.Exit to set exit code") } fnmain = nil callsExit = false @@ -1071,7 +1071,7 @@ func CheckExec(pass *analysis.Pass) (interface{}, error) { if !strings.Contains(val, " ") || strings.Contains(val, `\`) || strings.Contains(val, "/") { return } - pass.Reportf(call.Args[Arg("os/exec.Command.name")].Pos(), + ReportNodef(pass, call.Args[Arg("os/exec.Command.name")], "first argument to exec.Command looks like a shell command, but a program name or path are expected") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) @@ -1090,7 +1090,7 @@ func CheckLoopEmptyDefault(pass *analysis.Pass) (interface{}, error) { } for _, c := range sel.Body.List { if comm, ok := c.(*ast.CommClause); ok && comm.Comm == nil && len(comm.Body) == 0 { - pass.Reportf(comm.Pos(), "should not have an empty default case in a for+select loop. The loop will spin.") + ReportNodef(pass, comm, "should not have an empty default case in a for+select loop. The loop will spin.") } } } @@ -1135,7 +1135,7 @@ func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { // 0 == 0 are slim. 
return } - pass.Reportf(op.Pos(), "identical expressions on the left and right side of the '%s' operator", op.Op) + ReportNodef(pass, op, "identical expressions on the left and right side of the '%s' operator", op.Op) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) return nil, nil @@ -1191,7 +1191,7 @@ func CheckScopedBreak(pass *analysis.Pass) (interface{}, error) { if !ok || branch.Tok != token.BREAK || branch.Label != nil { continue } - pass.Reportf(branch.Pos(), "ineffective break statement. Did you mean to break out of the outer loop?") + ReportNodef(pass, branch, "ineffective break statement. Did you mean to break out of the outer loop?") } } } @@ -1219,7 +1219,7 @@ func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) { default: return } - pass.Reportf(call.Args[arg].Pos(), + ReportNodef(pass, call.Args[arg], "printf-style function with dynamic format string and no further arguments should use print-style function instead") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) @@ -1288,7 +1288,7 @@ func CheckEarlyDefer(pass *analysis.Pass) (interface{}, error) { if sel.Sel.Name != "Close" { continue } - pass.Reportf(def.Pos(), "should check returned error before deferring %s", Render(pass, def.Call)) + ReportNodef(pass, def, "should check returned error before deferring %s", Render(pass, def.Call)) } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BlockStmt)(nil)}, fn) @@ -1356,7 +1356,7 @@ func CheckEmptyCriticalSection(pass *analysis.Pass) (interface{}, error) { } if (method1 == "Lock" && method2 == "Unlock") || (method1 == "RLock" && method2 == "RUnlock") { - pass.Reportf(block.List[i+1].Pos(), "empty critical section") + ReportNodef(pass, block.List[i+1], "empty critical section") } } } @@ -1374,14 +1374,14 @@ func CheckIneffectiveCopy(pass *analysis.Pass) (interface{}, error) { if star, ok := unary.X.(*ast.StarExpr); ok && unary.Op == token.AND { ident, ok := star.X.(*ast.Ident) if !ok || !cgoIdent.MatchString(ident.Name) { - pass.Reportf(unary.Pos(), "&*x will be simplified to x. It will not copy x.") + ReportNodef(pass, unary, "&*x will be simplified to x. It will not copy x.") } } } if star, ok := node.(*ast.StarExpr); ok { if unary, ok := star.X.(*ast.UnaryExpr); ok && unary.Op == token.AND { - pass.Reportf(star.Pos(), "*&x will be simplified to x. It will not copy x.") + ReportNodef(pass, star, "*&x will be simplified to x. 
It will not copy x.") } } } @@ -1456,7 +1456,7 @@ func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { if s == http.CanonicalHeaderKey(s) { return true } - pass.Reportf(op.Pos(), "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) + ReportNodef(pass, op, "keys in http.Header are canonicalized, %q is not canonical; fix the constant or use http.CanonicalHeaderKey", s) return true } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes([]ast.Node{(*ast.AssignStmt)(nil), (*ast.IndexExpr)(nil)}, fn) @@ -1479,7 +1479,7 @@ func CheckBenchmarkN(pass *analysis.Pass) (interface{}, error) { if !IsOfType(pass, sel.X, "*testing.B") { return } - pass.Reportf(assign.Pos(), "should not assign to %s", Render(pass, sel)) + ReportNodef(pass, assign, "should not assign to %s", Render(pass, sel)) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) return nil, nil @@ -1526,7 +1526,7 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) { if ident, ok := lhs.(*ast.Ident); !ok || ok && ident.Name == "_" { continue } - pass.Reportf(lhs.Pos(), "this value of %s is never used", lhs) + ReportNodef(pass, lhs, "this value of %s is never used", lhs) } } return true @@ -1547,7 +1547,7 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) { return true } if len(FilterDebug(*refs)) == 0 { - pass.Reportf(lhs.Pos(), "this value of %s is never used", lhs) + ReportNodef(pass, lhs, "this value of %s is never used", lhs) } } return true @@ -1674,30 +1674,30 @@ func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) { if (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.Y, max) || (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.X, max) { - pass.Reportf(expr.Pos(), "no value of type %s is greater than %s", basic, max) + ReportNodef(pass, expr, "no value of type %s is greater than %s", basic, max) } if expr.Op == token.LEQ && isobj(expr.Y, max) || expr.Op == token.GEQ && isobj(expr.X, max) { - pass.Reportf(expr.Pos(), "every value of type %s is <= %s", basic, max) + ReportNodef(pass, expr, "every value of type %s is <= %s", basic, max) } if (basic.Info() & types.IsUnsigned) != 0 { if (expr.Op == token.LSS || expr.Op == token.LEQ) && IsIntLiteral(expr.Y, "0") || (expr.Op == token.GTR || expr.Op == token.GEQ) && IsIntLiteral(expr.X, "0") { - pass.Reportf(expr.Pos(), "no value of type %s is less than 0", basic) + ReportNodef(pass, expr, "no value of type %s is less than 0", basic) } if expr.Op == token.GEQ && IsIntLiteral(expr.Y, "0") || expr.Op == token.LEQ && IsIntLiteral(expr.X, "0") { - pass.Reportf(expr.Pos(), "every value of type %s is >= 0", basic) + ReportNodef(pass, expr, "every value of type %s is >= 0", basic) } } else { if (expr.Op == token.LSS || expr.Op == token.LEQ) && isobj(expr.Y, min) || (expr.Op == token.GTR || expr.Op == token.GEQ) && isobj(expr.X, min) { - pass.Reportf(expr.Pos(), "no value of type %s is less than %s", basic, min) + ReportNodef(pass, expr, "no value of type %s is less than %s", basic, min) } if expr.Op == token.GEQ && isobj(expr.Y, min) || expr.Op == token.LEQ && isobj(expr.X, min) { - pass.Reportf(expr.Pos(), "every value of type %s is >= %s", basic, min) + ReportNodef(pass, expr, "every value of type %s is >= %s", basic, min) } } @@ -1804,7 +1804,7 @@ func CheckLoopCondition(pass *analysis.Pass) (interface{}, error) { case *ssa.UnOp: return true } - pass.Reportf(cond.Pos(), 
"variable in loop condition never changes") + ReportNodef(pass, cond, "variable in loop condition never changes") return true } @@ -1872,7 +1872,7 @@ func CheckArgOverwritten(pass *analysis.Pass) (interface{}, error) { return true }) if assigned { - pass.Reportf(arg.Pos(), "argument %s is overwritten before first use", arg) + ReportNodef(pass, arg, "argument %s is overwritten before first use", arg) } } } @@ -1983,7 +1983,7 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { return true }) if unconditionalExit != nil { - pass.Reportf(unconditionalExit.Pos(), "the surrounding loop is unconditionally terminated") + ReportNodef(pass, unconditionalExit, "the surrounding loop is unconditionally terminated") } return true }) @@ -2011,7 +2011,7 @@ func CheckNilContext(pass *analysis.Pass) (interface{}, error) { if !IsType(sig.Params().At(0).Type(), "context.Context") { return } - pass.Reportf(call.Args[0].Pos(), + ReportNodef(pass, call.Args[0], "do not pass a nil Context, even if a function permits it; pass context.TODO if you are unsure about which Context to use") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) @@ -2047,7 +2047,7 @@ func CheckSeeker(pass *analysis.Pass) (interface{}, error) { if pkg.Name != "io" { return } - pass.Reportf(call.Pos(), "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") + ReportNodef(pass, call, "the first argument of io.Seeker is the offset, but an io.Seek* constant is being used instead") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.CallExpr)(nil)}, fn) return nil, nil @@ -2247,7 +2247,7 @@ func CheckSliceOutOfBounds(pass *analysis.Pass) (interface{}, error) { continue } if idxr.Lower.Cmp(sr.Length.Upper) >= 0 { - pass.Reportf(ia.Pos(), "index out of bounds") + ReportNodef(pass, ia, "index out of bounds") } } } @@ -2412,7 +2412,7 @@ func CheckDoubleNegation(pass *analysis.Pass) (interface{}, error) { if unary1.Op != token.NOT || unary2.Op != token.NOT { return } - pass.Reportf(unary1.Pos(), "negating a boolean twice has no effect; is this a typo?") + ReportNodef(pass, unary1, "negating a boolean twice has no effect; is this a typo?") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.UnaryExpr)(nil)}, fn) return nil, nil @@ -2470,7 +2470,7 @@ func CheckRepeatedIfElse(pass *analysis.Pass) (interface{}, error) { s := Render(pass, cond) counts[s]++ if counts[s] == 2 { - pass.Reportf(cond.Pos(), "this condition occurs multiple times in this if/else if chain") + ReportNodef(pass, cond, "this condition occurs multiple times in this if/else if chain") } } } @@ -2547,7 +2547,7 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { if err != nil { continue } - pass.Reportf(call.Args[i].Pos(), "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) + ReportNodef(pass, call.Args[i], "file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value) } } } @@ -2657,7 +2657,7 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { return true } } - pass.Reportf(sel.Pos(), "%s is deprecated: %s", Render(pass, sel), depr.Msg) + ReportNodef(pass, sel, "%s is deprecated: %s", Render(pass, sel), depr.Msg) return true } return true @@ -2673,7 +2673,7 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { path := p[1 : len(p)-1] imp := imps[path] if depr, ok := deprs.Packages[imp]; ok { - 
pass.Reportf(spec.Pos(), "Package %s is deprecated: %s", path, depr.Msg) + ReportNodef(pass, spec, "Package %s is deprecated: %s", path, depr.Msg) } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Nodes(nil, fn) @@ -3043,7 +3043,7 @@ func CheckMissingEnumTypesInDeclaration(pass *analysis.Pass) (interface{}, error continue groupLoop } } - pass.Reportf(group[0].Pos(), "only the first constant in this group has an explicit type") + ReportNodef(pass, group[0], "only the first constant in this group has an explicit type") } } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.GenDecl)(nil)}, fn) @@ -3146,7 +3146,7 @@ func CheckToLowerToUpperComparison(pass *analysis.Pass) (interface{}, error) { bang = "!" } - pass.Reportf(binExpr.Pos(), "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) + ReportNodef(pass, binExpr, "should use %sstrings.EqualFold(a, b) instead of %s(a) %s %s(b)", bang, call, binExpr.Op, call) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) @@ -3211,7 +3211,7 @@ func CheckUnreachableTypeCases(pass *analysis.Pass) (interface{}, error) { for i, cc := range ccs[:len(ccs)-1] { for _, next := range ccs[i+1:] { if T, V, yes := subsumesAny(cc.types, next.types); yes { - pass.Reportf(next.cc.Pos(), "unreachable case clause: %s will always match before %s", T.String(), V.String()) + ReportNodef(pass, next.cc, "unreachable case clause: %s will always match before %s", T.String(), V.String()) } } } @@ -3244,12 +3244,12 @@ func CheckStructTags(pass *analysis.Pass) (interface{}, error) { } tags, err := parseStructTag(field.Tag.Value[1 : len(field.Tag.Value)-1]) if err != nil { - pass.Reportf(field.Tag.Pos(), "unparseable struct tag: %s", err) + ReportNodef(pass, field.Tag, "unparseable struct tag: %s", err) continue } for k, v := range tags { if len(v) > 1 { - pass.Reportf(field.Tag.Pos(), "duplicate struct tag %q", k) + ReportNodef(pass, field.Tag, "duplicate struct tag %q", k) continue } @@ -3273,7 +3273,7 @@ func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) { fields := strings.Split(tag, ",") for _, r := range fields[0] { if !unicode.IsLetter(r) && !unicode.IsDigit(r) && !strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", r) { - pass.Reportf(field.Tag.Pos(), "invalid JSON field name %q", fields[0]) + ReportNodef(pass, field.Tag, "invalid JSON field name %q", fields[0]) } } var co, cs, ci int @@ -3289,22 +3289,22 @@ func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) { T := Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() basic, ok := T.(*types.Basic) if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { - pass.Reportf(field.Tag.Pos(), "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + ReportNodef(pass, field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") } case "inline": ci++ default: - pass.Reportf(field.Tag.Pos(), "unknown JSON option %q", s) + ReportNodef(pass, field.Tag, "unknown JSON option %q", s) } } if co > 1 { - pass.Reportf(field.Tag.Pos(), `duplicate JSON option "omitempty"`) + ReportNodef(pass, field.Tag, `duplicate JSON option "omitempty"`) } if cs > 1 { - pass.Reportf(field.Tag.Pos(), `duplicate JSON option "string"`) + ReportNodef(pass, field.Tag, `duplicate JSON option "string"`) } if ci > 
1 { - pass.Reportf(field.Tag.Pos(), `duplicate JSON option "inline"`) + ReportNodef(pass, field.Tag, `duplicate JSON option "inline"`) } } @@ -3326,15 +3326,15 @@ func checkXMLTag(pass *analysis.Pass, field *ast.Field, tag string) { counts[s]++ case "": default: - pass.Reportf(field.Tag.Pos(), "unknown XML option %q", s) + ReportNodef(pass, field.Tag, "unknown XML option %q", s) } } for k, v := range counts { if v > 1 { - pass.Reportf(field.Tag.Pos(), "duplicate XML option %q", k) + ReportNodef(pass, field.Tag, "duplicate XML option %q", k) } } if len(exclusives) > 1 { - pass.Reportf(field.Tag.Pos(), "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) + ReportNodef(pass, field.Tag, "XML options %s are mutually exclusive", strings.Join(exclusives, " and ")) } } diff --git a/stylecheck/lint.go b/stylecheck/lint.go index 35ddc15b9..8b0031b3a 100644 --- a/stylecheck/lint.go +++ b/stylecheck/lint.go @@ -43,7 +43,7 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) { hasDocs = true prefix := "Package " + f.Name.Name + " " if !strings.HasPrefix(strings.TrimSpace(f.Doc.Text()), prefix) { - pass.Reportf(f.Doc.Pos(), `package comment should be of the form "%s..."`, prefix) + ReportNodef(pass, f.Doc, `package comment should be of the form "%s..."`, prefix) } f.Doc.Text() } @@ -54,7 +54,7 @@ func CheckPackageComment(pass *analysis.Pass) (interface{}, error) { if IsInTest(pass, f) { continue } - pass.Reportf(f.Pos(), "at least one file in a package should have a package comment") + ReportNodef(pass, f, "at least one file in a package should have a package comment") } } return nil, nil @@ -73,7 +73,7 @@ func CheckDotImports(pass *analysis.Pass) (interface{}, error) { } if imp.Name != nil && imp.Name.Name == "." && !IsInTest(pass, f) { - ReportfFG(pass, imp.Pos(), "should not use dot imports") + ReportNodefFG(pass, imp, "should not use dot imports") } } } @@ -132,7 +132,7 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) { } if imp.Doc == nil && imp.Comment == nil && !skip[imp] { - pass.Reportf(imp.Pos(), "a blank import should be only in a main or test package, or have a comment justifying it") + ReportNodef(pass, imp, "a blank import should be only in a main or test package, or have a comment justifying it") } } } @@ -164,7 +164,7 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) { suffix = "--" } - pass.Reportf(assign.Pos(), "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix) + ReportNodef(pass, assign, "should replace %s with %s%s", Render(pass, assign), Render(pass, assign.Lhs[0]), suffix) } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.AssignStmt)(nil)}, fn) return nil, nil @@ -404,7 +404,7 @@ func CheckTimeNames(pass *analysis.Pass) (interface{}, error) { for _, name := range names { for _, suffix := range suffixes { if strings.HasSuffix(name.Name, suffix) { - pass.Reportf(name.Pos(), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) + ReportNodef(pass, name, "var %s is of type %v; don't use unit-specific suffix %q", name.Name, T, suffix) break } } @@ -452,7 +452,7 @@ func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) { prefix = "Err" } if !strings.HasPrefix(name.Name, prefix) { - pass.Reportf(name.Pos(), "error var %s should have name of the form %sFoo", name.Name, prefix) + ReportNodef(pass, name, "error var %s should have name of the form %sFoo", name.Name, prefix) } } } @@ -566,7 +566,7 @@ func 
CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) { if !ok { return true } - ReportfFG(pass, lit.Pos(), "should use constant http.%s instead of numeric literal %d", s, n) + ReportNodefFG(pass, lit, "should use constant http.%s instead of numeric literal %d", s, n) return true } // OPT(dh): replace with inspector @@ -582,7 +582,7 @@ func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) { list := stmt.Body.List for i, c := range list { if c.(*ast.CaseClause).List == nil && i != 0 && i != len(list)-1 { - ReportfFG(pass, c.Pos(), "default case should be first or last in switch statement") + ReportNodefFG(pass, c, "default case should be first or last in switch statement") break } } @@ -604,7 +604,7 @@ func CheckYodaConditions(pass *analysis.Pass) (interface{}, error) { // Don't flag lit == lit conditions, just in case return } - ReportfFG(pass, cond.Pos(), "don't use Yoda conditions") + ReportNodefFG(pass, cond, "don't use Yoda conditions") } pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder([]ast.Node{(*ast.BinaryExpr)(nil)}, fn) return nil, nil @@ -618,9 +618,9 @@ func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) { } for _, r := range lit.Value { if unicode.Is(unicode.Cf, r) { - pass.Reportf(lit.Pos(), "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) + ReportNodef(pass, lit, "string literal contains the Unicode format character %U, consider using the %q escape sequence", r, r) } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' { - pass.Reportf(lit.Pos(), "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) + ReportNodef(pass, lit, "string literal contains the Unicode control character %U, consider using the %q escape sequence", r, r) } } } From 28496d4e0f3010c77881ccf405ae2804cc91851e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 14:41:30 +0200 Subject: [PATCH 215/254] doc: add release notes of 2017.2 and 2019.1 --- doc/2017.2.html | 182 ++++++++++++++++++++++++++++++++++++++ doc/2019.1.html | 227 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 409 insertions(+) create mode 100644 doc/2017.2.html create mode 100644 doc/2019.1.html diff --git a/doc/2017.2.html b/doc/2017.2.html new file mode 100644 index 000000000..a6b522ad7 --- /dev/null +++ b/doc/2017.2.html @@ -0,0 +1,182 @@ +

Introduction to staticcheck 2017.2

+ +

+ The 2017.2 release of the staticcheck suite of tools focuses on + reducing friction – fewer false positives, more tools for suppressing + unwanted output, and JSON output for easier integration with other + tools. +

+ +

New features

+ +

Linter directives for ignoring problems

+ +

+ In the past, the only way to ignore reported problems was by using + the -ignore flag. This led to overreaching ignore rules + which weren't maintained regularly. Now, //lint:ignore and + //lint:file-ignore comments can be used to ignore + problems, either on specific lines or file-wide. A full description of + these directives, their syntax and their behavior can be found + in the documentation.
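+
+  For example, a single finding can be silenced like this (the function name
+  is purely illustrative):
+
+      //lint:ignore SA1019 we still depend on this deprecated API for older deployments
+      legacyFrobnicate()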

+ +

+ A related change adds the -show-ignored command line + flag, which outputs problems that would otherwise be ignored by + directives. This is primarily of use with the JSON output format, + for custom front ends. +

+ +

Output formats

+ +

+ All staticcheck tools now support multiple output formats, selectable + with the -f flag. +
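+
+  For example (the import path is illustrative):
+
+      staticcheck -f json github.com/example/foo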

+ +

+ Currently, two formats are supported. The first format is + text, which is the default and uses the existing terminal + output format. The other is json, which emits JSON. The + output is a stream of objects, allowing for a future streaming output + mode. Each object uses the following example schema: + +

{
+  "checker": "staticcheck",
+  "code": "SA4006",
+  "location": {
+    "file": "/usr/lib/go/src/database/sql/sql_test.go",
+    "line": 2701,
+    "column": 5
+  },
+  "message": "this value of err is never used",
+  "ignored": false
+}
+

+ +

Control over the exit code of megacheck

+ +

+ Megacheck, the tool for running multiple checkers at once, now has + per-checker flags for controlling the overall exit code. Previously, + megacheck would exit non-zero if any checker found a problem. Now it + is possible to configure for each checker whether it should cause a + non-zero exit, by using the -<checker>.exit-non-zero + flags. This flag defaults to false for gosimple and to true for + the other checkers.

+ +

Changes to checks

+ +

Support for NoCopy in unused

+ +

+ The unused tool now understands NoCopy sentinel types. The + NoCopy type, which is canonically a struct with no fields and only a + single, empty Lock method, can be used to mark structs as not safe + for copying. By declaring a field of this type, go vet will complain + when it sees instances of the struct being copied. +
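+
+  As a rough sketch, such a sentinel type and its use look like this (the
+  names are illustrative):
+
+      type noCopy struct{}
+
+      func (*noCopy) Lock() {}
+
+      type Registry struct {
+          noCopy  noCopy // go vet's copylocks check now flags copies of Registry
+          entries map[string]int
+      }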

+ +

+ In the past, unused marked these fields as unused; now it ignores + them.

+ +

Detection of deprecated identifiers

+ +

+ SA1019 now + correctly identifies deprecated methods, in addition to fields and + package-level objects. Additionally, staticcheck now keeps track of + when each identifier in the Go standard library was deprecated, so + that using -go <version> can correctly + ignore deprecation warnings that don't apply to the targeted Go + version. +
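+
+  Deprecations are recognized via the standard comment convention; the names
+  in this sketch are illustrative:
+
+      // Deprecated: Use NewClient instead.
+      func Dial(addr string) (*Client, error) {
+          return NewClient(addr)
+      }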

+ +

Other

+ +
    +
  • + {{ check "SA4017" }} no + longer reports pure functions that are stubs – functions that + immediately panic or return a constant. +
  • +
  • + {{ check "SA5007" }} no + longer flags infinite recursion when the function call is spawned + as a new goroutine. +
  • +
  • + {{ check "SA6002" }} + now recognizes that unsafe.Pointer is + a pointer type. +
  • +
  • + {{ check "S1005" }} + no longer suggests for range when targeting a version + older than Go 1.4. +
  • +
  • + {{ check "S1026" }} has been + removed. In some rare instances, copying a string is necessary, and + all common ways of doing this were incorrectly flagged by the check. +
  • +
+ + +

Other changes

+ +
    +
  • + The -ignore flag now supports ignoring checks in all packages, + by using * as the path. +
  • +
  • + //line directives are now being ignored when + reporting problems. That is, problems will always be reported for + the actual position in the Go files they occur. +
  • +
  • + From now on, only the first compilation error encountered will be + reported. The tools expect to be run on valid Go code and there + was little (if any) value in reporting all compilation errors + encountered, especially because simple errors can lead to many + follow-up errors. +
  • +
+ +

Staticcheck 2017.2.1 Release Notes

+ +

+ The 2017.2.1 release of the staticcheck suite of tools is the first + bug fix release, fixing one bug. +

+ +

Fixed bugs

+ +
    +
  • + Staticcheck 2017.2 made the detection of deprecated objects + Go-version aware. Unfortunately, this only worked correctly for + fields and methods, but not package-level objects. This release + fixes that. +
  • +
+ +

Staticcheck 2017.2.2 Release Notes

+ +

+ The 2017.2.2 release of the staticcheck suite of tools is the second + bug fix release, fixing several bugs. +

+ +

Fixed bugs

+ +
    +
  • unused: correctly apply the NoCopy exemption when using the -exported flag.
  • +
  • keyify: support external test packages (package foo_test).
  • +
  • staticcheck: disable {{ check "SA4005" }} – the check, in its current form, is prone to false positives and will be reimplemented in a future release.
  • +
diff --git a/doc/2019.1.html b/doc/2019.1.html new file mode 100644 index 000000000..fec0d7f35 --- /dev/null +++ b/doc/2019.1.html @@ -0,0 +1,227 @@ +

Big restructuring

+ +

+ At the core of the 2019.1 release lies the grand restructuring of all of the staticcheck tools. + All of the individual checkers, as well as megacheck, have been merged into a single tool, + which is simply called staticcheck. + From this point forward, staticcheck will be the static analyzer for Go code. + It will cover all of the existing categories of checks – bugs, simplifications, performance – + as well as future categories, such as the new style checks. +

+ +

+ This change makes a series of simplifications possible. + Per-tool command line flags in megacheck have been replaced with unified flags + (-checks and -fail) + that operate on arbitrary subsets of checks. + Consumers of the JSON output no longer need to know about different checker names + and can instead rely solely on user-controllable severities. + And not to be neglected: gone is the silly name of megacheck. +

+ +

+ This change will require some adjustments to your pipelines. + Even though the gosimple, unused, and megacheck tools still exist, they have been deprecated + and will be removed in the next release of staticcheck. + Additionally, megacheck's -<tool>.exit-non-zero flags have been rendered inoperable. + Instead, you will have to use the -fail flag. + Furthermore, -fail defaults to all, meaning all checks will cause a non-zero exit. + Previous versions of megacheck had different defaults for different checkers, trying to guess the user's intention. + Instead of guessing, staticcheck expects you to provide the correct flags.

+ +

+ Since all of the tools have been merged into staticcheck, it will no longer run just one group of checks. + This may lead to additional problems being reported. + To restore the old behavior, you can use the new -checks flag. + -checks "SA*" will run the same set of checks that the old staticcheck tool did. + The same flag should be used in place of megacheck's – now deprecated – -<tool>.enabled flags. +

+ +

+ Details on all of the command-line flags can be found in the documentation. +

+ +

Configuration files

+ +

+ Staticcheck 2019.1 adds support for per-project configuration files. + With these it will be possible to standardize and codify linter settings, the set of enabled checks, and more. + Please see the documentation page on configuration for all the details! +
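+
+  As a minimal sketch, a staticcheck.conf file might contain nothing more
+  than the set of checks to run:
+
+      checks = ["all", "-ST1000"]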

+ +

Build system integration

+ +

+ Beginning with this release, staticcheck calls out to the tools of the underlying build system + (go for most people) to determine the list of Go files to process. + This change should not affect most people. + It does, however, have some implications: + the system that staticcheck runs on needs access to a full Go toolchain – + just the source code of the standard library no longer suffices. + Furthermore, setting GOROOT to use a different Go installation no longer works as expected. + Instead, PATH has to be modified so that go resolves to the desired Go command. +
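+
+  For example, to check a project with a specific toolchain, one might run
+  something like this (the installation path is illustrative):
+
+      PATH="$HOME/sdk/go1.12/bin:$PATH" staticcheck ./...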

+ +

+ This change has been necessary to support Go modules. + Additionally, it will allow us to support alternative build systems such as Bazel in the future. +

+ +

Handling of broken packages

+ +

+ We have redesigned the way staticcheck handles broken packages. + Previously, if you ran staticcheck ... and any package wouldn't compile, + staticcheck would refuse to check any packages whatsoever. + Now, it will skip broken packages, as well as any of their dependents, and check only the remaining packages. + Any build errors that are encountered will be reported as problems. +

+ +

Checks

+ +

New checks

+

+ Staticcheck 2019.1 adds a new category of checks, ST1. + ST1 contains checks for common style violations – poor variable naming, incorrectly formatted comments and the like. + It brings the good parts of golint to staticcheck, + and adds some checks of its own. +

+ +

+ In addition, some other checks have been added. +

+ +

+ {{ check "S1032" }} recommends replacing sort.Sort(sort.StringSlice(...)) with sort.Strings(...); + similarly for other types that have helpers for sorting. +

+ +

+ {{ check "SA9004" }} flags groups of constants where only the first one is given an explicit type. +

+ +

+ {{ check "SA1025" }} checks for incorrect uses of (*time.Timer).Reset. +

+ +

Changed checks

+ +

+ Several checks have been tweaked, either making them more accurate or finding more issues. +

+ +

+ {{ check "S1002" }} no longer applies to code in tests. + While if aBool == true is usually an anti-pattern, + it can feel more natural in unit tests, + as it mirrors the if got != want pattern. +

+ +

+ {{ check "S1005" }} now flags for x, _ := range because of the unnecessary blank assignment. +

+ +

+ {{ check "S1007" }} no longer suggests using raw strings for regular expressions containing backquotes. +

+ +

+ {{ check "S1016" }} now considers the targeted Go version. + It will no longer suggest type conversions between struct types with different field tags + unless Go 1.8 or later is being targeted. +

+ +

+ {{ check "SA1000" }} now checks arguments passed to the regexp.Match class of functions. +

+ +

+ {{ check "SA1014" }} now checks arguments passed to (*encoding/xml.Decoder).DecodeElement. +

+ +

+ {{ check "SA6002" }} now realizes that unsafe.Pointer is a pointer. +

+ +

+ {{ check "U1000" }} has fewer false positives in the presence of embedding. +

+ +

Removed checks

+

+ {{ check "S1013" }} has been removed, + no longer suggesting replacing if err != nil { return err }; return nil with return err. + This check has been the source of contention and more often than not, it reduced the consistency of the surrounding code. +

+ +

Deprecation notices

+ +

+ This release deprecates various features of staticcheck. + These features will be removed in the next release. +

+ +

+ As already explained earlier, the unused, gosimple, and megacheck tools + have been replaced by staticcheck. + Similarly, the flags -<tool>.enabled and -<tool>.exit-non-zero + have been replaced by -checks and -fail. + Finally, the -ignore flag has been replaced + by linter directives. +

+ +

Binary releases

+ +

+ Beginning with this release, we're publishing + prebuilt binaries to GitHub. + These releases still require a functioning Go installation in order to operate, however. +

+ +

Other changes

+ +

+ We've removed the -min_confidence flag. + This flag hasn't been doing anything for years. +

+ +

+ A new formatter called Stylish + (usable with -f stylish) + provides output that is designed for easier consumption by humans. +

+ +

+ Due to the restructuring of checkers, the checker field in JSON output has been replaced + with the severity field. +

+ +

Staticcheck 2019.1.1 Release Notes

+ +

+ The 2019.1.1 release of Staticcheck is the first bug fix release, fixing several bugs and improving performance. +

+ +

Changes

+
    +
  • + The ST category of checks no longer flags style issues of + aliased types when the aliased type exists in a package + we aren't explicitly checking. This avoids crashes and + erratic error reports.
  • +
  • + Compiler errors now have correct position information. +
  • +
  • + A crash in the Stylish reporter has been fixed. +
  • +
  • + We no longer flag unused objects that belong to cgo internals. +
  • +
  • + The {{ check "U1000" }} check has been optimized, reducing its memory + usage and runtime. +
  • +
From 9ede6c3c68259bdcf189bc2ad794dd2da59fa629 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 14:49:14 +0200 Subject: [PATCH 216/254] doc: add "Output formatters" and "Options" --- doc/formatters.html | 80 +++++++++++++++++++++++++++++++++++++++++++++ doc/options.html | 60 ++++++++++++++++++++++++++++++++++ 2 files changed, 140 insertions(+) create mode 100644 doc/formatters.html create mode 100644 doc/options.html diff --git a/doc/formatters.html b/doc/formatters.html new file mode 100644 index 000000000..1ab7d0fa6 --- /dev/null +++ b/doc/formatters.html @@ -0,0 +1,80 @@ + + +

Text

+ +

+ Text is the default output formatter. + It formats problems using the following format: file:line:col: message. + This format is commonly used by compilers and linters, + and is understood by most editors. +

+ +

Example output

+
go/src/fmt/print.go:1069:15: this value of afterIndex is never used (SA4006)
+ +

Stylish

+ +

+ Stylish is a formatter designed for human consumption. + It groups results by file name + and breaks up the various pieces of information into columns. + Additionally, it displays a final summary. +

+ +

+ This output format is not suited for automatic consumption by tools + and may change between versions. +

+ +
go/src/fmt/fmt_test.go
+  (43, 2)     S1021   should merge variable declaration with assignment on next line
+  (1185, 10)  SA9003  empty branch
+
+go/src/fmt/print.go
+  (77, 18)    ST1006  methods on the same type should have the same receiver name (seen 3x "b", 1x "bp")
+  (1069, 15)  SA4006  this value of afterIndex is never used
+
+go/src/fmt/scan.go
+  (465, 5)  ST1012  error var complexError should have name of the form errFoo
+  (466, 5)  ST1012  error var boolError should have name of the form errFoo
+
+ ✖ 6 problems (6 errors, 0 warnings)
+
+ +

JSON

+ +

+ The JSON formatter emits one JSON object per problem found – + that is, it is a stream of objects, not an array. + Most fields should be self-explanatory. +

+ +

+ The severity field may be one of + "error", + "warning" or + "ignored". + Whether a problem is an error or a warning is determined + by the -fail flag. + The value "ignored" is used for problems that were ignored, + if the -show-ignored flag was provided. +

+ +

Example output

+

+ Note that actual output is not formatted nicely. + The example has been formatted to improve readability. +

+ +
{
+  "code": "SA4006",
+  "severity": "error",
+  "location": {
+    "file": "/home/dominikh/go/src/fmt/print.go",
+    "line": 1069,
+    "column": 15
+  },
+  "message": "this value of afterIndex is never used"
+}
diff --git a/doc/options.html b/doc/options.html new file mode 100644 index 000000000..5e4075bd1 --- /dev/null +++ b/doc/options.html @@ -0,0 +1,60 @@ + + +

checks

+ +

+ This option sets which checks should be enabled. + By default, most checks will be enabled, except for those that are too opinionated or that only apply to packages in certain domains. +

+ +

+ All supported checks can be enabled with "all". + Subsets of checks can be enabled via prefixes and the * glob; for example, "S*", "SA*" and "SA1*" will + enable all checks in the S, SA and SA1 subgroups respectively. + Individual checks can be enabled by their full IDs. + To disable checks, prefix them with a minus sign. This works on all of the previously mentioned values. +
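+
+  For example, the following keeps everything enabled except the ST group
+  and one individual check:
+
+      checks = ["all", "-ST*", "-SA1019"]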

+ +

+ Default value: ["all", "-ST1003"] +

+ +

initialisms

+ +

+ ST1003 checks, among other + things, for the correct capitalization of initialisms. The + set of known initialisms can be configured with this option. +

+ +

+ Default value: ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "QPS", "RAM", "RPC", "SLA", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS"] +

+ +

dot_import_whitelist

+ +

+ By default, ST1001 forbids + all uses of dot imports in non-test packages. This + setting allows setting a whitelist of import paths that can + be dot-imported anywhere. +

+ +

+ Default value: [] +

+ +

http_status_code_whitelist

+ +

+ ST1013 recommends using constants from the net/http package + instead of hard-coding numeric HTTP status codes. This + setting specifies a list of numeric status codes that this + check does not complain about. +

+ +

+ Default value: ["200", "400", "404", "500"] +

From fd167cec488991fa124699d9ec573cce648ae595 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 14:50:55 +0200 Subject: [PATCH 217/254] doc: add skeleton for 2019.2 release notes --- doc/2019.2.html | 97 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 doc/2019.2.html diff --git a/doc/2019.2.html b/doc/2019.2.html new file mode 100644 index 000000000..da398e9d6 --- /dev/null +++ b/doc/2019.2.html @@ -0,0 +1,97 @@ +- switched to go/analysis +- much faster, much lower memory usage, caching +- easier for other runners to integrate staticcheck +- print stats on SIGINFO / SIGUSR1 +- complete rewrite of `unused` +- deleted errcheck +- new -check flag +- -debug.version +- we're a module now + - no semver + - own proxy +- mention GitHub Sponsors +- show benchmarks of 2019.1 vs 2019.2 vs golangci-lint +- range information + +

Removal of previously deprecated functionality

+ +

+ Staticcheck 2019.1 deprecated the unused, gosimple, and megacheck + utilities, as they have been merged into staticcheck. This release + no longer includes these utilities. Additionally, the -ignore + flag has been removed. +

+ +

Checks

+ +Numerous new checks have been added in this release. + +

New checks

+

+ S1033 - unnecessary guard around call to delete +

+ +

+ S1034 simplifies type switches involving redundant type assertions. +

+ +

+ SA1026 flags attempts at marshaling invalid types. +

+ +

+ SA1027 flags incorrectly aligned atomic accesses. +

+ +

+ SA4020 flags unreachable case clauses in type switches. +

+ +

+ SA4021 flags calls to append with a single argument, + as x = append(y) is equivalent to x = y. +

+ +

+ SA5008 flags certain kinds of invalid struct tags. +

+ +

+ SA5009 verifies the correctness of Printf calls. +

+ +

+ SA6005 flags inefficient string comparisons involving + strings.ToLower or strings.ToUpper when they can be replaced with + strings.EqualFold. +

+ +

+ SA9005 flags attempts at marshaling structs with no public fields + nor custom marshaling. +

+ +

+ ST1017 flags so-called yoda conditions, + which take the form of if 42 == x. +

+ +

+ ST1018 flags string literals containing zero-width characters. +

+ + +

Changed checks

+* SA4000 - staticcheck: don't flag identical lhs/rhs for custom float types +* 58d974c - staticcheck: don't flag `for false {}` +* d36bf90 - staticcheck: don't flag 0 == 0 generated by cgo +* 8a21b10 - stylecheck: allow error messages to start with type names +* 090066f - simple: don't merge declaration and assignments if there are multiple assignments +* d3cee46 - config: add SIP and RTP as default initialisms +* 466a047 - staticcheck: flag imports of deprecated packages +* 072eb0b - simple: fix false positives on guarded delete check with else-statements +* 6f3d766 - staticcheck: also flag Fprintf in SA1006 +* 51b3bec - staticcheck: flag more pointless comparisons in SA4003 +* 5ad80af - simple: catch unnecessary nil check around type assertion +* 1297f1d - simple: extend S1017 to match more unnecessary guards +* 30f18e4 - deprecated: update list From 5a4a2f4a438d01ba03c591f88ef312005a05063b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 14:51:19 +0200 Subject: [PATCH 218/254] doc: add skeleton for "Customizing staticcheck" article --- doc/articles/customizing_staticcheck.html | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 doc/articles/customizing_staticcheck.html diff --git a/doc/articles/customizing_staticcheck.html b/doc/articles/customizing_staticcheck.html new file mode 100644 index 000000000..81ad6d8e1 --- /dev/null +++ b/doc/articles/customizing_staticcheck.html @@ -0,0 +1,11 @@ +- how to customize staticcheck +- tools serve humans +- tools should assist workflows +- don't let tools bully you + +- exit status +- which checks run +- ignoring findings +- output format +- go version +- tests From 8c238bbef21b0709476a918e439606002b6ea650 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 15:12:54 +0200 Subject: [PATCH 219/254] doc: add utilities.html --- doc/utilities.html | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 doc/utilities.html diff --git a/doc/utilities.html b/doc/utilities.html new file mode 100644 index 000000000..4834a4d01 --- /dev/null +++ b/doc/utilities.html @@ -0,0 +1,43 @@ + + +

Overview

+ +

+ In addition to staticcheck, we provide a number of smaller + utilities. Some of them are useful for integrating + staticcheck into your workflows, while others provide + specific code insights.

+ +

+ All of these utilities can be used manually from the command line, + but their true purpose is integration with scripts and editors. +

+ +

keyify

+

Coming soon

+ +

rdeps

+

+ The rdeps utility takes a list of Go packages and returns their + reverse dependencies. One application for this tool is continuous integration, + where only those packages that are affected by a commit need to be tested.

+ +

+ Packages can be provided as arguments, or on standard input if the + -stdin flag is provided. If the -r flag is + used, reverse dependencies will be printed recursively. +
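+
+  For example (the import path is illustrative):
+
+      $ rdeps -r github.com/example/foo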

+ + +

structlayout

+

Coming soon

+ +

structlayout-optimize

+

Coming soon

+

structlayout-pretty

+

Coming soon

From 5f32417de656520a5b4648ca805580554004277b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 21:24:36 +0200 Subject: [PATCH 220/254] doc: work on 2019.2 release notes --- doc/2019.2.html | 111 ++++++++++++++++++------------------------------ 1 file changed, 42 insertions(+), 69 deletions(-) diff --git a/doc/2019.2.html b/doc/2019.2.html index da398e9d6..4bc2501a5 100644 --- a/doc/2019.2.html +++ b/doc/2019.2.html @@ -3,7 +3,6 @@ - easier for other runners to integrate staticcheck - print stats on SIGINFO / SIGUSR1 - complete rewrite of `unused` -- deleted errcheck - new -check flag - -debug.version - we're a module now @@ -13,85 +12,59 @@ - show benchmarks of 2019.1 vs 2019.2 vs golangci-lint - range information -

Removal of previously deprecated functionality

+

Removal of functionality

Staticcheck 2019.1 deprecated the unused, gosimple, and megacheck - utilities, as they have been merged into staticcheck. This release - no longer includes these utilities. Additionally, the -ignore - flag has been removed. + utilities, as they have been merged into staticcheck. Furthermore, it deprecated the -ignore flag, + which has been replaced by linter directives. + + This release no longer includes these deprecated utilities, nor does + it provide the deprecated flag. Additionally, the errcheck utility + has been removed. It has never been an official or production-ready + part of staticcheck.

Checks

-Numerous new checks have been added in this release. -

New checks

-

- S1033 - unnecessary guard around call to delete -

- -

- S1034 simplifies type switches involving redundant type assertions. -

- -

- SA1026 flags attempts at marshaling invalid types. -

-

- SA1027 flags incorrectly aligned atomic accesses. -

+Numerous new checks have been added in this release: + +
    +
  • {{ check "S1033" }} flags unnecessary guards around calls to delete.
  • +
  • {{ check "S1034" }} simplifies type switches involving redundant type assertions.
  • +
  • {{ check "SA1026" }} flags attempts at marshaling invalid types.
  • +
  • {{ check "SA1027" }} flags incorrectly aligned atomic accesses.
  • +
  • {{ check "SA4020" }} flags unreachable case clauses in type switches.
  • +
  • {{ check "SA4021" }} flags calls to append with a single argument, as x = append(y) is equivalent to x = y.
  • +
  • {{ check "SA5008" }} flags certain kinds of invalid struct tags.
  • +
  • {{ check "SA5009" }} verifies the correctness of Printf calls.
  • +
  • + {{ check "SA6005" }} flags inefficient string comparisons + involving strings.ToLower + or strings.ToUpper when they can be replaced with strings.EqualFold. +
  • +
  • {{ check "SA9005" }} flags attempts at marshaling structs with no public fields nor custom marshaling.
  • +
  • + {{ check "ST1017" }} flags so-called yoda conditions, + which take the form of if 42 == x. +
  • +
  • {{ check "ST1018" }} flags string literals containing zero-width characters.
  • +
-

- SA4020 flags unreachable case clauses in type switches. -

-

- SA4021 flags calls to append with a single argument, - as x = append(y) is equivalent to x = y. -

- -

- SA5008 flags certain kinds of invalid struct tags. -

- -

- SA5009 verifies the correctness of Printf calls. -

- -

- SA6005 flags inefficient string comparisons involving - strings.ToLower or strings.ToUpper when they can be replaced with - strings.EqualFold. -

- -

- SA9005 flags attempts at marshaling structs with no public fields - nor custom marshaling. -

- -

- ST1017 flags so-called yoda conditions, - which take the form of if 42 == x. -

+

Changed checks

-

- ST1018 flags string literals containing zero-width characters. -

+Several checks have been improved: +
    +
  • {{ check "SA1019" }} now flags imports of deprecated packages.
  • +
  • {{ check "SA4000" }} no longer flags comparisons between custom float types. Additionally, it avoids a false positive caused by cgo.
  • +
  • {{ check "ST1005" }} no longer flags error messages that start with capitalized type names.
  • +
  • {{ check "SA5002" }} no longer suggests replacing for false { with for {.
  • +
  • Added "SIP" and "RTP" as default initialisms to {{ check "ST1003" }}.
  • +
  • {{ check "SA1006" }}, {{ check "SA4003" }}, {{ check "S1017" }}, and {{ check "S1020" }} match more code patterns.
  • +
  • {{ check "S1021" }} is less eager to merge declarations and assignments when multiple assignments are involved.
  • +
-

Changed checks

-* SA4000 - staticcheck: don't flag identical lhs/rhs for custom float types -* 58d974c - staticcheck: don't flag `for false {}` -* d36bf90 - staticcheck: don't flag 0 == 0 generated by cgo -* 8a21b10 - stylecheck: allow error messages to start with type names -* 090066f - simple: don't merge declaration and assignments if there are multiple assignments -* d3cee46 - config: add SIP and RTP as default initialisms -* 466a047 - staticcheck: flag imports of deprecated packages -* 072eb0b - simple: fix false positives on guarded delete check with else-statements -* 6f3d766 - staticcheck: also flag Fprintf in SA1006 -* 51b3bec - staticcheck: flag more pointless comparisons in SA4003 -* 5ad80af - simple: catch unnecessary nil check around type assertion -* 1297f1d - simple: extend S1017 to match more unnecessary guards -* 30f18e4 - deprecated: update list From 4edfa6ba4904d034ac9dc9735023dc764f29e839 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 2 Jun 2019 21:24:50 +0200 Subject: [PATCH 221/254] doc: add main staticcheck documentation --- doc/staticcheck.html | 394 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 394 insertions(+) create mode 100644 doc/staticcheck.html diff --git a/doc/staticcheck.html b/doc/staticcheck.html new file mode 100644 index 000000000..38e1c7243 --- /dev/null +++ b/doc/staticcheck.html @@ -0,0 +1,394 @@ + + + +

Overview

+ +

+ Staticcheck is a static analysis toolset for the Go programming language. + It comes with a large number of checks, + integrates with various Go build systems + and offers enough customizability to fit into your workflows. +

+ +

Running staticcheck

+ +

+ Staticcheck can be run on code in several ways, + mimicking the way the official Go tools work. + At the core, it expects to be run on well-formed Go packages. + The most common way of specifying packages is via their import paths. + One or more packages can be specified in a single command, + and the ... glob operator is supported. + All of the following examples are valid invocations: + +

staticcheck github.com/example/foo
+staticcheck github.com/example/foo github.com/example/bar
+staticcheck github.com/example/...
+

+ +

+ In addition, a single package can be specified as a list of files: + +

staticcheck file1.go file2.go file3.go
+ + Note that all files of the package need to be specified, + similar to how go build works. +

+ +

Configuration

+ +

+ Various aspects of staticcheck can be customized with configuration files. +

+ +

+ These files are placed in Go packages and apply recursively to the package tree rooted at the containing package. + For example, configuration placed in pkg will apply to pkg, pkg/subpkg, pkg/subpkg/subsubpkg and so on. +

+ +

+ Configuration files in subpackages can override or inherit from settings of configuration files higher up the package tree. + Staticcheck's default configuration is represented as the virtual root of the configuration tree and can be inherited from. +

+ +

Configuration format

+ +

+ Staticcheck configuration files are named staticcheck.conf and contain TOML. +

+ +

+ Any set option will override the same option from further up the package tree, + whereas unset options will inherit their values. + Additionally, the special value "inherit" can be used to inherit values. + This is especially useful for array values, as it allows adding and removing values to the inherited option. +

+ +

+ The special value "all" matches all possible values. + Currently, this is only used when enabling checks. +

+ +

+ Values prefixed with a minus sign, + such as "-S1000" + will exclude values from a list. + This can be used in combination with "all" to express "all but", + or in combination with "inherit" to remove values from the inherited option. +
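+
+  For example, a package's configuration can keep everything it inherits and
+  additionally disable a single check:
+
+      checks = ["inherit", "-S1000"]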

+ +

Options

+ +

+ A list of all options and their explanations can be found on the Options page. +

+ +

Example configuration

+ +

+ The following example configuration is the textual representation of staticcheck's default configuration. +

+ +
{{ option "checks" }} = ["all", "-{{ check "ST1000" }}", "-{{ check "ST1003" }}", "-{{ check "ST1016" }}"]
+{{ option "initialisms" }} = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS",
+	"EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID",
+	"IP", "JSON", "QPS", "RAM", "RPC", "SLA",
+	"SMTP", "SQL", "SSH", "TCP", "TLS", "TTL",
+	"UDP", "UI", "GID", "UID", "UUID", "URI",
+	"URL", "UTF8", "VM", "XML", "XMPP", "XSRF",
+	"XSS"]
+{{ option "dot_import_whitelist" }} = []
+{{ option "http_status_code_whitelist" }} = ["200", "400", "404", "500"]
+ +

Command-line flags

+ +

+ In addition to configuration files, some aspects of staticcheck can be controlled via command-line flags. + These are settings that can vary between individual invocations or environments (CI, editors, ...) and shouldn't be stored in configuration files. +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FlagDescription
-checks + Allows overriding the list of checks to run. + Has the same syntax as the checks setting + in configuration files. +
-f + Select between the different output formats. +
-fail + Specify the list of checks which, + if they find any issues in your code, + should cause staticcheck to exit with a non-zero status. + This can be used, for example, to not fail your CI + pipeline because of possible code simplifications.
-go + Select the Go version to target. + See + Targeting Go versions + for more details. +
-show-ignored + Show all problems found, + even those that were ignored by linter directives. +
-tags + Similar to go build -tags, + allows specifying the build tags to use.
-tests + Include tests in the analysis. +
-version + Display the version of staticcheck and exit. +
+ +

Targeting Go versions

+ +

+ By default, staticcheck will make suggestions that are correct for the current version of Go. + If you wish to support older versions of Go, + not all suggestions are applicable – + some simplifications are only valid for newer versions of Go + and deprecated functions may not have had viable alternatives in older versions.

+ +

+ To target a specific Go version you can use the -go command line flag. + For example, with -go 1.6, only suggestions that are valid for Go 1.6 will be made. +

+ +

Ignoring problems

+ +

+ In general, you shouldn't have to ignore problems reported by staticcheck. + Great care is taken to minimize the number of false positives and subjective suggestions. + Dubious code should be rewritten and genuine false positives should be reported so that they can be fixed. +

+ +

+ The reality of things, however, is that not all corner cases can be taken into consideration. + Sometimes code just has to look weird enough to confuse tools, + and sometimes suggestions, though well-meant, just aren't applicable. + For those rare cases, there are several ways of ignoring unwanted problems. +

+ +

Line-based linter directives

+ +

+ The most fine-grained way of ignoring reported problems is to annotate the offending lines of code with linter directives. +

+ +

+ The //lint:ignore Check1[,Check2,...,CheckN] reason directive + ignores one or more checks on the following line of code. + The reason is a required field + that must describe why the checks should be ignored for that line of code. + This field acts as documentation for other people (including future you) reading the code. +

+ +

+ Let's consider the following example, + which intentionally checks that the results of two identical function calls are not equal: + +

func TestNewEqual(t *testing.T) {
+  if errors.New("abc") == errors.New("abc") {
+    t.Errorf(`New("abc") == New("abc")`)
+  }
+}
+

+ +

+ {{ check "SA4000" }} of staticcheck + will flag this code, + pointing out that the left and right side of == are identical – + usually indicative of a typo and a bug. +

+ +

+ To silence this problem, we can use a linter directive: + +

func TestNewEqual(t *testing.T) {
+  //lint:ignore SA4000 we want to make sure that no two results of errors.New are ever the same
+  if errors.New("abc") == errors.New("abc") {
+    t.Errorf(`New("abc") == New("abc")`)
+  }
+}
+

+ +
Maintenance of linter directives
+ +

+ It is crucial to update or remove outdated linter directives when code has been changed. + Staticcheck helps you with this by making unnecessary directives a problem of its own. + For example, for this (admittedly contrived) snippet of code + +

//lint:ignore SA1000 we love invalid regular expressions!
+regexp.Compile(".+")
+ + staticcheck will report the following: + +
tmp.go:1:2: this linter directive didn't match anything; should it be removed?
+

+ +

+ Checks that have been disabled via configuration files + will not cause directives to be considered unnecessary. +

+ +

File-based linter directives

+ +

+ In some cases, you may want to disable checks for an entire file. + For example, code generation may leave behind a lot of unused code, + as it simplifies the generation process. + Instead of manually annotating every instance of unused code, + the code generator can inject a single, file-wide ignore directive to ignore the problem. +

+ +

+ File-based linter directives look a lot like line-based ones: + +

//lint:file-ignore U1000 Ignore all unused code, it's generated
+

+ +

+ The only difference is that these comments aren't associated with any specific line of code. + Conventionally, these comments should be placed near the top of the file. +

+ +

+ Unlike line-based directives, file-based ones will not be flagged for being unnecessary. +

+ +

Resource usage

+ +

+ Static analysis is a rather resource intensive process, + having to apply expensive algorithms on a lot of data. + Depending on the complexity of the checked code, + this can result in many gigabytes of memory usage and minutes (if not hours) of CPU time. +

+ +

+ When using staticcheck, there are two different ways of running it: + one package per invocation, or many packages per invocation. + These two modes have opposite CPU and memory usage characteristics. +

+ +

+ Checking one package per invocation will keep the memory usage low, + but may end up recompiling the same packages over and over again, wasting CPU time. +

+ +

+ Checking many packages, on the other hand, + needs to keep more data in memory at once, + but can reuse compiled packages, saving CPU time. +

+ +

+ The following tables will show the characteristics of the different modes. +

+ + + + + + + + + + + + + + + + + + + + + + +
Resource consumption in different operation modes
ModeCPU usageMemory usage
One package per invocationHighLow
Many packages per invocationLowHigh
+ + + + + + + + + + + + + + + + + + + + + + + + + +
Using staticcheck to check the Go standard library
ModeCPU time spentWall time spentMax memory used
One package per invocation1692%2241%28%
All packages in one invocation (reference value)100%100%100%
+ +

+ The maximum memory usage of a single invocation depends + on the combined complexity of the checked packages and their dependencies. + The entire standard library combined requires roughly 2.31 GB of memory, + while the most complex package, net/http, only needs 640 MB. + Other packages, such as the tiny image/color/palette, may only need as little as 17 MB. +

+ +

+ Checking packages individually reduces both the maximum and the average (over time) memory usage, + but requires a much greater amount of CPU time, + as shared dependencies have to be compiled over and over again. + You should use the method that best suits your available resources. +

+ + + +

Checks

+ +A list of all checks can be found on the Checks page. From 63e9ff576adb1c03487a48bb6cbfc9a6aff931aa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Mon, 3 Jun 2019 15:09:22 +0200 Subject: [PATCH 222/254] doc: finish first draft of 2019.2 release notes --- doc/2019.2.html | 356 +++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 340 insertions(+), 16 deletions(-) diff --git a/doc/2019.2.html b/doc/2019.2.html index 4bc2501a5..de0a5076c 100644 --- a/doc/2019.2.html +++ b/doc/2019.2.html @@ -1,24 +1,305 @@ -- switched to go/analysis -- much faster, much lower memory usage, caching -- easier for other runners to integrate staticcheck -- print stats on SIGINFO / SIGUSR1 -- complete rewrite of `unused` -- new -check flag -- -debug.version -- we're a module now - - no semver - - own proxy -- mention GitHub Sponsors -- show benchmarks of 2019.1 vs 2019.2 vs golangci-lint -- range information +

Performance improvements

+ +

+ Staticcheck 2019.2 brings major performance improvements and a + reduction in memory usage. +

+ +

+ Staticcheck has been redesigned to only keep those packages in memory that are actively being processed. + This allows for much larger workspaces to be checked in one go. + While previously it may have been necessary to split a list of packages into many invocations of staticcheck, + this is now handled intelligently and efficiently by staticcheck itself. +

+ +

+ In particular, memory usage is now closely tied to parallelism: + having more CPU cores available allows for more packages to be processed in parallel, + which increases the number of packages held in memory at any one time. + Not only does this make good use of available resources – + systems with more CPU cores also tend to have more memory available – + it also exposes a single, easy to use knob for trading execution time for memory use. + By setting GOMAXPROCS to a value lower than the number of available cores, + memory usage of staticcheck will be reduced, at the cost of taking longer to complete. +
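+
+  For example, the following roughly halves the number of packages processed
+  at any one time on an eight-core machine, trading a longer run for a
+  smaller peak memory footprint:
+
+      GOMAXPROCS=4 staticcheck ./...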

+ +

+ We've observed reductions in memory usage of 2x to 8x when checking large code bases. + Due to the reduced load on the garbage collector, the execution time often improves, too. +

+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Package2019.1.12019.2¹Change
net/http3.374 s / 680 MB3.687 s / 248 MB+9.28% / -63.53%
strconv1.511 s / 294 MB1.655 s / 120 MB+9.53% / -59.18%
image/color1.296 s / 224 MB1.631 s / 133 MB+25.85% / -40.63%
std25.164 s / 4028 MB19.028 s / 1063 MB-24.38% / -73.61%
github.com/cockroachdb/cockroach/pkg/...82.090 s / 15718 MB91.410 s / 3941 MB+11.35% / -74.93 %
+ ¹: The fact cache was empty for all benchmarks. +
+

+ +

+ In addition, staticcheck now employs caching to speed up repeated checking of packages. + In the past, when checking a package, all of its dependencies had to be loaded from source and analyzed. + Now, we can make use of Go's build cache, as well as cache our own analysis facts. + This makes staticcheck behave a lot more like go build, where repeated builds are much faster. +

+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
PackageUncachedCachedChange
net/http3.687 s / 248 MB1.507 s / 199 MB-59.13% / -19.76%
strconv1.655 s / 120 MB0.454 s / 57 MB-72.57% / -52.5%
image/color1.631 s / 133 MB0.324 s / 31 MB-80.13% / -76.69%
std19.028 s / 1063 MB14.308 s / 900 MB-24.81% / -15.33%
github.com/cockroachdb/cockroach/pkg/...91.410 s / 3941 MB45.840 s / 2800 MB-49.85% / -28.95%
+

+ +

+ This combination of improvements not only compensates for the + increased memory usage that 2019.1 introduced, it also brings + memory usage and execution times well below the levels seen in the + 2017.2 release, which had previously been our most efficient + release.

+ +

+ It should be noted that all of these improvements are part of the staticcheck command itself, not the individual checks. + Tools such as golangci-lint will have to replicate our efforts to benefit from these improvements. +

+ +

The go/analysis framework

+ +

+ Part of the redesign of staticcheck involved porting our code to the go/analysis framework. +

+ +

+The go/analysis framework is a framework for writing static analysis tools such as staticcheck and go vet. +It provides an API that enables interoperability between different analyses and analysis drivers – drivers being the code that actually executes analyses. + The intention is that any driver can trivially use any analysis that is implemented using go/analysis. +

+ +

+ With the exception of {{ check "U1000" }}, all of our checks are now go/analysis analyses. Furthermore, the staticcheck command is now a go/analysis driver. +
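+
+  For readers unfamiliar with the framework, a stand-alone analyzer is little
+  more than the following sketch (the analyzer itself is a made-up example,
+  not part of staticcheck):
+
+      package main
+
+      import (
+          "golang.org/x/tools/go/analysis"
+          "golang.org/x/tools/go/analysis/singlechecker"
+      )
+
+      var Analyzer = &analysis.Analyzer{
+          Name: "example",
+          Doc:  "reports nothing; it only demonstrates the API",
+          Run: func(pass *analysis.Pass) (interface{}, error) {
+              // inspect pass.Files / pass.TypesInfo and report via pass.Reportf
+              return nil, nil
+          },
+      }
+
+      func main() { singlechecker.Main(Analyzer) }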

+ +

+ With our move to this framework we enable other drivers to reuse our checks without having to patch them. + This should be of particular interest to golangci-lint, which previously took to patching staticcheck, sometimes in subtly incorrect ways. + Another high-profile go/analysis driver is gopls, the Go language server. It will now be much easier for gopls to use staticcheck to analyze code, should it so desire. +

+ +

+ Theoretically it would also allow us to use third-party analyses as part of staticcheck. + Due to quality control reasons, however, we will likely refrain from doing so. + Nonetheless it would be trivial for users to maintain internal forks of cmd/staticcheck that use third-party analyses. +

+ +

Improvements to the CLI

+ +

+ We've made several minor improvements to the command-line interface of staticcheck that improve usability and debuggability. +

+ +

SIGINFO handler

+ +

+ Upon receiving the SIGINFO signal – or SIGUSR1 on platforms that lack + SIGINFO – staticcheck will dump statistics, such as the current phase + and how many packages are left to analyze. +
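+
+  On systems without a SIGINFO key binding, the signal can be sent manually,
+  for example:
+
+      kill -USR1 $(pgrep staticcheck)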

+ +

+Packages: 37/619 initial, 38/1011 total; Workers: 8/8; Problems: 73
+
+ +

Explaining checks

+ +

+ Using the new -explain flag, a check's documentation can be displayed right in the terminal, + eliminating the need to browse to https://staticcheck.io/docs/checks. +

+ +

+$ staticcheck -explain S1007
+Simplify regular expression by using raw string literal
+
+Raw string literals use ` instead of " and do not support
+any escape sequences. This means that the backslash (\) can be used
+freely, without the need of escaping.
+
+Since regular expressions have their own escape sequences, raw strings
+can improve their readability.
+
+Before:
+
+    regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z")
+
+After:
+
+    regexp.Compile(`\A(\w+) profile: total \d+\n\z`)
+
+Available since
+    2017.1
+
+ +

-debug.version

+ +

+ The -debug.version flag causes staticcheck to print + detailed version information, such as the Go version used to compile + it, as well as the versions of all dependencies if built using Go + modules. This feature is intended for debugging issues, and we will + ask for its output from users who file issues. +

+ +

+$ staticcheck -debug.version
+staticcheck (devel, v0.0.0-20190602125119-5a4a2f4a438d)
+
+Compiled with Go version: go1.12.5
+Main module:
+	honnef.co/go/tools@v0.0.0-20190602125119-5a4a2f4a438d (sum: h1:U5vSGN1Bjr0Yd/4pRcp8iRUCs3S5TIPzoAeTEFV2aiU=)
+Dependencies:
+	github.com/BurntSushi/toml@v0.3.1 (sum: h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=)
+	golang.org/x/tools@v0.0.0-20190530171427-2b03ca6e44eb (sum: h1:mnQlcVx8Qq8L70HV0DxUGuiuAtiEHTwF1gYJE/EL9nU=)
+
+ +

Enabling unused's whole program mode

+ +When we merged unused into staticcheck, we lost the ability to specify the -exported flag to report unused exported identifiers. +Staticcheck 2019.2 restores this ability with the new -unused.whole-program flag. + +
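+
+  An invocation then looks something like this:
+
+      staticcheck -unused.whole-program ./...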

Range information in diagnostics

+ +

+ Many of our checks now emit [start, end] ranges for findings instead of just positions. + These ranges can be accessed via the json output formatter, as well as by using go/analysis.Diagnostic directly, such as in gopls. +

+ +

+ Note that not all checks are able to emit range information. +

+ +

Installing staticcheck as a module

+ +

+ As part of the 2019.2 release we've turned staticcheck into a Go module. + From now on, you can install specific versions of staticcheck with go get honnef.co/go/tools/cmd/staticcheck@<version>, + though do note that older releases do not have a go.mod file. + You can still download them as modules, but Go will record indirect dependencies in the main module's go.mod file, and no minimum versions are specified. +

+ +

+ Staticcheck will not use Semantic Versioning. + It is our belief that Semver is a poor fit for applications and is more suited towards libraries. + In particular, almost every release of staticcheck has backwards incompatible changes to some APIs that aren't meant for public consumption, + but which we expose nevertheless so that tinkerers can use them. +

+ +

+ However, honnef.co/go runs a custom module proxy so that + go get honnef.co/go/tools/cmd/staticcheck will download the latest released version of staticcheck, and not the master branch. + That way, even though we don't use Semver, you will still get fixed, known working versions of staticcheck. + To download the master branch, use go get honnef.co/go/tools/cmd/staticcheck@master +

Removal of functionality

Staticcheck 2019.1 deprecated the unused, gosimple, and megacheck utilities, as they have been merged into staticcheck. Furthermore, it deprecated the -ignore flag, - which has been replaced by linter directives. + which has been replaced by linter directives. +

+

This release no longer includes these deprecated utilities, nor does it provide the deprecated flag. Additionally, the errcheck utility has been removed. It has never been an official or production-ready @@ -29,7 +310,9 @@

Checks

New checks

-Numerous new checks have been added in this release: +

+ Numerous new checks have been added in this release: +

  • {{ check "S1033" }} flags unnecessary guards around calls to delete.
  • @@ -56,7 +339,9 @@

    New checks

    Changed checks

    -Several checks have been improved: +

    + Several checks have been improved: +

    • {{ check "SA1019" }} now flags imports of deprecated packages.
    • @@ -66,5 +351,44 @@

      Changed checks

    • Added "SIP" and "RTP" as default initialisms to {{ check "ST1003" }}.
    • {{ check "SA1006" }}, {{ check "SA4003" }}, {{ check "S1017" }}, and {{ check "S1020" }} match more code patterns.
    • {{ check "S1021" }} is less eager to merge declarations and assignments when multiple assignments are involved.
    • +
    • {{ check "U1000" }} has been rewritten, eliminating a variety of false positives.
    +

    Sustainable open source and a personal plea

    + +

    + Staticcheck is an open source project developed primarily by me, Dominik Honnef, in my free time. + While this model of software development has gotten increasingly common, it is not very sustainable. + Time has to be split between open source work and paid work to sustain one's life. + This is made especially unfortunate by the fact that hundreds of companies rely on open source each day, + but few consider giving back to it, even though it would directly benefit their business, + ensuring that the software they rely on keeps being developed. +

    + +

    + I have long been soliciting donations for staticcheck on Patreon to make its development more sustainable. + A fair number of individuals have generously pledged their support and I am very grateful to them. + Unfortunately, only few companies support staticcheck's development, and I'd like for that to change. +

    + +

    + To people who are familiar with Patreon, it might've always seemed like an odd choice for a software project. + Patreon focuses on art and creative work, and on individuals supporting said work, not companies. + I am therefore excited to announce my participation in GitHub Sponsors, + a new way of supporting developers, directly on GitHub. +

    + +

    + GitHub Sponsors allows you to easily support developers by sponsoring them on a monthly basis, via a few simple clicks. + It is fully integrated with the platform and can use your existing billing information, making it an effortless process. + To encourage more company sponsorships I offer to display your company's logo prominently on + staticcheck's website + for + $250 USD a month, + to show my appreciation for your contribution and to show to the world how much you care about code quality. +

    + +

+ Please don't hesitate to contact me directly if neither GitHub Sponsors nor Patreon seems suitable to you but you'd like to support me nevertheless. + I am sure we can work something out. +

    From deba4d6cb4bda1e49659c522357d5604eb1974e6 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 15:27:11 +0200 Subject: [PATCH 223/254] doc: update example JSON output --- doc/formatters.html | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/doc/formatters.html b/doc/formatters.html index 1ab7d0fa6..4903fbbe5 100644 --- a/doc/formatters.html +++ b/doc/formatters.html @@ -53,11 +53,8 @@

    JSON

    The severity field may be one of - "error", - "warning" or - "ignored". - Whether a problem is an error or a warning is determined - by the -fail flag. + "error", "warning" or "ignored". + Whether a problem is an error or a warning is determined by the -fail flag. The value "ignored" is used for problems that were ignored, if the -show-ignored flag was provided.

    @@ -72,9 +69,14 @@

    Example output

    "code": "SA4006", "severity": "error", "location": { - "file": "/home/dominikh/go/src/fmt/print.go", - "line": 1069, + "file": "/usr/lib/go/src/fmt/print.go", + "line": 1082, "column": 15 }, + "end": { + "file": "/usr/lib/go/src/fmt/print.go", + "line": 1082, + "column": 25 + }, "message": "this value of afterIndex is never used" } From a52a8e57f5a3d73c22736de29da041391793666f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 15:27:25 +0200 Subject: [PATCH 224/254] doc: slight improvements to 2019.2 release notes --- doc/2019.2.html | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/2019.2.html b/doc/2019.2.html index de0a5076c..c7dbfcde6 100644 --- a/doc/2019.2.html +++ b/doc/2019.2.html @@ -161,8 +161,8 @@

    The go/analysis framework

    -The go/analysis framework is a framework for writing static analysis tools such as staticcheck and go vet. -It provides an API that enables interoperability between different analyses and analysis drivers – drivers being the code that actually executes analyses. + The go/analysis framework is a framework for writing static analysis tools such as staticcheck and go vet. + It provides an API that enables interoperability between different analyses and analysis drivers – drivers being the code that actually executes analyses. The intention is that any driver can trivially use any analysis that is implemented using go/analysis.
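To make the driver/analysis split concrete, a minimal sketch of an analyzer as a driver sees it; the check itself is a stand-in that reports nothing:

    package main

    import (
        "golang.org/x/tools/go/analysis"
        "golang.org/x/tools/go/analysis/singlechecker"
    )

    // Analyzer shows the shape of the go/analysis API: a name, a doc string,
    // and a Run function operating on a *analysis.Pass.
    var Analyzer = &analysis.Analyzer{
        Name: "noop",
        Doc:  "does nothing; for illustration only",
        Run: func(pass *analysis.Pass) (interface{}, error) {
            // A real check would walk pass.Files here and call pass.Reportf.
            return nil, nil
        },
    }

    func main() {
        // singlechecker is one of the stock drivers; any other conforming
        // driver can execute the same Analyzer without changes.
        singlechecker.Main(Analyzer)
    }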

    @@ -361,7 +361,7 @@

    Sustainable open source and a personal plea

    While this model of software development has gotten increasingly common, it is not very sustainable. Time has to be split between open source work and paid work to sustain one's life. This is made especially unfortunate by the fact that hundreds of companies rely on open source each day, - but few consider giving back to it, even though it would directly benefit their business, + but few consider giving back to it, even though it would directly benefit their businesses, ensuring that the software they rely on keeps being developed.

    From 9ada6990d089186397254dbd7ece9cfa23c0d84e Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Sun, 19 May 2019 12:46:44 +0200 Subject: [PATCH 225/254] Add Circle CI configuration --- .circleci/config.yml | 138 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 138 insertions(+) create mode 100644 .circleci/config.yml diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 000000000..bb0943f01 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,138 @@ +version: 2.1 + +download: &download + steps: + - checkout + - run: + name: "Download dependencies" + command: "go get -d -t ./..." + - run: + name: "Download staticcheck" + command: "go get honnef.co/go/tools/cmd/staticcheck" + - run: + name: "Download go-junit-report" + command: "go get github.com/jstemmer/go-junit-report" + - persist_to_workspace: + root: "/go/" + paths: + - "src/*" + - "bin/staticcheck" + - "bin/go-junit-report" + +gofmt: &gofmt + steps: + - attach_workspace: + at: "/go/" + - run: + name: "gofmt" + command: "[ -z \"$(gofmt -l .)\" ] || exit $?" + +test: &test + steps: + - attach_workspace: + at: "/go/" + - run: + name: "Run tests" + command: "go test -v ./... >/tmp/test.out" + - run: + name: "Process test results" + when: always + command: | + mkdir -p /tmp/test-results/go-test + go-junit-report < /tmp/test.out > /tmp/test-results/go-test/results.xml + cat /tmp/test.out + - store_test_results: + path: "/tmp/test-results" + +vet: &vet + steps: + - attach_workspace: + at: "/go/" + - run: + name: "Run go vet" + command: "go vet ./..." + +staticcheck: &staticcheck + steps: + - attach_workspace: + at: "/go/" + - run: + Name: "Run staticcheck" + command: "staticcheck -go 1.11 ./..." + +executors: + go11: + docker: + - image: "circleci/golang:1.11.9" + working_directory: "/go/src/honnef.co/go/tools" + go12: + docker: + - image: "circleci/golang:1.12.5" + working_directory: "/go/src/honnef.co/go/tools" + +jobs: + download12: + executor: go12 + <<: *download + gofmt12: + executor: go12 + <<: *gofmt + test12: + executor: go12 + <<: *test + vet12: + executor: go12 + <<: *vet + staticcheck12: + executor: go12 + <<: *staticcheck + + download11: + executor: go11 + <<: *download + gofmt11: + executor: go11 + <<: *gofmt + test11: + executor: go11 + <<: *test + vet11: + executor: go11 + <<: *vet + staticcheck11: + executor: go11 + <<: *staticcheck + + +workflows: + version: 2 + ci12: + jobs: + - download12 + - gofmt12: + requires: + - download12 + - test12: + requires: + - download12 + - vet12: + requires: + - download12 + - staticcheck12: + requires: + - download12 + ci11: + jobs: + - download11 + - gofmt11: + requires: + - download11 + - test11: + requires: + - download11 + - vet11: + requires: + - download11 + - staticcheck11: + requires: + - download11 From 0a11fc526260d1bcd0686bd3c9bd1167895aeea9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 16:20:22 +0200 Subject: [PATCH 226/254] all: add some staticcheck.conf files --- cmd/go-module-query/staticcheck.conf | 2 ++ ssa/staticcheck.conf | 3 +++ 2 files changed, 5 insertions(+) create mode 100644 cmd/go-module-query/staticcheck.conf create mode 100644 ssa/staticcheck.conf diff --git a/cmd/go-module-query/staticcheck.conf b/cmd/go-module-query/staticcheck.conf new file mode 100644 index 000000000..16403869c --- /dev/null +++ b/cmd/go-module-query/staticcheck.conf @@ -0,0 +1,2 @@ +# this package is WIP, unused code will occur. 
+checks = ["inherit", "-U1000"] diff --git a/ssa/staticcheck.conf b/ssa/staticcheck.conf new file mode 100644 index 000000000..d7b38bc35 --- /dev/null +++ b/ssa/staticcheck.conf @@ -0,0 +1,3 @@ +# ssa/... is mostly imported from upstream and we don't want to +# deviate from it too much, hence disabling SA1019 +checks = ["inherit", "-SA1019"] From 0abc5489195730a6c7b2723437a93a9ee25d2b7f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 17:29:12 +0200 Subject: [PATCH 227/254] lint/lintutil: remove the deprecated ignore flag --- lint/lintutil/util.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/lint/lintutil/util.go b/lint/lintutil/util.go index 0d79e7fb5..5b1a60d34 100644 --- a/lint/lintutil/util.go +++ b/lint/lintutil/util.go @@ -103,7 +103,6 @@ func FlagSet(name string) *flag.FlagSet { flags := flag.NewFlagSet("", flag.ExitOnError) flags.Usage = usage(name, flags) flags.String("tags", "", "List of `build tags`") - flags.String("ignore", "", "Deprecated: use linter directives instead") flags.Bool("tests", true, "Include tests") flags.Bool("version", false, "Print version and exit") flags.Bool("show-ignored", false, "Don't filter ignored problems") @@ -142,7 +141,6 @@ func findCheck(cs []*analysis.Analyzer, check string) (*analysis.Analyzer, bool) func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs *flag.FlagSet) { tags := fs.Lookup("tags").Value.(flag.Getter).Get().(string) - ignore := fs.Lookup("ignore").Value.(flag.Getter).Get().(string) tests := fs.Lookup("tests").Value.(flag.Getter).Get().(bool) goVersion := fs.Lookup("go").Value.(flag.Getter).Get().(int) formatter := fs.Lookup("f").Value.(flag.Getter).Get().(string) @@ -212,7 +210,6 @@ func ProcessFlagSet(cs []*analysis.Analyzer, cums []lint.CumulativeChecker, fs * ps, err := Lint(cs, cums, fs.Args(), &Options{ Tags: strings.Fields(tags), LintTests: tests, - Ignores: ignore, GoVersion: goVersion, Config: cfg, }) @@ -279,7 +276,6 @@ type Options struct { Tags []string LintTests bool - Ignores string GoVersion int } From d3c0ed50b7f16e405792638985a9078023e72753 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 17:54:38 +0200 Subject: [PATCH 228/254] doc: document new flags --- doc/staticcheck.html | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index 38e1c7243..6b03da190 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -122,6 +122,12 @@

    Command-line flags

    in configuration files. + + -explain + + Print the description of a check. + + -f @@ -131,7 +137,7 @@

    Command-line flags

    -fail - Specifiy the list of checks which, + Specify the list of checks which, if they find any issues in your code, should cause staticcheck to exit with a non-zero status. This can be used, for example, to not fail your CI @@ -167,6 +173,12 @@

    Command-line flags

    Include tests in the analysis. + + -unused.whole-program + + Run unused in whole program mode. + + -version From 5a514854c85db3e1f31da47016826b9103c89e06 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 22:57:34 +0200 Subject: [PATCH 229/254] lint: don't run analyzers that are completely disabled --- lint/lint.go | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index 535ba43e4..cd9f17936 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -107,10 +107,23 @@ type CumulativeChecker interface { } func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error) { + var allAnalyzers []*analysis.Analyzer + allAnalyzers = append(allAnalyzers, l.Checkers...) + for _, cum := range l.CumulativeCheckers { + allAnalyzers = append(allAnalyzers, cum.Analyzer()) + } + + allowed := FilterChecks(allAnalyzers, config.DefaultConfig.Merge(l.Config).Checks) var analyzers []*analysis.Analyzer - analyzers = append(analyzers, l.Checkers...) + for _, c := range l.Checkers { + if allowed[c.Name] { + analyzers = append(analyzers, c) + } + } for _, cum := range l.CumulativeCheckers { - analyzers = append(analyzers, cum.Analyzer()) + if allowed[cum.Analyzer().Name] { + analyzers = append(analyzers, cum.Analyzer()) + } } r, err := NewRunner(&l.Stats) From 91bb868a58cea8b57f3ce5030f72bf12d0b6ca69 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 22:59:04 +0200 Subject: [PATCH 230/254] lint: drop more data we no longer need when done processing package --- lint/lint.go | 4 +++- lint/runner.go | 27 ++++++++++++++++++++++++++- unused/unused_test.go | 2 +- 3 files changed, 30 insertions(+), 3 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index cd9f17936..f36577c8d 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -120,8 +120,10 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error analyzers = append(analyzers, c) } } + hasCumulative := false for _, cum := range l.CumulativeCheckers { if allowed[cum.Analyzer().Name] { + hasCumulative = true analyzers = append(analyzers, cum.Analyzer()) } } @@ -132,7 +134,7 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error } r.goVersion = l.GoVersion - pkgs, err := r.Run(cfg, patterns, analyzers) + pkgs, err := r.Run(cfg, patterns, analyzers, hasCumulative) if err != nil { return nil, err } diff --git a/lint/runner.go b/lint/runner.go index e55fb5437..dcc6e2b21 100644 --- a/lint/runner.go +++ b/lint/runner.go @@ -79,6 +79,25 @@ type Package struct { // these slices are indexed by analysis facts []map[types.Object][]analysis.Fact pkgFacts [][]analysis.Fact + + canClearTypes bool + dependents uint64 +} + +func (pkg *Package) decUse() { + atomic.AddUint64(&pkg.dependents, ^uint64(0)) + if atomic.LoadUint64(&pkg.dependents) == 0 { + // nobody depends on this package anymore + if pkg.canClearTypes { + pkg.Types = nil + } + pkg.facts = nil + pkg.pkgFacts = nil + + for _, imp := range pkg.Imports { + imp.decUse() + } + } } type result struct { @@ -420,7 +439,7 @@ func NewRunner(stats *Stats) (*Runner, error) { // Note that diagnostics have not been filtered at this point yet, to // accomodate cumulative analyzes that require additional steps to // produce diagnostics. 
-func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer) ([]*Package, error) { +func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analysis.Analyzer, hasCumulative bool) ([]*Package, error) { r.analyzerIDs = analyzerIDs{m: map[*analysis.Analyzer]int{}} id := 0 seen := map[*analysis.Analyzer]struct{}{} @@ -471,6 +490,9 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy facts: make([]map[types.Object][]analysis.Fact, len(r.analyzerIDs.m)), pkgFacts: make([][]analysis.Fact, len(r.analyzerIDs.m)), done: make(chan struct{}), + // every package needs itself + dependents: 1, + canClearTypes: !hasCumulative, } allPkgs = append(allPkgs, m[l]) for i := range m[l].facts { @@ -480,6 +502,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy m[l].errs = append(m[l].errs, err) } for _, v := range l.Imports { + m[v].dependents++ m[l].Imports = append(m[l].Imports, m[v]) } @@ -488,6 +511,7 @@ func (r *Runner) Run(cfg *packages.Config, patterns []string, analyzers []*analy m[l].errs = append(m[l].errs, err) } }) + pkgs := make([]*Package, len(initialPkgs)) for i, l := range initialPkgs { pkgs[i] = m[l] @@ -665,6 +689,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) { pkg.results = nil atomic.AddUint64(&r.stats.ProcessedPackages, 1) + pkg.decUse() close(pkg.done) }() diff --git a/unused/unused_test.go b/unused/unused_test.go index 3035f2017..3a6ff5d47 100644 --- a/unused/unused_test.go +++ b/unused/unused_test.go @@ -187,7 +187,7 @@ func TestAll(t *testing.T) { Tests: true, Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"), } - pkgs, err := r.Run(cfg, []string{"./..."}, []*analysis.Analyzer{c.Analyzer()}) + pkgs, err := r.Run(cfg, []string{"./..."}, []*analysis.Analyzer{c.Analyzer()}, true) if err != nil { t.Fatal(err) } From 0d9fc573086e7fd691fbd17979b137f83ce87203 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Wed, 5 Jun 2019 23:11:28 +0200 Subject: [PATCH 231/254] doc: update section on resource usage --- doc/staticcheck.html | 91 +++++++++----------------------------------- 1 file changed, 19 insertions(+), 72 deletions(-) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index 6b03da190..a3908eea9 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -309,97 +309,44 @@

    Resource usage

    Static analysis is a rather resource intensive process, - having to apply expensive algorithms on a lot of data. + having to apply expensive algorithms to a lot of data. Depending on the complexity of the checked code, this can result in many gigabytes of memory usage and minutes (if not hours) of CPU time.

    - When using staticcheck, there are two different ways of running it: - one package per invocation, or many packages per invocation. - These two modes have opposite CPU and memory usage characteristics. + To combat the time complexity of static analysis, staticcheck makes use of caching. + It reuses Go's build cache as well as its own facts cache to avoid analysing dependencies whenever possible. + In development environments, there is usually nothing to do to benefit from these caches. + In CI, however, you have to ensure that the caches persist across successive runs of CI. + The build cache and fact cache are stored beneath the os.UserCacheDir() directory, in go-build and staticcheck respectively. + On Linux, by default, these directories can be found in ~/.cache/go-build and ~/.cache/staticcheck.
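As a rough illustration of where those caches live, assuming the default locations described above and no GOCACHE or XDG_CACHE_HOME overrides:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // os.UserCacheDir resolves to ~/.cache on Linux by default; these two
        // subdirectories are the ones worth persisting between CI runs.
        dir, err := os.UserCacheDir()
        if err != nil {
            panic(err)
        }
        fmt.Println("build cache:", filepath.Join(dir, "go-build"))
        fmt.Println("fact cache: ", filepath.Join(dir, "staticcheck"))
    }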

    - Checking one package per invocation will keep the memory usage low, - but may end up recompiling the same packages over and over again, wasting CPU time. + The overall memory consumption of staticcheck is controlled by the degree of parallelism. + The more CPU cores a system has available, the more packages will be checked in parallel, increasing the total amount of memory needed. + Staticcheck respects the GOMAXPROCS environment variable to control the degree of parallelism.

    - Checking many packages, on the other hand, - needs to keep more data in memory at once, - but can reuse compiled packages, saving CPU time. + Note that reducing GOMAXPROCS only benefits systems with a lot of cores and code bases with a lot of packages. + As GOMAXPROCS approaches 1, peak memory usage will be dominated by the most complex package in the code base. + Additionally, smaller code bases may have such interconnected dependencies that peak parallelism is never reached, or there may simply be fewer packages than cores. + For example, when checking 10 packages it makes no difference if GOMAXPROCS is set to 32 or 16, at most 10 packages can be processed in parallel.

    - The following tables will show the characteristics of the different modes. + Furthermore, a certain amount of type information per package needs to be retained until the end of the process, + which means that overall memory usage grows with the number of checked packages. + You can reduce this effect by disabling the U1000 and U1001 checks via the -checks command line flag (e.g. via staticcheck -checks="inherit,-U1000,-U1001").

    - - - - - - - - - - - - - - - - - - - - - -
    Resource consumption in different operation modes
    ModeCPU usageMemory usage
    One package per invocationHighLow
    Many packages per invocationLowHigh
    - - - - - - - - - - - - - - - - - - - - - - - - - -
    Using staticcheck to check the Go standard library
    ModeCPU time spentWall time spentMax memory used
    One package per invocation1692%2241%28%
    All packages in one invocation (reference value)100%100%100%
    -

    - The maximum memory usage of a single invocation depends - on the combined complexity of the checked packages and their dependencies. - The entire standard library combined requires roughly 2.31 GB of memory, - while the most complex package, net/http, only needs 640 MB. - Other packages, such as the tiny image/color/palette, may only need as little as 17 MB. + Finally, you can trade execution time for memory usage by setting the GOGC environment variable to a value less than 100. + This will run more frequent garbage collection, potentially lowering peak memory usage, at the cost of spending more CPU.

    -

    - Checking packages individually reduces both the maximum and the average (over time) memory usage, - but requires a much greater amount of CPU time, - as shared dependencies have to be compiled over and over again. - You should use the method that best suits your available resources. -

    - -

    Checks

    From 80fb6a033103e26a505502c319ca7dd847e545c1 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 00:50:31 +0200 Subject: [PATCH 232/254] doc: update benchmarks --- doc/2019.2.html | 64 ++++++++++++++++++++++++------------------------- 1 file changed, 31 insertions(+), 33 deletions(-) diff --git a/doc/2019.2.html b/doc/2019.2.html index c7dbfcde6..ff39d8a66 100644 --- a/doc/2019.2.html +++ b/doc/2019.2.html @@ -41,40 +41,37 @@

    Performance improvements

    net/http - 3.374 s / 680 MB - 3.687 s / 248 MB - +9.28% / -63.53% + 3.543 s / 677 MB + 3.747 s / 254 MB + +5.76% / -62.48% strconv - 1.511 s / 294 MB - 1.655 s / 120 MB - +9.53% / -59.18% + 1.628 s / 294 MB + 1.678 s / 118 MB + +3.07% / -59.86% image/color - 1.296 s / 224 MB - 1.631 s / 133 MB - +25.85% / -40.63% + 1.304 s / 225 MB + 1.702 s / 138 MB + +30.52% / -38.67% std - 25.164 s / 4028 MB - 19.028 s / 1063 MB - -24.38% / -73.61% + 26.234 s / 3987 MB + 19.444 s / 1054 MB + -25.88% / -73.56% github.com/cockroachdb/cockroach/pkg/... - 82.090 s / 15718 MB - 91.410 s / 3941 MB - +11.35% / -74.93 % - - - + 88.644 s / 15959 MB + 93.798 s / 4156 MB + +5.81% / -73.96% @@ -104,39 +101,40 @@

    Performance improvements

    Change + net/http - 3.687 s / 248 MB - 1.507 s / 199 MB - -59.13% / -19.76% + 3.747 s / 254 MB + 1.545 s / 195 MB + -58.77% / -23.23% strconv - 1.655 s / 120 MB - 0.454 s / 57 MB - -72.57% / -52.5% + 1.678 s / 118 MB + 0.495 s / 57 MB + -70.5% / -51.69% image/color - 1.631 s / 133 MB - 0.324 s / 31 MB - -80.13% / -76.69% + 1.702 s / 138 MB + 0.329 s / 31 MB + -80.67% / -77.54% std - 19.028 s / 1063 MB - 14.308 s / 900 MB - -24.81% / -15.33% + 19.444 s / 1054 MB + 15.099 s / 887 MB + -22.35% / -15.84% github.com/cockroachdb/cockroach/pkg/... - 91.410 s / 3941 MB - 45.840 s / 2800 MB - -49.85% / -28.95% + 93.798 s / 4156 MB + 47.205 s / 2516 MB + -49.67% / -39.46%

    From fbbbf9fd180c05ac8e6c6bfb6b1a97a14682676f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 01:55:30 +0200 Subject: [PATCH 233/254] doc: add install instructions --- doc/staticcheck.html | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index a3908eea9..06a6d3fa3 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -12,6 +12,35 @@

    Overview

    and offers enough customizability to fit into your workflows.

    +

    Installation

    + +

    + There are various ways in which you can install staticcheck, + but they all boil down to obtaining the command located at honnef.co/go/tools/cmd/staticcheck +

    + +

    + If you use Go modules, you can simply run go get honnef.co/go/tools/cmd/staticcheck to obtain the latest released version. + If you're still using a GOPATH-based workflow, then the above command will instead fetch the master branch. + It is suggested that you explicitly check out the latest release branch instead, which is currently 2019.2. + One way of doing so would be as follows: +

    + +
    cd $GOPATH/src/honnef.co/go/tools/cmd/staticcheck
    +git checkout 2019.2
    +go get
    +go install
    +
    + +

    + Alternatively, you can download pre-compiled binaries from GitHub. +

    + +

    + If you'd like to be notified of new releases, you can use GitHub's + Releases only watches. +

    +

    Running staticcheck

    From 009ed7b0ba5f0fcf718ff72592eec2925b9c7b8b Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 02:54:37 +0200 Subject: [PATCH 234/254] all: structured documentation Instead of putting documentation in opaque string, put them in structs with some well-known fields, such as the first release containing the check, or the options supported by the check. This will make it much simpler to generate the website. --- lint/lint.go | 34 ++ simple/analysis.go | 58 +-- simple/doc.go | 430 +++++++++---------- staticcheck/analysis.go | 142 +++---- staticcheck/doc.go | 889 ++++++++++++++++++---------------------- stylecheck/analysis.go | 26 +- stylecheck/doc.go | 206 +++++----- 7 files changed, 825 insertions(+), 960 deletions(-) diff --git a/lint/lint.go b/lint/lint.go index f36577c8d..d86d321b2 100644 --- a/lint/lint.go +++ b/lint/lint.go @@ -19,6 +19,40 @@ import ( "honnef.co/go/tools/config" ) +type Documentation struct { + Title string + Text string + Since string + NonDefault bool + Options []string +} + +func (doc *Documentation) String() string { + b := &strings.Builder{} + fmt.Fprintf(b, "%s\n\n", doc.Title) + if doc.Text != "" { + fmt.Fprintf(b, "%s\n\n", doc.Text) + } + fmt.Fprint(b, "Available since\n ") + if doc.Since == "" { + fmt.Fprint(b, "unreleased") + } else { + fmt.Fprintf(b, "%s", doc.Since) + } + if doc.NonDefault { + fmt.Fprint(b, ", non-default") + } + fmt.Fprint(b, "\n") + if len(doc.Options) > 0 { + fmt.Fprintf(b, "\nOptions\n") + for _, opt := range doc.Options { + fmt.Fprintf(b, " %s", opt) + } + fmt.Fprint(b, "\n") + } + return b.String() +} + type Ignore interface { Match(p Problem) bool } diff --git a/simple/analysis.go b/simple/analysis.go index a3dd37f7d..abb1648fa 100644 --- a/simple/analysis.go +++ b/simple/analysis.go @@ -20,203 +20,203 @@ var Analyzers = map[string]*analysis.Analyzer{ "S1000": { Name: "S1000", Run: LintSingleCaseSelect, - Doc: docS1000, + Doc: Docs["S1000"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1001": { Name: "S1001", Run: LintLoopCopy, - Doc: docS1001, + Doc: Docs["S1001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1002": { Name: "S1002", Run: LintIfBoolCmp, - Doc: docS1002, + Doc: Docs["S1002"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1003": { Name: "S1003", Run: LintStringsContains, - Doc: docS1003, + Doc: Docs["S1003"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1004": { Name: "S1004", Run: LintBytesCompare, - Doc: docS1004, + Doc: Docs["S1004"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1005": { Name: "S1005", Run: LintUnnecessaryBlank, - Doc: docS1005, + Doc: Docs["S1005"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1006": { Name: "S1006", Run: LintForTrue, - Doc: docS1006, + Doc: Docs["S1006"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1007": { Name: "S1007", Run: LintRegexpRaw, - Doc: docS1007, + Doc: Docs["S1007"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1008": { Name: "S1008", Run: LintIfReturn, - Doc: docS1008, + Doc: Docs["S1008"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1009": { 
Name: "S1009", Run: LintRedundantNilCheckWithLen, - Doc: docS1009, + Doc: Docs["S1009"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1010": { Name: "S1010", Run: LintSlicing, - Doc: docS1010, + Doc: Docs["S1010"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1011": { Name: "S1011", Run: LintLoopAppend, - Doc: docS1011, + Doc: Docs["S1011"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1012": { Name: "S1012", Run: LintTimeSince, - Doc: docS1012, + Doc: Docs["S1012"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1016": { Name: "S1016", Run: LintSimplerStructConversion, - Doc: docS1016, + Doc: Docs["S1016"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1017": { Name: "S1017", Run: LintTrim, - Doc: docS1017, + Doc: Docs["S1017"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1018": { Name: "S1018", Run: LintLoopSlide, - Doc: docS1018, + Doc: Docs["S1018"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1019": { Name: "S1019", Run: LintMakeLenCap, - Doc: docS1019, + Doc: Docs["S1019"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1020": { Name: "S1020", Run: LintAssertNotNil, - Doc: docS1020, + Doc: Docs["S1020"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1021": { Name: "S1021", Run: LintDeclareAssign, - Doc: docS1021, + Doc: Docs["S1021"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1023": { Name: "S1023", Run: LintRedundantBreak, - Doc: docS1023, + Doc: Docs["S1023"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1024": { Name: "S1024", Run: LintTimeUntil, - Doc: docS1024, + Doc: Docs["S1024"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1025": { Name: "S1025", Run: LintRedundantSprintf, - Doc: docS1025, + Doc: Docs["S1025"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1028": { Name: "S1028", Run: LintErrorsNewSprintf, - Doc: docS1028, + Doc: Docs["S1028"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1029": { Name: "S1029", Run: LintRangeStringRunes, - Doc: docS1029, + Doc: Docs["S1029"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "S1030": { Name: "S1030", Run: LintBytesBufferConversions, - Doc: docS1030, + Doc: Docs["S1030"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1031": { Name: "S1031", Run: LintNilCheckAroundRange, - Doc: docS1031, + Doc: Docs["S1031"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1032": { Name: "S1032", Run: LintSortHelpers, - Doc: docS1032, + Doc: Docs["S1032"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, "S1033": { Name: "S1033", Run: LintGuardedDelete, - Doc: docS1033, + Doc: Docs["S1033"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, 
facts.Generated}, Flags: newFlagSet(), }, "S1034": { Name: "S1034", Run: LintSimplifyTypeSwitch, - Doc: docS1034, + Doc: Docs["S1034"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated}, Flags: newFlagSet(), }, diff --git a/simple/doc.go b/simple/doc.go index 09d3d4e34..f6c32b8d7 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -1,89 +1,82 @@ package simple -var docS1000 = `Use plain channel send or receive +import "honnef.co/go/tools/lint" -Select statements with a single case can be replaced with a simple +var Docs = map[string]*lint.Documentation{ + "S1000": &lint.Documentation{ + Title: `Use plain channel send or receive`, + Text: `Select statements with a single case can be replaced with a simple send or receive. Before: select { case x := <-ch: - fmt.Println(x) + fmt.Println(x) } After: x := <-ch - fmt.Println(x) + fmt.Println(x)`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docS1001 = `Replace with copy() - -Use copy() for copying elements from one slice to another. + "S1001": &lint.Documentation{ + Title: `Replace with copy()`, + Text: `Use copy() for copying elements from one slice to another. Before: for i, x := range src { - dst[i] = x + dst[i] = x } After: - copy(dst, src) - -Available since - 2017.1 -` + copy(dst, src)`, + Since: "2017.1", + }, -var docS1002 = `Omit comparison with boolean constant - -Before: + "S1002": &lint.Documentation{ + Title: `Omit comparison with boolean constant`, + Text: `Before: if x == true {} After: - if x {} + if x {}`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docS1003 = `Replace with strings.Contains - -Before: + "S1003": &lint.Documentation{ + Title: `Replace with strings.Contains`, + Text: `Before: if strings.Index(x, y) != -1 {} After: - if strings.Contains(x, y) {} - -Available since - 2017.1 -` + if strings.Contains(x, y) {}`, + Since: "2017.1", + }, -var docS1004 = `Replace with bytes.Equal - -Before: + "S1004": &lint.Documentation{ + Title: `Replace with bytes.Equal`, + Text: `Before: if bytes.Compare(x, y) == 0 {} After: - if bytes.Equal(x, y) {} - -Available since - 2017.1 -` + if bytes.Equal(x, y) {}`, + Since: "2017.1", + }, -var docS1005 = `Drop unnecessary use of the blank identifier - -In many cases, assigning to the blank identifier is unnecessary. + "S1005": &lint.Documentation{ + Title: `Drop unnecessary use of the blank identifier`, + Text: `In many cases, assigning to the blank identifier is unnecessary. Before: @@ -95,23 +88,19 @@ After: for range s{} x = someMap[key] - <-ch - -Available since - 2017.1 -` - -var docS1006 = `Replace with for { ... } - -For infinite loops, using for { ... } is the most idiomatic choice. - -Available since - 2017.1 -` - -var docS1007 = `Simplify regular expression by using raw string literal - -Raw string literals use ` + "`" + ` instead of " and do not support + <-ch`, + Since: "2017.1", + }, + + "S1006": &lint.Documentation{ + Title: `Replace with for { ... }`, + Text: `For infinite loops, using for { ... } is the most idiomatic choice.`, + Since: "2017.1", + }, + + "S1007": &lint.Documentation{ + Title: `Simplify regular expression by using raw string literal`, + Text: `Raw string literals use ` + "`" + ` instead of " and do not support any escape sequences. This means that the backslash (\) can be used freely, without the need of escaping. 
@@ -124,32 +113,28 @@ Before: After: - regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `) - -Available since - 2017.1 -` + regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `)`, + Since: "2017.1", + }, -var docS1008 = `Simplify returning boolean expression - -Before: + "S1008": &lint.Documentation{ + Title: `Simplify returning boolean expression`, + Text: `Before: if { - return true + return true } return false After: - return - -Available since - 2017.1 -` + return `, + Since: "2017.1", + }, -var docS1009 = `Omit redundant nil check on slices - -The len function is defined for all slices, even nil ones, which have + "S1009": &lint.Documentation{ + Title: `Omit redundant nil check on slices`, + Text: `The len function is defined for all slices, even nil ones, which have a length of zero. It is not necessary to check if a slice is not nil before checking that its length is not zero. @@ -159,40 +144,34 @@ Before: After: - if len(x) != 0 {} - -Available since - 2017.1 -` - -var docS1010 = `Omit default slice index - -When slicing, the second index defaults to the length of the value, -making s[n:len(s)] and s[n:] equivalent. + if len(x) != 0 {}`, + Since: "2017.1", + }, -Available since - 2017.1 -` + "S1010": &lint.Documentation{ + Title: `Omit default slice index`, + Text: `When slicing, the second index defaults to the length of the value, +making s[n:len(s)] and s[n:] equivalent.`, + Since: "2017.1", + }, -var docS1011 = `Use a single append to concatenate two slices - -Before: + "S1011": &lint.Documentation{ + Title: `Use a single append to concatenate two slices`, + Text: `Before: for _, e := range y { - x = append(x, e) + x = append(x, e) } After: - x = append(x, y...) - -Available since - 2017.1 -` - -var docS1012 = `Replace with time.Since(x) + x = append(x, y...)`, + Since: "2017.1", + }, -The time.Since helper has the same effect as using time.Now().Sub(x) + "S1012": &lint.Documentation{ + Title: `Replace with time.Since(x)`, + Text: `The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read. Before: @@ -201,15 +180,13 @@ Before: After: - time.Since(x) + time.Since(x)`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docS1016 = `Use a type conversion - -Two struct types with identical fields can be converted between each + "S1016": &lint.Documentation{ + Title: `Use a type conversion`, + Text: `Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field @@ -219,22 +196,20 @@ Before: var x T1 y := T2{ - Field1: x.Field1, - Field2: x.Field2, + Field1: x.Field1, + Field2: x.Field2, } After: var x T1 - y := T2(x) - -Available since - 2017.1 -` - -var docS1017 = `Replace with strings.TrimPrefix + y := T2(x)`, + Since: "2017.1", + }, -Instead of using strings.HasPrefix and manual slicing, use the + "S1017": &lint.Documentation{ + Title: `Replace with strings.TrimPrefix`, + Text: `Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix reduces complexity, and avoids common bugs, such as off-by-one @@ -243,91 +218,80 @@ mistakes. 
Before: if strings.HasPrefix(str, prefix) { - str = str[len(prefix):] + str = str[len(prefix):] } After: - str = strings.TrimPrefix(str, prefix) + str = strings.TrimPrefix(str, prefix)`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docS1018 = `Replace with copy() - -copy() permits using the same source and destination slice, even with + "S1018": &lint.Documentation{ + Title: `Replace with copy()`, + Text: `copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice. Before: for i := 0; i < n; i++ { - bs[i] = bs[offset+i] + bs[i] = bs[offset+i] } After: - copy(bs[:n], bs[offset:]) - -Available since - 2017.1 -` + copy(bs[:n], bs[offset:])`, + Since: "2017.1", + }, -var docS1019 = `Simplify make call - -The make function has default values for the length and capacity + "S1019": &lint.Documentation{ + Title: `Simplify make call`, + Text: `The make function has default values for the length and capacity arguments. For channels and maps, the length defaults to zero. -Additionally, for slices the capacity defaults to the length. - -Available since - 2017.1 -` +Additionally, for slices the capacity defaults to the length.`, + Since: "2017.1", + }, -var docS1020 = `Omit redundant nil check in type assertion - -Before: + "S1020": &lint.Documentation{ + Title: `Omit redundant nil check in type assertion`, + Text: `Before: if _, ok := i.(T); ok && i != nil {} After: - if _, ok := i.(T); ok {} - -Available since - 2017.1 -` - -var docS1021 = `Merge variable declaration and assignment + if _, ok := i.(T); ok {}`, + Since: "2017.1", + }, -Before: + "S1021": &lint.Documentation{ + Title: `Merge variable declaration and assignment`, + Text: `Before: var x uint x = 1 After: - var x uint = 1 - -Available since - 2017.1 -` -var docS1023 = `Omit redundant control flow + var x uint = 1`, + Since: "2017.1", + }, -Functions that have no return value do not need a return statement as + "S1023": &lint.Documentation{ + Title: `Omit redundant control flow`, + Text: `Functions that have no return value do not need a return statement as the final statement of the function. Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final -statement in a case block. - -Available since - 2017.1 -` +statement in a case block.`, + Since: "2017.1", + }, -var docS1024 = `Replace with time.Until(x) - -The time.Until helper has the same effect as using x.Sub(time.Now()) + "S1024": &lint.Documentation{ + Title: `Replace with time.Until(x)`, + Text: `The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read. Before: @@ -336,15 +300,13 @@ Before: After: - time.Until(x) - -Available since - 2017.1 -` + time.Until(x)`, + Since: "2017.1", + }, -var docS1025 = `Don't use fmt.Sprintf("%s", x) unnecessarily - -In many instances, there are easier and more efficient ways of getting + "S1025": &lint.Documentation{ + Title: `Don't use fmt.Sprintf("%s", x) unnecessarily`, + Text: `In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used directly. 
@@ -370,29 +332,25 @@ to x string(y) - z.String() - -Available since - 2017.1 -` + z.String()`, + Since: "2017.1", + }, -var docS1028 = `replace with fmt.Errorf - -Before: + "S1028": &lint.Documentation{ + Title: `replace with fmt.Errorf`, + Text: `Before: errors.New(fmt.Sprintf(...)) After: - fmt.Errorf(...) + fmt.Errorf(...)`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docS1029 = `Range over the string - -Ranging over a string will yield byte offsets and runes. If the offset + "S1029": &lint.Documentation{ + Title: `Range over the string`, + Text: `Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new @@ -404,49 +362,43 @@ Before: After: - for _, r := range s {} - -Available since - 2017.1 -` + for _, r := range s {}`, + Since: "2017.1", + }, -var docS1030 = `Use bytes.Buffer.String or bytes.Buffer.Bytes - -bytes.Buffer has both a String and a Bytes method. It is never + "S1030": &lint.Documentation{ + Title: `Use bytes.Buffer.String or bytes.Buffer.Bytes`, + Text: `bytes.Buffer has both a String and a Bytes method. It is never necessary to use string(buf.Bytes()) or []byte(buf.String()) – simply -use the other method. - -Available since - 2017.1 -` +use the other method.`, + Since: "2017.1", + }, -var docS1031 = `Omit redundant nil check around loop - -You can use range on nil slices and maps, the loop will simply never + "S1031": &lint.Documentation{ + Title: `Omit redundant nil check around loop`, + Text: `You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop unnecessary. Before: if s != nil { - for _, x := range s { - ... - } + for _, x := range s { + ... + } } After: for _, x := range s { - ... - } - -Available since - 2017.1 -` - -var docS1032 = `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x) + ... + }`, + Since: "2017.1", + }, -The sort.Ints, sort.Float64s and sort.Strings functions are easier to + "S1032": &lint.Documentation{ + Title: `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x)`, + Text: `The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)). @@ -456,22 +408,18 @@ Before: After: - sort.Strings(x) - -Available since - 2019.1 -` - -var docS1033 = `Unnecessary guard around call to delete - -Calling delete on a nil map is a no-op. 
- -Available since - Unreleased -` - -var docS1034 = `Use result of type assertion to simplify cases - -Available since - Unreleased -` + sort.Strings(x)`, + Since: "2019.1", + }, + + "S1033": &lint.Documentation{ + Title: `Unnecessary guard around call to delete`, + Text: `Calling delete on a nil map is a no-op.`, + Since: "2019.2", + }, + + "S1034": &lint.Documentation{ + Title: `Use result of type assertion to simplify cases`, + Since: "2019.2", + }, +} diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go index 1e0513bf3..633a4973f 100644 --- a/staticcheck/analysis.go +++ b/staticcheck/analysis.go @@ -21,182 +21,182 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA1000": { Name: "SA1000", Run: callChecker(checkRegexpRules), - Doc: docSA1000, + Doc: Docs["SA1000"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1001": { Name: "SA1001", Run: CheckTemplate, - Doc: docSA1001, + Doc: Docs["SA1001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1002": { Name: "SA1002", Run: callChecker(checkTimeParseRules), - Doc: docSA1002, + Doc: Docs["SA1002"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1003": { Name: "SA1003", Run: callChecker(checkEncodingBinaryRules), - Doc: docSA1003, + Doc: Docs["SA1003"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1004": { Name: "SA1004", Run: CheckTimeSleepConstant, - Doc: docSA1004, + Doc: Docs["SA1004"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1005": { Name: "SA1005", Run: CheckExec, - Doc: docSA1005, + Doc: Docs["SA1005"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1006": { Name: "SA1006", Run: CheckUnsafePrintf, - Doc: docSA1006, + Doc: Docs["SA1006"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1007": { Name: "SA1007", Run: callChecker(checkURLsRules), - Doc: docSA1007, + Doc: Docs["SA1007"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1008": { Name: "SA1008", Run: CheckCanonicalHeaderKey, - Doc: docSA1008, + Doc: Docs["SA1008"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1010": { Name: "SA1010", Run: callChecker(checkRegexpFindAllRules), - Doc: docSA1010, + Doc: Docs["SA1010"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1011": { Name: "SA1011", Run: callChecker(checkUTF8CutsetRules), - Doc: docSA1011, + Doc: Docs["SA1011"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1012": { Name: "SA1012", Run: CheckNilContext, - Doc: docSA1012, + Doc: Docs["SA1012"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1013": { Name: "SA1013", Run: CheckSeeker, - Doc: docSA1013, + Doc: Docs["SA1013"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1014": { Name: "SA1014", Run: callChecker(checkUnmarshalPointerRules), - Doc: docSA1014, + Doc: Docs["SA1014"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1015": { Name: "SA1015", Run: CheckLeakyTimeTick, - Doc: docSA1015, + Doc: Docs["SA1015"].String(), Requires: 
[]*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA1016": { Name: "SA1016", Run: CheckUntrappableSignal, - Doc: docSA1016, + Doc: Docs["SA1016"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA1017": { Name: "SA1017", Run: callChecker(checkUnbufferedSignalChanRules), - Doc: docSA1017, + Doc: Docs["SA1017"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1018": { Name: "SA1018", Run: callChecker(checkStringsReplaceZeroRules), - Doc: docSA1018, + Doc: Docs["SA1018"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1019": { Name: "SA1019", Run: CheckDeprecated, - Doc: docSA1019, + Doc: Docs["SA1019"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Deprecated}, Flags: newFlagSet(), }, "SA1020": { Name: "SA1020", Run: callChecker(checkListenAddressRules), - Doc: docSA1020, + Doc: Docs["SA1020"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1021": { Name: "SA1021", Run: callChecker(checkBytesEqualIPRules), - Doc: docSA1021, + Doc: Docs["SA1021"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1023": { Name: "SA1023", Run: CheckWriterBufferModified, - Doc: docSA1023, + Doc: Docs["SA1023"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA1024": { Name: "SA1024", Run: callChecker(checkUniqueCutsetRules), - Doc: docSA1024, + Doc: Docs["SA1024"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1025": { Name: "SA1025", Run: CheckTimerResetReturnValue, - Doc: docSA1025, + Doc: Docs["SA1025"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA1026": { Name: "SA1026", Run: callChecker(checkUnsupportedMarshal), - Doc: docSA1026, + Doc: Docs["SA1026"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA1027": { Name: "SA1027", Run: callChecker(checkAtomicAlignment), - Doc: docSA1027, + Doc: Docs["SA1027"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, @@ -204,28 +204,28 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA2000": { Name: "SA2000", Run: CheckWaitgroupAdd, - Doc: docSA2000, + Doc: Docs["SA2000"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA2001": { Name: "SA2001", Run: CheckEmptyCriticalSection, - Doc: docSA2001, + Doc: Docs["SA2001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA2002": { Name: "SA2002", Run: CheckConcurrentTesting, - Doc: docSA2002, + Doc: Docs["SA2002"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA2003": { Name: "SA2003", Run: CheckDeferLock, - Doc: docSA2003, + Doc: Docs["SA2003"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, @@ -233,14 +233,14 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA3000": { Name: "SA3000", Run: CheckTestMainExit, - Doc: docSA3000, + Doc: Docs["SA3000"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA3001": { Name: "SA3001", Run: CheckBenchmarkN, - Doc: docSA3001, + Doc: Docs["SA3001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, @@ -248,140 
+248,140 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA4000": { Name: "SA4000", Run: CheckLhsRhsIdentical, - Doc: docSA4000, + Doc: Docs["SA4000"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.TokenFile, facts.Generated}, Flags: newFlagSet(), }, "SA4001": { Name: "SA4001", Run: CheckIneffectiveCopy, - Doc: docSA4001, + Doc: Docs["SA4001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4002": { Name: "SA4002", Run: CheckDiffSizeComparison, - Doc: docSA4002, + Doc: Docs["SA4002"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA4003": { Name: "SA4003", Run: CheckExtremeComparison, - Doc: docSA4003, + Doc: Docs["SA4003"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4004": { Name: "SA4004", Run: CheckIneffectiveLoop, - Doc: docSA4004, + Doc: Docs["SA4004"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4006": { Name: "SA4006", Run: CheckUnreadVariableValues, - Doc: docSA4006, + Doc: Docs["SA4006"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA4008": { Name: "SA4008", Run: CheckLoopCondition, - Doc: docSA4008, + Doc: Docs["SA4008"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA4009": { Name: "SA4009", Run: CheckArgOverwritten, - Doc: docSA4009, + Doc: Docs["SA4009"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA4010": { Name: "SA4010", Run: CheckIneffectiveAppend, - Doc: docSA4010, + Doc: Docs["SA4010"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA4011": { Name: "SA4011", Run: CheckScopedBreak, - Doc: docSA4011, + Doc: Docs["SA4011"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4012": { Name: "SA4012", Run: CheckNaNComparison, - Doc: docSA4012, + Doc: Docs["SA4012"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA4013": { Name: "SA4013", Run: CheckDoubleNegation, - Doc: docSA4013, + Doc: Docs["SA4013"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4014": { Name: "SA4014", Run: CheckRepeatedIfElse, - Doc: docSA4014, + Doc: Docs["SA4014"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4015": { Name: "SA4015", Run: callChecker(checkMathIntRules), - Doc: docSA4015, + Doc: Docs["SA4015"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA4016": { Name: "SA4016", Run: CheckSillyBitwiseOps, - Doc: docSA4016, + Doc: Docs["SA4016"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile}, Flags: newFlagSet(), }, "SA4017": { Name: "SA4017", Run: CheckPureFunctions, - Doc: docSA4017, + Doc: Docs["SA4017"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Purity}, Flags: newFlagSet(), }, "SA4018": { Name: "SA4018", Run: CheckSelfAssignment, - Doc: docSA4018, + Doc: Docs["SA4018"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, "SA4019": { Name: "SA4019", Run: CheckDuplicateBuildConstraints, - Doc: docSA4019, + Doc: Docs["SA4019"].String(), Requires: []*analysis.Analyzer{facts.Generated}, Flags: newFlagSet(), }, "SA4020": { Name: "SA4020", Run: CheckUnreachableTypeCases, - Doc: docSA4020, + Doc: 
Docs["SA4020"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA4021": { Name: "SA4021", Run: CheckSingleArgAppend, - Doc: docSA4021, + Doc: Docs["SA4021"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, @@ -389,63 +389,63 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA5000": { Name: "SA5000", Run: CheckNilMaps, - Doc: docSA5000, + Doc: Docs["SA5000"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA5001": { Name: "SA5001", Run: CheckEarlyDefer, - Doc: docSA5001, + Doc: Docs["SA5001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA5002": { Name: "SA5002", Run: CheckInfiniteEmptyLoop, - Doc: docSA5002, + Doc: Docs["SA5002"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA5003": { Name: "SA5003", Run: CheckDeferInInfiniteLoop, - Doc: docSA5003, + Doc: Docs["SA5003"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA5004": { Name: "SA5004", Run: CheckLoopEmptyDefault, - Doc: docSA5004, + Doc: Docs["SA5004"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA5005": { Name: "SA5005", Run: CheckCyclicFinalizer, - Doc: docSA5005, + Doc: Docs["SA5005"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA5007": { Name: "SA5007", Run: CheckInfiniteRecursion, - Doc: docSA5007, + Doc: Docs["SA5007"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA5008": { Name: "SA5008", Run: CheckStructTags, - Doc: docSA5008, + Doc: Docs["SA5008"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA5009": { Name: "SA5009", Run: callChecker(checkPrintfRules), - Doc: docSA5009, + Doc: Docs["SA5009"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, @@ -453,35 +453,35 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA6000": { Name: "SA6000", Run: callChecker(checkRegexpMatchLoopRules), - Doc: docSA6000, + Doc: Docs["SA6000"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA6001": { Name: "SA6001", Run: CheckMapBytesKey, - Doc: docSA6001, + Doc: Docs["SA6001"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA6002": { Name: "SA6002", Run: callChecker(checkSyncPoolValueRules), - Doc: docSA6002, + Doc: Docs["SA6002"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer}, Flags: newFlagSet(), }, "SA6003": { Name: "SA6003", Run: CheckRangeStringRunes, - Doc: docSA6003, + Doc: Docs["SA6003"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "SA6005": { Name: "SA6005", Run: CheckToLowerToUpperComparison, - Doc: docSA6005, + Doc: Docs["SA6005"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, @@ -489,28 +489,28 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA9001": { Name: "SA9001", Run: CheckDubiousDeferInChannelRangeLoop, - Doc: docSA9001, + Doc: Docs["SA9001"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA9002": { Name: "SA9002", Run: CheckNonOctalFileMode, - Doc: docSA9002, + Doc: Docs["SA9002"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, "SA9003": { Name: "SA9003", Run: 
CheckEmptyBranch, - Doc: docSA9003, + Doc: Docs["SA9003"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.TokenFile, facts.Generated}, Flags: newFlagSet(), }, "SA9004": { Name: "SA9004", Run: CheckMissingEnumTypesInDeclaration, - Doc: docSA9004, + Doc: Docs["SA9004"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, @@ -518,7 +518,7 @@ var Analyzers = map[string]*analysis.Analyzer{ "SA9005": { Name: "SA9005", Run: callChecker(checkNoopMarshal), - Doc: docSA9005, + Doc: Docs["SA9005"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer, valueRangesAnalyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, diff --git a/staticcheck/doc.go b/staticcheck/doc.go index 17682a795..d0ed8beae 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -1,39 +1,37 @@ package staticcheck -var docSA1000 = `Invalid regular expression - -Available since - 2017.1 -` - -var docSA1001 = `Invalid template - -Available since - 2017.1 -` - -var docSA1002 = `Invalid format in time.Parse - -Available since - 2017.1 -` - -var docSA1003 = `Unsupported argument to functions in encoding/binary - -The encoding/binary package can only serialize types with known sizes. +import "honnef.co/go/tools/lint" + +var Docs = map[string]*lint.Documentation{ + "SA1000": &lint.Documentation{ + Title: `Invalid regular expression`, + Since: "2017.1", + }, + + "SA1001": &lint.Documentation{ + Title: `Invalid template`, + Since: "2017.1", + }, + + "SA1002": &lint.Documentation{ + Title: `Invalid format in time.Parse`, + Since: "2017.1", + }, + + "SA1003": &lint.Documentation{ + Title: `Unsupported argument to functions in encoding/binary`, + Text: `The encoding/binary package can only serialize types with known sizes. This precludes the use of the int and uint types, as their sizes differ on different architectures. Furthermore, it doesn't support serializing maps, channels, strings, or functions. -Before Go 1.8, bool wasn't supported, either. - -Available since - 2017.1 -` +Before Go 1.8, bool wasn't supported, either.`, + Since: "2017.1", + }, -var docSA1004 = `Suspiciously small untyped constant in time.Sleep - -The time.Sleep function takes a time.Duration as its only argument. + "SA1004": &lint.Documentation{ + Title: `Suspiciously small untyped constant in time.Sleep`, + Text: `The time.Sleep function takes a time.Duration as its only argument. Durations are expressed in nanoseconds. Thus, calling time.Sleep(1) will sleep for 1 nanosecond. This is a common source of bugs, as sleep functions in other languages often accept seconds or milliseconds. @@ -44,15 +42,13 @@ arbitrary durations, for example '5 * time.Second' for 5 seconds. If you truly meant to sleep for a tiny amount of time, use 'n * time.Nanosecond' to signal to staticcheck that you did mean to sleep -for some amount of nanoseconds. - -Available since - 2017.1 -` +for some amount of nanoseconds.`, + Since: "2017.1", + }, -var docSA1005 = `Invalid first argument to exec.Command - -os/exec runs programs directly (using variants of the fork and exec + "SA1005": &lint.Documentation{ + Title: `Invalid first argument to exec.Command`, + Text: `os/exec runs programs directly (using variants of the fork and exec system calls on Unix systems). This shouldn't be confused with running a command in a shell. The shell will allow for features such as input redirection, pipes, and general scripting. 
The shell is also @@ -69,15 +65,13 @@ If you want to run a command in a shell, consider using something like the following – but be aware that not all systems, particularly Windows, will have a /bin/sh program: - exec.Command("/bin/sh", "-c", "ls | grep Awesome") - -Available since - 2017.1 -` - -var docSA1006 = `Printf with dynamic first argument and no further arguments + exec.Command("/bin/sh", "-c", "ls | grep Awesome")`, + Since: "2017.1", + }, -Using fmt.Printf with a dynamic first argument can lead to unexpected + "SA1006": &lint.Documentation{ + Title: `Printf with dynamic first argument and no further arguments`, + Text: `Using fmt.Printf with a dynamic first argument can lead to unexpected output. The first argument is a format string, where certain character combinations have special meaning. If, for example, a user were to enter a string such as @@ -95,21 +89,18 @@ it would lead to the following output: Similarly, forming the first parameter via string concatenation with user input should be avoided for the same reason. When printing user input, either use a variant of fmt.Print, or use the %s Printf verb -and pass the string as an argument. - -Available since - 2017.1 -` - -var docSA1007 = `Invalid URL in net/url.Parse - -Available since - 2017.1 -` - -var docSA1008 = `Non-canonical key in http.Header map - -Keys in http.Header maps are canonical, meaning they follow a specific +and pass the string as an argument.`, + Since: "2017.1", + }, + + "SA1007": &lint.Documentation{ + Title: `Invalid URL in net/url.Parse`, + Since: "2017.1", + }, + + "SA1008": &lint.Documentation{ + Title: `Non-canonical key in http.Header map`, + Text: `Keys in http.Header maps are canonical, meaning they follow a specific combination of uppercase and lowercase letters. Methods such as http.Header.Add and http.Header.Del convert inputs into this canonical form before manipulating the map. @@ -128,119 +119,98 @@ demonstrates one such inconsistency: // map[Etag:[5678] etag:[1234]] The easiest way of obtaining the canonical form of a key is to use -http.CanonicalHeaderKey. - -Available since - 2017.1 -` - -var docSA1010 = `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results - -If n >= 0, the function returns at most n matches/submatches. To -return all results, specify a negative number. - -Available since - 2017.1 -` - -var docSA1011 = `Various methods in the strings package expect valid UTF-8, but invalid input is provided - -Available since - 2017.1 -` - -var docSA1012 = `A nil context.Context is being passed to a function, consider using context.TODO instead - -Available since - 2017.1 -` - -var docSA1013 = `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second - -Available since - 2017.1 -` - -var docSA1014 = `Non-pointer value passed to Unmarshal or Decode - -Available since - 2017.1 -` - -var docSA1015 = `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions - -Available since - 2017.1 -` - -var docSA1016 = `Trapping a signal that cannot be trapped - -Not all signals can be intercepted by a process. Speficially, on +http.CanonicalHeaderKey.`, + Since: "2017.1", + }, + + "SA1010": &lint.Documentation{ + Title: `(*regexp.Regexp).FindAll called with n == 0, which will always return zero results`, + Text: `If n >= 0, the function returns at most n matches/submatches. 
To +return all results, specify a negative number.`, + Since: "2017.1", + }, + + "SA1011": &lint.Documentation{ + Title: `Various methods in the strings package expect valid UTF-8, but invalid input is provided`, + Since: "2017.1", + }, + + "SA1012": &lint.Documentation{ + Title: `A nil context.Context is being passed to a function, consider using context.TODO instead`, + Since: "2017.1", + }, + + "SA1013": &lint.Documentation{ + Title: `io.Seeker.Seek is being called with the whence constant as the first argument, but it should be the second`, + Since: "2017.1", + }, + + "SA1014": &lint.Documentation{ + Title: `Non-pointer value passed to Unmarshal or Decode`, + Since: "2017.1", + }, + + "SA1015": &lint.Documentation{ + Title: `Using time.Tick in a way that will leak. Consider using time.NewTicker, and only use time.Tick in tests, commands and endless functions`, + Since: "2017.1", + }, + + "SA1016": &lint.Documentation{ + Title: `Trapping a signal that cannot be trapped`, + Text: `Not all signals can be intercepted by a process. Speficially, on UNIX-like systems, the syscall.SIGKILL and syscall.SIGSTOP signals are never passed to the process, but instead handled directly by the -kernel. It is therefore pointless to try and handle these signals. - -Available since - 2017.1 -` +kernel. It is therefore pointless to try and handle these signals.`, + Since: "2017.1", + }, -var docSA1017 = `Channels used with os/signal.Notify should be buffered - -The os/signal package uses non-blocking channel sends when delivering + "SA1017": &lint.Documentation{ + Title: `Channels used with os/signal.Notify should be buffered`, + Text: `The os/signal package uses non-blocking channel sends when delivering signals. If the receiving end of the channel isn't ready and the channel is either unbuffered or full, the signal will be dropped. To avoid missing signals, the channel should be buffered and of the appropriate size. For a channel used for notification of just one -signal value, a buffer of size 1 is sufficient. - -Available since - 2017.1 -` - -var docSA1018 = `strings.Replace called with n == 0, which does nothing - -With n == 0, zero instances will be replaced. To replace all -instances, use a negative number, or use strings.ReplaceAll. - -Available since - 2017.1 -` - -var docSA1019 = `Using a deprecated function, variable, constant or field - -Available since - 2017.1 -` - -var docSA1020 = `Using an invalid host:port pair with a net.Listen-related function - -Available since - 2017.1 -` - -var docSA1021 = `Using bytes.Equal to compare two net.IP - -A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The +signal value, a buffer of size 1 is sufficient.`, + Since: "2017.1", + }, + + "SA1018": &lint.Documentation{ + Title: `strings.Replace called with n == 0, which does nothing`, + Text: `With n == 0, zero instances will be replaced. To replace all +instances, use a negative number, or use strings.ReplaceAll.`, + Since: "2017.1", + }, + + "SA1019": &lint.Documentation{ + Title: `Using a deprecated function, variable, constant or field`, + Since: "2017.1", + }, + + "SA1020": &lint.Documentation{ + Title: `Using an invalid host:port pair with a net.Listen-related function`, + Since: "2017.1", + }, + + "SA1021": &lint.Documentation{ + Title: `Using bytes.Equal to compare two net.IP`, + Text: `A net.IP stores an IPv4 or IPv6 address as a slice of bytes. The length of the slice for an IPv4 address, however, can be either 4 or 16 bytes long, using different ways of representing IPv4 addresses. 
In order to correctly compare two net.IPs, the net.IP.Equal method should -be used, as it takes both representations into account. - -Available since - 2017.1 -` - -var docSA1023 = `Modifying the buffer in an io.Writer implementation - -Write must not modify the slice data, even temporarily. - -Available since - 2017.1 -` - -var docSA1024 = `A string cutset contains duplicate characters - -The strings.TrimLeft and strings.TrimRight functions take cutsets, not +be used, as it takes both representations into account.`, + Since: "2017.1", + }, + + "SA1023": &lint.Documentation{ + Title: `Modifying the buffer in an io.Writer implementation`, + Text: `Write must not modify the slice data, even temporarily.`, + Since: "2017.1", + }, + + "SA1024": &lint.Documentation{ + Title: `A string cutset contains duplicate characters`, + Text: `The strings.TrimLeft and strings.TrimRight functions take cutsets, not prefixes. A cutset is treated as a set of characters to remove from a string. For example, @@ -249,47 +219,40 @@ string. For example, will result in the string "word" – any characters that are 1, 2, 3 or 4 are cut from the left of the string. -In order to remove one string from another, use strings.TrimPrefix instead. - -Available since - 2017.1 -` - -var docSA1025 = `It is not possible to use (*time.Timer).Reset's return value correctly - -Available since - 2019.1 -` - -var docSA1026 = `Cannot marshal channels or functions +In order to remove one string from another, use strings.TrimPrefix instead.`, + Since: "2017.1", + }, -Available since - Unreleased -` + "SA1025": &lint.Documentation{ + Title: `It is not possible to use (*time.Timer).Reset's return value correctly`, + Since: "2019.1", + }, -var docSA1027 = `Atomic access to 64-bit variable must be 64-bit aligned + "SA1026": &lint.Documentation{ + Title: `Cannot marshal channels or functions`, + Since: "2019.2", + }, -On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to + "SA1027": &lint.Documentation{ + Title: `Atomic access to 64-bit variable must be 64-bit aligned`, + Text: `On ARM, x86-32, and 32-bit MIPS, it is the caller's responsibility to arrange for 64-bit alignment of 64-bit words accessed atomically. The first word in a variable or in an allocated struct, array, or slice can be relied upon to be 64-bit aligned. You can use the structlayout tool to inspect the alignment of fields -in a struct. +in a struct.`, + Since: "2019.2", + }, -Available since - Unreleased -` + "SA2000": &lint.Documentation{ + Title: `sync.WaitGroup.Add called inside the goroutine, leading to a race condition`, + Since: "2017.1", + }, -var docSA2000 = `sync.WaitGroup.Add called inside the goroutine, leading to a race condition - -Available since - 2017.1 -` - -var docSA2001 = `Empty critical section, did you mean to defer the unlock? - -Empty critical sections of the kind + "SA2001": &lint.Documentation{ + Title: `Empty critical section, did you mean to defer the unlock?`, + Text: `Empty critical sections of the kind mu.Lock() mu.Unlock() @@ -304,166 +267,138 @@ form of signaling to wait on another goroutine. Many times, there are simpler ways of achieving the same effect. When that isn't the case, the code should be amply commented to avoid confusion. Combining such comments with a //lint:ignore directive can be used to suppress this -rare false positive. 
- -Available since - 2017.1 -` - -var docSA2002 = `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed - -Available since - 2017.1 -` - -var docSA2003 = `Deferred Lock right after locking, likely meant to defer Unlock instead - -Available since - 2017.1 -` - -var docSA3000 = `TestMain doesn't call os.Exit, hiding test failures - -Test executables (and in turn 'go test') exit with a non-zero status +rare false positive.`, + Since: "2017.1", + }, + + "SA2002": &lint.Documentation{ + Title: `Called testing.T.FailNow or SkipNow in a goroutine, which isn't allowed`, + Since: "2017.1", + }, + + "SA2003": &lint.Documentation{ + Title: `Deferred Lock right after locking, likely meant to defer Unlock instead`, + Since: "2017.1", + }, + + "SA3000": &lint.Documentation{ + Title: `TestMain doesn't call os.Exit, hiding test failures`, + Text: `Test executables (and in turn 'go test') exit with a non-zero status code if any tests failed. When specifying your own TestMain function, it is your responsibility to arrange for this, by calling os.Exit with the correct code. The correct code is returned by (*testing.M).Run, so the usual way of implementing TestMain is to end it with -os.Exit(m.Run()). - -Available since - 2017.1 -` - -var docSA3001 = `Assigning to b.N in benchmarks distorts the results +os.Exit(m.Run()).`, + Since: "2017.1", + }, -The testing package dynamically sets b.N to improve the reliability of + "SA3001": &lint.Documentation{ + Title: `Assigning to b.N in benchmarks distorts the results`, + Text: `The testing package dynamically sets b.N to improve the reliability of benchmarks and uses it in computations to determine the duration of a single operation. Benchmark code must not alter b.N as this would -falsify results. - -Available since - 2017.1 -` - -var docSA4000 = `Boolean expression has identical expressions on both sides - -Available since - 2017.1 -` - -var docSA4001 = `&*x gets simplified to x, it does not copy x - -Available since - 2017.1 -` - -var docSA4002 = `Comparing strings with known different sizes has predictable results - -Available since - 2017.1 -` - -var docSA4003 = `Comparing unsigned values against negative values is pointless - -Available since - 2017.1 -` - -var docSA4004 = `The loop exits unconditionally after one iteration - -Available since - 2017.1 -` - -//lint:ignore U1000 This check is currently disabled -var docSA4005 = `Field assignment that will never be observed. Did you mean to use a pointer receiver? - -Available since - 2017.1 -` - -var docSA4006 = `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code? - -Available since - 2017.1 -` - -var docSA4008 = `The variable in the loop condition never changes, are you incrementing the wrong variable? - -Available since - 2017.1 -` - -var docSA4009 = `A function argument is overwritten before its first use - -Available since - 2017.1 -` - -var docSA4010 = `The result of append will never be observed anywhere - -Available since - 2017.1 -` - -var docSA4011 = `Break statement with no effect. Did you mean to break out of an outer loop? - -Available since - 2017.1 -` - -var docSA4012 = `Comparing a value against NaN even though no value is equal to NaN - -Available since - 2017.1 -` - -var docSA4013 = `Negating a boolean twice (!!b) is the same as writing b. This is either redundant, or a typo. 
- -Available since - 2017.1 -` - -var docSA4014 = `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either - -Available since - 2017.1 -` - -var docSA4015 = `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful - -Available since - 2017.1 -` - -var docSA4016 = `Certain bitwise operations, such as x ^ 0, do not do anything useful - -Available since - 2017.1 -` - -var docSA4017 = `A pure function's return value is discarded, making the call pointless - -Available since - 2017.1 -` - -var docSA4018 = `Self-assignment of variables - -Available since - 2017.1 -` - -var docSA4019 = `Multiple, identical build constraints in the same file - -Available since - 2017.1 -` - -var docSA4020 = `Unreachable case clause in a type switch - -In a type switch like the following +falsify results.`, + Since: "2017.1", + }, + + "SA4000": &lint.Documentation{ + Title: `Boolean expression has identical expressions on both sides`, + Since: "2017.1", + }, + + "SA4001": &lint.Documentation{ + Title: `&*x gets simplified to x, it does not copy x`, + Since: "2017.1", + }, + + "SA4002": &lint.Documentation{ + Title: `Comparing strings with known different sizes has predictable results`, + Since: "2017.1", + }, + + "SA4003": &lint.Documentation{ + Title: `Comparing unsigned values against negative values is pointless`, + Since: "2017.1", + }, + + "SA4004": &lint.Documentation{ + Title: `The loop exits unconditionally after one iteration`, + Since: "2017.1", + }, + + "SA4005": &lint.Documentation{ + Title: `Field assignment that will never be observed. Did you mean to use a pointer receiver?`, + Since: "2017.1", + }, + + "SA4006": &lint.Documentation{ + Title: `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?`, + Since: "2017.1", + }, + + "SA4008": &lint.Documentation{ + Title: `The variable in the loop condition never changes, are you incrementing the wrong variable?`, + Since: "2017.1", + }, + + "SA4009": &lint.Documentation{ + Title: `A function argument is overwritten before its first use`, + Since: "2017.1", + }, + + "SA4010": &lint.Documentation{ + Title: `The result of append will never be observed anywhere`, + Since: "2017.1", + }, + + "SA4011": &lint.Documentation{ + Title: `Break statement with no effect. Did you mean to break out of an outer loop?`, + Since: "2017.1", + }, + + "SA4012": &lint.Documentation{ + Title: `Comparing a value against NaN even though no value is equal to NaN`, + Since: "2017.1", + }, + + "SA4013": &lint.Documentation{ + Title: `Negating a boolean twice (!!b) is the same as writing b. 
This is either redundant, or a typo.`, + Since: "2017.1", + }, + + "SA4014": &lint.Documentation{ + Title: `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either`, + Since: "2017.1", + }, + + "SA4015": &lint.Documentation{ + Title: `Calling functions like math.Ceil on floats converted from integers doesn't do anything useful`, + Since: "2017.1", + }, + + "SA4016": &lint.Documentation{ + Title: `Certain bitwise operations, such as x ^ 0, do not do anything useful`, + Since: "2017.1", + }, + + "SA4017": &lint.Documentation{ + Title: `A pure function's return value is discarded, making the call pointless`, + Since: "2017.1", + }, + + "SA4018": &lint.Documentation{ + Title: `Self-assignment of variables`, + Since: "2017.1", + }, + + "SA4019": &lint.Documentation{ + Title: `Multiple, identical build constraints in the same file`, + Since: "2017.1", + }, + + "SA4020": &lint.Documentation{ + Title: `Unreachable case clause in a type switch`, + Text: `In a type switch like the following type T struct{} func (T) Read(b []byte) (int, error) { return 0, nil } @@ -528,55 +463,46 @@ the following type switch will have an unreachable case clause: } T will always match before V because they are structurally equivalent -and therefore doSomething()'s return value implements both. - -Available since - Unreleased -` - -var docSA4021 = `x = append(y) is equivalent to x = y - -Available since - Unreleased -` - -var docSA5000 = `Assignment to nil map - -Available since - 2017.1 -` - -var docSA5001 = `Defering Close before checking for a possible error - -Available since - 2017.1 -` - -var docSA5002 = `The empty for loop (for {}) spins and can block the scheduler - -Available since - 2017.1 -` - -var docSA5003 = `Defers in infinite loops will never execute - -Defers are scoped to the surrounding function, not the surrounding +and therefore doSomething()'s return value implements both.`, + Since: "2019.2", + }, + + "SA4021": &lint.Documentation{ + Title: `x = append(y) is equivalent to x = y`, + Since: "2019.2", + }, + + "SA5000": &lint.Documentation{ + Title: `Assignment to nil map`, + Since: "2017.1", + }, + + "SA5001": &lint.Documentation{ + Title: `Defering Close before checking for a possible error`, + Since: "2017.1", + }, + + "SA5002": &lint.Documentation{ + Title: `The empty for loop (for {}) spins and can block the scheduler`, + Since: "2017.1", + }, + + "SA5003": &lint.Documentation{ + Title: `Defers in infinite loops will never execute`, + Text: `Defers are scoped to the surrounding function, not the surrounding block. In a function that never returns, i.e. one containing an -infinite loop, defers will never execute. - -Available since - 2017.1 -` - -var docSA5004 = `for { select { ... with an empty default branch spins - -Available since - 2017.1 -` - -var docSA5005 = `The finalizer references the finalized object, preventing garbage collection - -A finalizer is a function associated with an object that runs when the +infinite loop, defers will never execute.`, + Since: "2017.1", + }, + + "SA5004": &lint.Documentation{ + Title: `for { select { ... 
with an empty default branch spins`, + Since: "2017.1", + }, + + "SA5005": &lint.Documentation{ + Title: `The finalizer references the finalized object, preventing garbage collection`, + Text: `A finalizer is a function associated with an object that runs when the garbage collector is ready to collect said object, that is when the object is no longer referenced by anything. @@ -586,22 +512,18 @@ collector from collecting the object. The finalizer will never run, and the object will never be collected, leading to a memory leak. That is why the finalizer should instead use its first argument to operate on the object. That way, the number of references can temporarily go -to zero before the object is being passed to the finalizer. - -Available since - 2017.1 -` - -//lint:ignore U1000 This check is currently disabled -var docSA5006 = `Slice index out of bounds - -Available since - 2017.1 -` - -var docSA5007 = `Infinite recursive call - -A function that calls itself recursively needs to have an exit +to zero before the object is being passed to the finalizer.`, + Since: "2017.1", + }, + + "SA5006": &lint.Documentation{ + Title: `Slice index out of bounds`, + Since: "2017.1", + }, + + "SA5007": &lint.Documentation{ + Title: `Infinite recursive call`, + Text: `A function that calls itself recursively needs to have an exit condition. Otherwise it will recurse forever, until the system runs out of memory. @@ -609,33 +531,29 @@ This issue can be caused by simple bugs such as forgetting to add an exit condition. It can also happen "on purpose". Some languages have tail call optimization which makes certain infinite recursive calls safe to use. Go, however, does not implement TCO, and as such a loop -should be used instead. - -Available since - 2017.1 -` - -var docSA5008 = `Invalid struct tag - -Available since - Unreleased -` - -var docSA5009 = `Invalid Printf call +should be used instead.`, + Since: "2017.1", + }, -Available since - Unreleased -` + "SA5008": &lint.Documentation{ + Title: `Invalid struct tag`, + Since: "2019.2", + }, -var docSA6000 = `Using regexp.Match or related in a loop, should use regexp.Compile + "SA5009": &lint.Documentation{ + Title: `Invalid Printf call`, + Since: "2019.2", + }, -Available since - 2017.1 -` + "SA6000": &lint.Documentation{ + Title: `Using regexp.Match or related in a loop, should use regexp.Compile`, + Since: "2017.1", + }, -var docSA6001 = `Missing an optimization opportunity when indexing maps by byte slices + "SA6001": &lint.Documentation{ + Title: `Missing an optimization opportunity when indexing maps by byte slices`, -Map keys must be comparable, which precludes the use of byte slices. + Text: `Map keys must be comparable, which precludes the use of byte slices. This usually leads to using string keys and converting byte slices to strings. @@ -658,15 +576,13 @@ because the first version needs to copy and allocate, while the second one does not. For some history on this optimization, check out commit -f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository. +f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docSA6002 = `Storing non-pointer values in sync.Pool allocates memory - -A sync.Pool is used to avoid unnecessary allocations and reduce the + "SA6002": &lint.Documentation{ + Title: `Storing non-pointer values in sync.Pool allocates memory`, + Text: `A sync.Pool is used to avoid unnecessary allocations and reduce the amount of work the garbage collector has to do. 
When passing a value that is not a pointer to a function that accepts @@ -677,15 +593,13 @@ an array). In order to avoid the extra allocation, one should store a pointer to the slice instead. See the comments on https://go-review.googlesource.com/c/go/+/24371 -that discuss this problem. - -Available since - 2017.1 -` - -var docSA6003 = `Converting a string to a slice of runes before ranging over it +that discuss this problem.`, + Since: "2017.1", + }, -You may want to loop over the runes in a string. Instead of converting + "SA6003": &lint.Documentation{ + Title: `Converting a string to a slice of runes before ranging over it`, + Text: `You may want to loop over the runes in a string. Instead of converting the string to a slice of runes and looping over that, you can loop over the string itself. That is, @@ -701,15 +615,13 @@ and avoid unnecessary memory allocations. Do note that if you are interested in the indices, ranging over a string and over a slice of runes will yield different indices. The first one yields byte offsets, while the second one yields indices in -the slice of runes. +the slice of runes.`, + Since: "2017.1", + }, -Available since - 2017.1 -` - -var docSA6005 = `Inefficient string comparison with strings.ToLower or strings.ToUpper - -Converting two strings to the same case and comparing them like so + "SA6005": &lint.Documentation{ + Title: `Inefficient string comparison with strings.ToLower or strings.ToUpper`, + Text: `Converting two strings to the same case and comparing them like so if strings.ToLower(s1) == strings.ToLower(s2) { ... @@ -727,33 +639,29 @@ strings and can return as soon as the first non-matching character has been found. For a more in-depth explanation of this issue, see -https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/ - -Available since - Unreleased -` - -var docSA9001 = `Defers in range loops may not run when you expect them to - -Available since - 2017.1 -` +https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/`, + Since: "2019.2", + }, -var docSA9002 = `Using a non-octal os.FileMode that looks like it was meant to be in octal. + "SA9001": &lint.Documentation{ + Title: `Defers in range loops may not run when you expect them to`, + Since: "2017.1", + }, -Available since - 2017.1 -` + "SA9002": &lint.Documentation{ + Title: `Using a non-octal os.FileMode that looks like it was meant to be in octal.`, + Since: "2017.1", + }, -var docSA9003 = `Empty body in an if or else branch + "SA9003": &lint.Documentation{ + Title: `Empty body in an if or else branch`, + Since: "2017.1", + }, -Available since - 2017.1 -` + "SA9004": &lint.Documentation{ + Title: `Only the first constant has an explicit type`, -var docSA9004 = `Only the first constant has an explicit type - -In a constant declaration such as the following: + Text: `In a constant declaration such as the following: const ( First byte = 1 @@ -838,22 +746,19 @@ This code will output an enum 2 -as EnumSecond has no explicit type, and thus defaults to int. 
- -Available since - 2019.1 -` +as EnumSecond has no explicit type, and thus defaults to int.`, + Since: "2019.1", + }, -var docSA9005 = `Trying to marshal a struct with no public fields nor custom marshaling - -The encoding/json and encoding/xml packages only operate on exported + "SA9005": &lint.Documentation{ + Title: `Trying to marshal a struct with no public fields nor custom marshaling`, + Text: `The encoding/json and encoding/xml packages only operate on exported fields in structs, not unexported ones. It is usually an error to try to (un)marshal structs that only consist of unexported fields. This check will not flag calls involving types that define custom marshaling behavior, e.g. via MarshalJSON methods. It will also not -flag empty structs. - -Available since - Unreleased -` +flag empty structs.`, + Since: "2019.2", + }, +} diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go index 8a205e39d..5d8373f71 100644 --- a/stylecheck/analysis.go +++ b/stylecheck/analysis.go @@ -21,90 +21,90 @@ var Analyzers = map[string]*analysis.Analyzer{ "ST1000": { Name: "ST1000", Run: CheckPackageComment, - Doc: docST1000, + Doc: Docs["ST1000"].String(), Requires: []*analysis.Analyzer{}, Flags: newFlagSet(), }, "ST1001": { Name: "ST1001", Run: CheckDotImports, - Doc: docST1001, + Doc: Docs["ST1001"].String(), Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer}, Flags: newFlagSet(), }, "ST1003": { Name: "ST1003", Run: CheckNames, - Doc: docST1003, + Doc: Docs["ST1003"].String(), Requires: []*analysis.Analyzer{facts.Generated, config.Analyzer}, Flags: newFlagSet(), }, "ST1005": { Name: "ST1005", Run: CheckErrorStrings, - Doc: docST1005, + Doc: Docs["ST1005"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "ST1006": { Name: "ST1006", Run: CheckReceiverNames, - Doc: docST1006, + Doc: Docs["ST1006"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "ST1008": { Name: "ST1008", Run: CheckErrorReturn, - Doc: docST1008, + Doc: Docs["ST1008"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "ST1011": { Name: "ST1011", Run: CheckTimeNames, - Doc: docST1011, + Doc: Docs["ST1011"].String(), Flags: newFlagSet(), }, "ST1012": { Name: "ST1012", Run: CheckErrorVarNames, - Doc: docST1012, + Doc: Docs["ST1012"].String(), Requires: []*analysis.Analyzer{config.Analyzer}, Flags: newFlagSet(), }, "ST1013": { Name: "ST1013", Run: CheckHTTPStatusCodes, - Doc: docST1013, + Doc: Docs["ST1013"].String(), Requires: []*analysis.Analyzer{facts.Generated, facts.TokenFile, config.Analyzer}, Flags: newFlagSet(), }, "ST1015": { Name: "ST1015", Run: CheckDefaultCaseOrder, - Doc: docST1015, + Doc: Docs["ST1015"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, "ST1016": { Name: "ST1016", Run: CheckReceiverNamesIdentical, - Doc: docST1016, + Doc: Docs["ST1016"].String(), Requires: []*analysis.Analyzer{buildssa.Analyzer}, Flags: newFlagSet(), }, "ST1017": { Name: "ST1017", Run: CheckYodaConditions, - Doc: docST1017, + Doc: Docs["ST1017"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer, facts.Generated, facts.TokenFile}, Flags: newFlagSet(), }, "ST1018": { Name: "ST1018", Run: CheckInvisibleCharacters, - Doc: docST1018, + Doc: Docs["ST1018"].String(), Requires: []*analysis.Analyzer{inspect.Analyzer}, Flags: newFlagSet(), }, diff --git a/stylecheck/doc.go b/stylecheck/doc.go index 10a28e257..9097214d9 100644 --- a/stylecheck/doc.go 
+++ b/stylecheck/doc.go @@ -1,18 +1,20 @@ package stylecheck -var docST1000 = `Incorrect or missing package comment +import "honnef.co/go/tools/lint" -Packages must have a package comment that is formatted according to +var Docs = map[string]*lint.Documentation{ + "ST1000": &lint.Documentation{ + Title: `Incorrect or missing package comment`, + Text: `Packages must have a package comment that is formatted according to the guidelines laid out in -https://github.com/golang/go/wiki/CodeReviewComments#package-comments. +https://github.com/golang/go/wiki/CodeReviewComments#package-comments.`, + Since: "2019.1", + NonDefault: true, + }, -Available since - 2019.1, non-default -` - -var docST1001 = `Dot imports are discouraged - -Dot imports that aren't in external test packages are discouraged. + "ST1001": &lint.Documentation{ + Title: `Dot imports are discouraged`, + Text: `Dot imports that aren't in external test packages are discouraged. The dot_import_whitelist option can be used to whitelist certain imports. @@ -35,36 +37,29 @@ Quoting Go Code Review Comments: it is not. Except for this one case, do not use import . in your programs. It makes the programs much harder to read because it is unclear whether a name like Quux is a top-level identifier in the - current package or in an imported package. - -Available since - 2019.1 - -Options - dot_import_whitelist -` - -var docST1003 = `Poorly chosen identifier + current package or in an imported package.`, + Since: "2019.1", + Options: []string{"dot_import_whitelist"}, + }, -Identifiers, such as variable and package names, follow certain rules. + "ST1003": &lint.Documentation{ + Title: `Poorly chosen identifier`, + Text: `Identifiers, such as variable and package names, follow certain rules. See the following links for details: - http://golang.org/doc/effective_go.html#package-names - http://golang.org/doc/effective_go.html#mixed-caps - https://github.com/golang/go/wiki/CodeReviewComments#initialisms - https://github.com/golang/go/wiki/CodeReviewComments#variable-names - -Available since - 2019.1, non-default - -Options - initialisms -` - -var docST1005 = `Incorrectly formatted error string - -Error strings follow a set of guidelines to ensure uniformity and good +- https://golang.org/doc/effective_go.html#package-names +- https://golang.org/doc/effective_go.html#mixed-caps +- https://github.com/golang/go/wiki/CodeReviewComments#initialisms +- https://github.com/golang/go/wiki/CodeReviewComments#variable-names`, + Since: "2019.1", + NonDefault: true, + Options: []string{"initialisms"}, + }, + + "ST1005": &lint.Documentation{ + Title: `Incorrectly formatted error string`, + Text: `Error strings follow a set of guidelines to ensure uniformity and good composability. Quoting Go Code Review Comments: @@ -74,15 +69,13 @@ Quoting Go Code Review Comments: usually printed following other context. That is, use fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so that log.Printf("Reading %s: %v", filename, err) formats without a - spurious capital letter mid-message. 
+ spurious capital letter mid-message.`, + Since: "2019.1", + }, -Available since - 2019.1 -` - -var docST1006 = `Poorly chosen receiver name - -Quoting Go Code Review Comments: + "ST1006": &lint.Documentation{ + Title: `Poorly chosen receiver name`, + Text: `Quoting Go Code Review Comments: The name of a method's receiver should be a reflection of its identity; often a one or two letter abbreviation of its type @@ -94,83 +87,68 @@ Quoting Go Code Review Comments: documentary purpose. It can be very short as it will appear on almost every line of every method of the type; familiarity admits brevity. Be consistent, too: if you call the receiver "c" in one - method, don't call it "cl" in another. - -Available since - 2019.1 -` - -var docST1008 = `A function's error value should be its last return value - -A function's error value should be its last return value. - -Available since - 2019.1 -` - -var docST1011 = `Poorly chosen name for variable of type time.Duration - -time.Duration values represent an amount of time, which is represented + method, don't call it "cl" in another.`, + Since: "2019.1", + }, + + "ST1008": &lint.Documentation{ + Title: `A function's error value should be its last return value`, + Text: `A function's error value should be its last return value.`, + Since: `2019.1`, + }, + + "ST1011": &lint.Documentation{ + Title: `Poorly chosen name for variable of type time.Duration`, + Text: `time.Duration values represent an amount of time, which is represented as a count of nanoseconds. An expression like 5 * time.Microsecond yields the value 5000. It is therefore not appropriate to suffix a variable of type time.Duration with any time unit, such as Msec or -Milli. - -Available since - 2019.1 -` - -var docST1012 = `Poorly chosen name for error variable - -Error variables that are part of an API should be called errFoo or -ErrFoo. - -Available since - 2019.1 -` - -var docST1013 = `Should use constants for HTTP error codes, not magic numbers - -HTTP has a tremendous number of status codes. While some of those are +Milli.`, + Since: `2019.1`, + }, + + "ST1012": &lint.Documentation{ + Title: `Poorly chosen name for error variable`, + Text: `Error variables that are part of an API should be called errFoo or +ErrFoo.`, + Since: "2019.1", + }, + + "ST1013": &lint.Documentation{ + Title: `Should use constants for HTTP error codes, not magic numbers`, + Text: `HTTP has a tremendous number of status codes. While some of those are well known (200, 400, 404, 500), most of them are not. The net/http package provides constants for all status codes that are part of the various specifications. It is recommended to use these constants instead of hard-coding magic numbers, to vastly improve the -readability of your code. 
- -Available since - 2019.1 - -Options - http_status_code_whitelist -` - -var docST1015 = `A switch's default case should be the first or last case - -Available since - 2019.1 -` - -var docST1016 = `Use consistent method receiver names - -Available since - 2019.1, non-default -` - -var docST1017 = `Don't use Yoda conditions - -Yoda conditions are conditions of the kind 'if 42 == x', where the +readability of your code.`, + Since: "2019.1", + Options: []string{"http_status_code_whitelist"}, + }, + + "ST1015": &lint.Documentation{ + Title: `A switch's default case should be the first or last case`, + Since: "2019.1", + }, + + "ST1016": &lint.Documentation{ + Title: `Use consistent method receiver names`, + Since: "2019.1", + NonDefault: true, + }, + + "ST1017": &lint.Documentation{ + Title: `Don't use Yoda conditions`, + Text: `Yoda conditions are conditions of the kind 'if 42 == x', where the literal is on the left side of the comparison. These are a common idiom in languages in which assignment is an expression, to avoid bugs of the kind 'if (x = 42)'. In Go, which doesn't allow for this kind of -bug, we prefer the more idiomatic 'if x == 42'. - -Available since - Unreleased -` - -var docST1018 = `Avoid zero-width and control characters in string literals - -Available since - Unreleased -` +bug, we prefer the more idiomatic 'if x == 42'.`, + Since: "2019.2", + }, + + "ST1018": &lint.Documentation{ + Title: `Avoid zero-width and control characters in string literals`, + Since: "2019.2", + }, +} From 3b25d9a922b2e97bbbcb68af9d2285dae127c4de Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 18:54:40 +0200 Subject: [PATCH 235/254] doc: slight tweaks to release notes --- doc/2017.2.html | 4 +- doc/2019.2.html | 201 +++++++++++++++++++++++------------------------- 2 files changed, 100 insertions(+), 105 deletions(-) diff --git a/doc/2017.2.html b/doc/2017.2.html index a6b522ad7..6e982d341 100644 --- a/doc/2017.2.html +++ b/doc/2017.2.html @@ -41,8 +41,9 @@

    Output formats

    output format. The other is json, which emits JSON. The output is a stream of objects, allowing for a future streaming output mode. Each object uses the following example schema: +

    -
    {
    +
    {
       "checker": "staticcheck",
       "code": "SA4006",
       "location": {
    @@ -53,7 +54,6 @@ 

    Output formats

    "message": "this value of err is never used", "ignored": false }
    -

    Control over the exit code of megacheck

    diff --git a/doc/2019.2.html b/doc/2019.2.html index ff39d8a66..3b54d14c5 100644 --- a/doc/2019.2.html +++ b/doc/2019.2.html @@ -25,64 +25,61 @@

    Performance improvements

    We've observed reductions in memory usage of 2x to 8x when checking large code bases. Due to the reduced load on the garbage collector, the execution time often improves, too.

    -

    - - - - - - - - - - - - - - - - - - - - - - - - +
    Package                                   | 2019.1.1            | 2019.2¹             | Change
    net/http                                  | 3.543 s / 677 MB    | 3.747 s / 254 MB    | +5.76% / -62.48%
    strconv                                   | 1.628 s / 294 MB    | 1.678 s / 118 MB    | +3.07% / -59.86%
    image/color                               | 1.304 s / 225 MB    | 1.702 s / 138 MB    | +30.52% / -38.67%
    std                                       | 26.234 s / 3987 MB  | 19.444 s / 1054 MB  | -25.88% / -73.56%
    github.com/cockroachdb/cockroach/pkg/...  | 88.644 s / 15959 MB | 93.798 s / 4156 MB  | +5.81% / -73.96%

    ¹: The fact cache was empty for all benchmarks.

    In addition, staticcheck now employs caching to speed up repeated checking of packages. @@ -91,53 +88,51 @@

    Performance improvements

    This makes staticcheck behave a lot more like go build, where repeated builds are much faster.

    -

    - - - - - - - - - - +
    Package                                   | Uncached            | Cached              | Change
    net/http                                  | 3.747 s / 254 MB    | 1.545 s / 195 MB    | -58.77% / -23.23%
    strconv                                   | 1.678 s / 118 MB    | 0.495 s / 57 MB     | -70.5% / -51.69%
    image/color                               | 1.702 s / 138 MB    | 0.329 s / 31 MB     | -80.67% / -77.54%
    std                                       | 19.444 s / 1054 MB  | 15.099 s / 887 MB   | -22.35% / -15.84%
    github.com/cockroachdb/cockroach/pkg/...  | 93.798 s / 4156 MB  | 47.205 s / 2516 MB  | -49.67% / -39.46%

    This combination of improvements not only compensates for the @@ -252,8 +247,10 @@

    -debug.version

    Enabling unused's whole program mode

    -When we merged unused into staticcheck, we lost the ability to specify the -exported flag to report unused exported identifiers. -Staticcheck 2019.2 restores this ability with the new -unused.whole-program flag. +

    + When we merged unused into staticcheck, we lost the ability to specify the -exported flag to report unused exported identifiers. + Staticcheck 2019.2 restores this ability with the new -unused.whole-program flag. +

    Range information in diagnostics

    @@ -289,7 +286,7 @@

    Installing staticcheck as a module

    To download the master branch, use go get honnef.co/go/tools/cmd/staticcheck@master

    -

    Removal of functionality

    +

    Removal of deprecated functionality

    Staticcheck 2019.1 deprecated the unused, gosimple, and megacheck @@ -299,9 +296,7 @@

    Removal of functionality

    This release no longer includes these deprecated utilities, nor does - it provide the deprecated flag. Additionally, the errcheck utility - has been removed. It has never been an official or production-ready - part of staticcheck. + it provide the deprecated flag.

    Checks

    From 3945b7919a7479de0fafe0b621de132cee44985f Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 20:59:24 +0200 Subject: [PATCH 236/254] simple: improve documentation --- simple/doc.go | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/simple/doc.go b/simple/doc.go index f6c32b8d7..eb0072de5 100644 --- a/simple/doc.go +++ b/simple/doc.go @@ -4,7 +4,7 @@ import "honnef.co/go/tools/lint" var Docs = map[string]*lint.Documentation{ "S1000": &lint.Documentation{ - Title: `Use plain channel send or receive`, + Title: `Use plain channel send or receive instead of single-case select`, Text: `Select statements with a single case can be replaced with a simple send or receive. @@ -23,7 +23,7 @@ After: }, "S1001": &lint.Documentation{ - Title: `Replace with copy()`, + Title: `Replace for loop with call to copy`, Text: `Use copy() for copying elements from one slice to another. Before: @@ -51,7 +51,7 @@ After: }, "S1003": &lint.Documentation{ - Title: `Replace with strings.Contains`, + Title: `Replace call to strings.Index with strings.Contains`, Text: `Before: if strings.Index(x, y) != -1 {} @@ -63,7 +63,7 @@ After: }, "S1004": &lint.Documentation{ - Title: `Replace with bytes.Equal`, + Title: `Replace call to bytes.Compare with bytes.Equal`, Text: `Before: if bytes.Compare(x, y) == 0 {} @@ -93,7 +93,7 @@ After: }, "S1006": &lint.Documentation{ - Title: `Replace with for { ... }`, + Title: `Use for { ... } for infinite loops`, Text: `For infinite loops, using for { ... } is the most idiomatic choice.`, Since: "2017.1", }, @@ -170,7 +170,7 @@ After: }, "S1012": &lint.Documentation{ - Title: `Replace with time.Since(x)`, + Title: `Replace time.Now().Sub(x) with time.Since(x)`, Text: `The time.Since helper has the same effect as using time.Now().Sub(x) but is easier to read. @@ -185,7 +185,7 @@ After: }, "S1016": &lint.Documentation{ - Title: `Use a type conversion`, + Title: `Use a type conversion instead of manually copying struct fields`, Text: `Two struct types with identical fields can be converted between each other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during @@ -208,7 +208,7 @@ After: }, "S1017": &lint.Documentation{ - Title: `Replace with strings.TrimPrefix`, + Title: `Replace manual trimming with strings.TrimPrefix`, Text: `Instead of using strings.HasPrefix and manual slicing, use the strings.TrimPrefix function. If the string doesn't start with the prefix, the original string will be returned. Using strings.TrimPrefix @@ -228,7 +228,7 @@ After: }, "S1018": &lint.Documentation{ - Title: `Replace with copy()`, + Title: `Use copy for sliding elements`, Text: `copy() permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a slice. @@ -246,7 +246,7 @@ After: }, "S1019": &lint.Documentation{ - Title: `Simplify make call`, + Title: `Simplify make call by omitting redundant arguments`, Text: `The make function has default values for the length and capacity arguments. For channels and maps, the length defaults to zero. Additionally, for slices the capacity defaults to the length.`, @@ -290,7 +290,7 @@ statement in a case block.`, }, "S1024": &lint.Documentation{ - Title: `Replace with time.Until(x)`, + Title: `Replace x.Sub(time.Now()) with time.Until(x)`, Text: `The time.Until helper has the same effect as using x.Sub(time.Now()) but is easier to read. 
@@ -337,7 +337,7 @@ to }, "S1028": &lint.Documentation{ - Title: `replace with fmt.Errorf`, + Title: `Simplify error construction with fmt.Errorf`, Text: `Before: errors.New(fmt.Sprintf(...)) @@ -349,7 +349,7 @@ After: }, "S1029": &lint.Documentation{ - Title: `Range over the string`, + Title: `Range over the string directly`, Text: `Ranging over a string will yield byte offsets and runes. If the offset isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the @@ -397,7 +397,7 @@ After: }, "S1032": &lint.Documentation{ - Title: `Replace with sort.Ints(x), sort.Float64s(x), sort.Strings(x)`, + Title: `Use sort.Ints(x), sort.Float64s(x), and sort.Strings(x)`, Text: `The sort.Ints, sort.Float64s and sort.Strings functions are easier to read than sort.Sort(sort.IntSlice(x)), sort.Sort(sort.Float64Slice(x)) and sort.Sort(sort.StringSlice(x)). From 34fc4656b19ab8e36066da86bf82da09d88959b7 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 21:01:59 +0200 Subject: [PATCH 237/254] Remove sponsor --- README.md | 1 - images/sponsors/digitalocean.png | Bin 3309 -> 0 bytes 2 files changed, 1 deletion(-) delete mode 100644 images/sponsors/digitalocean.png diff --git a/README.md b/README.md index aefbde692..1c71a715e 100644 --- a/README.md +++ b/README.md @@ -72,7 +72,6 @@ You can find extensive documentation on This project is sponsored by: -[DigitalOcean](https://digitalocean.com) [Fastly](https://fastly.com) [Uber](https://uber.com) diff --git a/images/sponsors/digitalocean.png b/images/sponsors/digitalocean.png deleted file mode 100644 index a53b8a689d673d700765d49385b3732fea15cf1f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 3309 zcmb7{c{|jL!^TJUJ!A@z7Fni|B@u?QGdL(5zGOF+8Id)+P%+kniLyiuN)t2q&M@{Z zTR4_6c9DI{lD*^U_Y$7#y06c5fBv{%z@1=bs?W_K%mINwxD5?-EFh3`z@Ls{WBfyi zdT`wzDw-KT(*2)CpZ&kW9|*ksvpOGOVfqkqKe}=2yqshIOo4c+Z@C$(d(<}wgv-WI z=fR_p@%7v>+=MvS{jp&5@3YaMoQKNEaxF0*GA>VjW$%JCbcn|>w}x|mcx1HVAcYU| zWNz)fJ_7wN_P(d@xxB!S+U9$O>(8CUnvNkCZW=brjdpf5ohR=Hgd+P=Te3&glp9b|V8XTkU#p%j_9oOr9psxOZnQRCSxGZb$i9$l&WvOQ{NP^KEQ$ z=jLlmimc)L>9EatbYd!e(U3i5^<7EQ6!t}98zHj0VV$MKqbRxIJzl_iz*lcFQ-r-& z=(bp!?%}mtuf2afo=|RzOI3*%NAZ)^K@UP{!n{t2rSy=KlE^IY_duV2W1~XPnqhqs zy&;vZs^u|FnZ~+*?S<1R4Rit`@~K8{0VZ1!#v;poNBW>00u#GxynJuTidK-P#}L+ulUQaED1ccPl&U9 zn5wJ12F+{EC_U5n2af1=^PMP4mPW*q!tpYhX1d0)SwyurYh)njg=e;yOrx6+QCl=*gP+qzQo z2C7RNOS*-WM9kqRSGOe2@2Xr(u;u-_oOJQ_Q*ZtK;qYD0t;pZ&_~;Ce)3PAFSydKCk2(R7xVEIEW(}AWS;PFL+i$FI5#<4^}JOC zReF?%d#?MUVW}aR=K`XtAsILspuGv*Dsi@X#&k=YvA!Grooi*CN)gPusCJ!4?|d;9+r_p5zKek($9GAW3H6G9j^ z(PmA2*p%zP62J6SYPjo1|6XnGmAf)qbz>7#qUHt@Qp!#(z)ARg$CnEEB87%k8~X@e zuZ+2WK=<*FZNit*xkTq*4k}|sBb;1Gl0KIAOaFTm354GT%KbR4>E=EQ5bnWTAYj$9 zA`VzAUdKPv6Ge6p2RqdU5tlawjjyYjdfmBhz*sCheaHiwdRki430HOV>w_PU#%|PB zI{+u`l@l7t7|ml-=}fxG>qi6n2e^c%fl@)q0Hp-k3ja(CB`-Vhf=gCH_Uq2dX|`2d z`LgjNEfUh?pKhL?b;GfJ{HLT?nWy<))1C+c))-V(} zeV0wgX;oGdUF0RIgK+pH_`g=dLqOgjWB>j_FVp8neZQ#6v@-sln*v3`h8lAb|zo4aGcORjd#u;E;Cx@*}%#9Yuu z{{!khQzuU3%MpL!{tq_BOD!h%{I?wR_{ zY+jN?#~A-e;7!U}gP>p(uNcP<)r^kj-V-|&Ju;+)HSfCIb|^Nx=DAY} zT67+L^mdb(w-kOSHd?RgjZIRFp-0T{V#)i~FSZk`e4*&00r3fA7px6_MK!5B;kY57 zSjo4#a=@zMdi8?BrNhlQ4+6g!3v?vL`^1#GpP;{%oRQlB9dh)^cYzUkzIRxdDKhb2 zNZ4R_^vab}(L_{~vZq!c3+nXjVp!=~IT0C~*TW4HwGS%XR%eG^MJom#y2OjkH5X-G zfnR;kZ35nsuUK3?tPLLdo57m!(CdNIBGGNL5n8TFcSe;@jcr!`Ol$7+oZFB21O-lk 
zvBV4)v{^!EfdNXX6UyhRUAbWDLmP>Z5B*B$fnUuRuH7;RRo-5$z3VIka41AV0cv+D zu{uwY!ZZ6Wcydv8yG*4#evBi-<&8Ouf<`~??SfiF8l2N-yO`hjp`*!PPCbit3P6}_ zXRi}y*d2BM>BJ)^P;05?;$;&)Qtnq;dgE(&3<*`fW3i5%TR?Wc?*+~Jqk+S5u8uwd2 zBX&j6g+%wTt#nIapqmS*P(gg^WOR8mv5a%-;q#ec0M8T9B4|$o)}|z)C9xxU-32Cf zPWH2W{*w==db(zd+dX&54@&fLe?vcEpo5{Z6Y9LF)XUV07BZOmrNKAIM@7u{j0Vu( zYR)}m8EQXAVWn`(>l2xPtmGFtZ8+$%Qmbl~%Iibe(iner%~r@O_=eh-v>8i$N3Bv& z@^R7#2b`sv>Z9H1%K=zD?o#i=UGEd)CJM$qcdk->r0sh{R^bl7Wv!7rAm+%4d03rc zt@+`ehL!Guum%Crx|xSvUZhTvP=C!sat4pMi%ZacPNK(6N>i`Mk`*0u}8S z7nOf10AfWWYiPdRIssPH6}7&&z{S9WH&Owya|0aG7jP5x&)f$or{UmwT)%&}aRSJ0 zK=%aS8IU!KLWm0tsu=r{C{7buTTrm)2j)<%@nlW%o&QM3?W-uUVrfei-hdCe1^qaT z=w@C&jV7RcL$X4`>z8%#Xp8;|HKJr0iWlq>5tbMp&4la^K5)^BG~(D!_hA69{TrMy zHg3ZUjHgxx4zD&QA1-)OmV<_WRDRq&t^RDX3#-5j_(?X~fwPpR^)U3nY|HVk5Sln^ zdh3;BbAWJ11T4=AMrudMH}|@E4<$Kr*7`)l&C41uyK}yjKx7Z_6u$gLGd&P0tr7!j z%>%v`S4uonQ#n`blLp^NQ2VEY-ZE>MIm{a#Kh5mHbS*CkOT_DzdG?~g5BPIWRBu}L zPoVjbsZ0;odu_NLdF*cS%IG~`^z-&bE)?gt;l?h=OZU1WF`Y6bX3~xiby`5=m8st8 zS)qc#t4MHwA=N@l@q(#gT*jFhVkN2PU4VKl{Q7058>9sEJxWb|2h?Z+`?3i?H9(A^ zppW{f0eB|_y718IOBFLIlrnklZj71eN0z_NR72NF7?gJsZ8P~2_9zc5Q2ET z=$kze;d+uLx=P$ez|}J;+i!YqJj5>EW)Cl{Lq2kjjX)B+X1_!J74TX|L{9bd4h`~M z2Yy(sxj{k~__{tBsg?mm!WybzP}ivM_Y zb#~OQ)Q59J8n_y*VldU!!oYNeZdLtr{{wx`G(uIh;7r`wM%41C1^Q!k@j2>oSnQQz zK5$MegJu%KWdwH?{7(wehqLP#JeCx7DEWNWDCUNgU4L*F i2&m0_kc62-Ads?T==q+~9Q^-&i=nQm&KE7m=l=&P#(Mn# From 1a665cc4863a8d23955176924c8d1655e3a96de9 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 22:14:41 +0200 Subject: [PATCH 238/254] doc: adjust header depth --- doc/staticcheck.html | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/doc/staticcheck.html b/doc/staticcheck.html index 06a6d3fa3..4c903e415 100644 --- a/doc/staticcheck.html +++ b/doc/staticcheck.html @@ -3,7 +3,7 @@ )--> -

    Overview

    +

    Overview

    Staticcheck is a static analysis toolset for the Go programming language. @@ -12,7 +12,7 @@

    Overview

    and offers enough customizability to fit into your workflows.

    -

    Installation

    +

    Installation

    There are various ways in which you can install staticcheck, @@ -41,7 +41,7 @@

    Installation

    Releases only watches.

    -

    Running staticcheck

    +

    Running staticcheck

    Staticcheck can be run on code in several ways, @@ -66,7 +66,7 @@

    Running staticcheck

    similar to how go build works.

    -

    Configuration

    +

    Configuration

    Various aspects of staticcheck can be customized with configuration files. @@ -82,7 +82,7 @@

    Configuration

    Staticcheck's default configuration is represented as the virtual root of the configuration tree and can be inherited from.

    -

    Configuration format

    +

    Configuration format

    Staticcheck configuration files are named staticcheck.conf and contain TOML. @@ -108,13 +108,13 @@

    Configuration format

    or in combination with "inherit" to remove values from the inherited option.

    -

    Options

    +

    Options

    A list of all options and their explanations can be found on the Options page.

    -

    Example configuration

    +

    Example configuration

    The following example configuration is the textual representation of staticcheck's default configuration. @@ -131,7 +131,7 @@

    Example configuration

    {{ option "dot_import_whitelist" }} = [] {{ option "http_status_code_whitelist" }} = ["200", "400", "404", "500"]
    -

    Command-line flags

    +

    Command-line flags

    In addition to configuration files, some aspects of staticcheck can be controlled via command-line flags. @@ -216,7 +216,7 @@

    Command-line flags

    -

    Targeting Go versions

    +

    Targeting Go versions

    By default, staticcheck will make suggestions that are correct for the current version of Go. @@ -231,7 +231,7 @@

    Targeting Go versions

    For example, with -go 1.6, only suggestions that are valid for Go 1.6 will be made.
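    To make the effect concrete, here is a sketch; the assumption that this particular suggestion (time.Until, added to the standard library in Go 1.8) is gated on the targeted version is made only for illustration.

        package demo

        import (
            "fmt"
            "time"
        )

        func report(deadline time.Time) {
            // With a sufficiently new -go target, staticcheck can suggest the
            // time.Until helper here; with -go 1.7 or older, a suggestion that
            // requires a newer standard library would simply not be made.
            // (Illustrative assumption, not taken from this patch.)
            fmt.Println("time left:", deadline.Sub(time.Now()))
        }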

    -

    Ignoring problems

    +

    Ignoring problems

    In general, you shouldn't have to ignore problems reported by staticcheck. @@ -246,7 +246,7 @@

    Ignoring problems

    For those rare cases, there are several ways of ignoring unwanted problems.

    -

    Line-based linter directives

    +

    Line-based linter directives

    The most fine-grained way of ignoring reported problems is to annotate the offending lines of code with linter directives. @@ -289,7 +289,7 @@

    Line-based linter directives

    }
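    For illustration, a minimal sketch of a line-based directive in context; the function and the check being silenced are assumptions chosen for the example.

        package pkg

        func fn() int {
            // The directive names the check to silence and must give a reason;
            // it applies to the node on the line that follows it.
            //lint:ignore SA4006 the redundant first assignment mirrors upstream generated code
            x := 1
            x = 2
            return x
        }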

    -
    Maintenance of linter directives
    +

    Maintenance of linter directives

    It is crucial to update or remove outdated linter directives when code has been changed. @@ -309,7 +309,7 @@

    Maintenance of linter directives
    will not cause directives to be considered unnecessary.

    -

    File-based linter directives

    +

    File-based linter directives

    In some cases, you may want to disable checks for an entire file. @@ -334,7 +334,7 @@

    File-based linter directives

    Unlike line-based directives, file-based ones will not be flagged for being unnecessary.
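    As a sketch of the file-based form, the directive lives in the file it applies to, conventionally near the package clause; the package and check shown here are illustrative assumptions.

        // Package oldclient keeps using an older API for backwards compatibility.
        package oldclient

        //lint:file-ignore SA1019 this file deliberately calls a deprecated API for compatibility

        import "fmt"

        func Dial(addr string) error {
            fmt.Println("dialing", addr)
            return nil
        }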

    -

    Resource usage

    +

    Resource usage

    Static analysis is a rather resource intensive process, @@ -377,6 +377,6 @@

    Resource usage

    -

    Checks

    +

    Checks

    A list of all checks can be found on the Checks page. From 4359a122b34db26888c6da98b85f2ac7aa0ebefa Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Thu, 6 Jun 2019 22:53:16 +0200 Subject: [PATCH 239/254] doc: fix typo --- staticcheck/doc.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/staticcheck/doc.go b/staticcheck/doc.go index d0ed8beae..4a87d4a24 100644 --- a/staticcheck/doc.go +++ b/staticcheck/doc.go @@ -433,7 +433,7 @@ Another example: Even though T has a Close method and thus implements io.ReadCloser, io.Reader will always match first. The method set of io.Reader is a subset of io.ReadCloser. Thus it is impossible to match the second -case without mtching the first case. +case without matching the first case. Structurally equivalent interfaces From bd260fed3277161ca57b61803855edc929aaae97 Mon Sep 17 00:00:00 2001 From: Dominik Honnef Date: Fri, 7 Jun 2019 01:30:49 +0200 Subject: [PATCH 240/254] unused: don't accidentally mark objects used by the wrong function --- unused/testdata/src/variables/vartype.go | 10 ++++++++++ unused/unused.go | 22 ++++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 unused/testdata/src/variables/vartype.go diff --git a/unused/testdata/src/variables/vartype.go b/unused/testdata/src/variables/vartype.go new file mode 100644 index 000000000..ede73ffa5 --- /dev/null +++ b/unused/testdata/src/variables/vartype.go @@ -0,0 +1,10 @@ +package pkg + +type t181025 struct{} + +func (t181025) F() {} + +// package-level variable after function declaration used to trigger a +// bug in unused. + +var V181025 t181025 diff --git a/unused/unused.go b/unused/unused.go index 6af85a77f..d6491bd78 100644 --- a/unused/unused.go +++ b/unused/unused.go @@ -1194,12 +1194,29 @@ func (g *Graph) entry(pkg *pkg) { ctx.seeAndUse(obj, nil, edgeUsedConstant) } + var fns []*types.Func var fn *types.Func + var stack []ast.Node for _, f := range pkg.Files { ast.Inspect(f, func(n ast.Node) bool { + if n == nil { + pop := stack[len(stack)-1] + stack = stack[:len(stack)-1] + if _, ok := pop.(*ast.FuncDecl); ok { + fns = fns[:len(fns)-1] + if len(fns) == 0 { + fn = nil + } else { + fn = fns[len(fns)-1] + } + } + return true + } + stack = append(stack, n) switch n := n.(type) { case *ast.FuncDecl: fn = pkg.TypesInfo.ObjectOf(n.Name).(*types.Func) + fns = append(fns, fn) ctx.see(fn) case *ast.GenDecl: switch n.Tok { @@ -1227,6 +1244,11 @@ func (g *Graph) entry(pkg *pkg) { if fn != nil { ctx.seeAndUse(T, fn, edgeVarDecl) } else { + // TODO(dh): we likely want to make + // the type used by the variable, not + // the package containing the + // variable. But then we have to take + // special care of blank identifiers. 
From 9fe8d4145fb1e9a1b23fe742deecf8273bb7af9e Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Fri, 7 Jun 2019 02:19:02 +0200
Subject: [PATCH 241/254] Add issue templates

---
 .github/ISSUE_TEMPLATE/1_false_positive.md | 17 +++++++++++++++++
 .github/ISSUE_TEMPLATE/2_false_negative.md | 17 +++++++++++++++++
 .github/ISSUE_TEMPLATE/3_bug.md            | 17 +++++++++++++++++
 .github/ISSUE_TEMPLATE/4_other.md          |  6 ++++++
 4 files changed, 57 insertions(+)
 create mode 100644 .github/ISSUE_TEMPLATE/1_false_positive.md
 create mode 100644 .github/ISSUE_TEMPLATE/2_false_negative.md
 create mode 100644 .github/ISSUE_TEMPLATE/3_bug.md
 create mode 100644 .github/ISSUE_TEMPLATE/4_other.md

diff --git a/.github/ISSUE_TEMPLATE/1_false_positive.md b/.github/ISSUE_TEMPLATE/1_false_positive.md
new file mode 100644
index 000000000..5e87614cd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/1_false_positive.md
@@ -0,0 +1,17 @@
+---
+name: 💢 False positive in staticcheck
+about: Your code is fine but staticcheck complains about it, anyway.
+labels: false-positive, needs-triage
+title: ""
+---
+
diff --git a/.github/ISSUE_TEMPLATE/2_false_negative.md b/.github/ISSUE_TEMPLATE/2_false_negative.md
new file mode 100644
index 000000000..ba290a33e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/2_false_negative.md
@@ -0,0 +1,17 @@
+---
+name: 🦆 False negative in staticcheck
+about: Your code is wrong but staticcheck doesn't complain about it.
+labels: false-negative, needs-triage
+title: ""
+---
+
diff --git a/.github/ISSUE_TEMPLATE/3_bug.md b/.github/ISSUE_TEMPLATE/3_bug.md
new file mode 100644
index 000000000..852721e76
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/3_bug.md
@@ -0,0 +1,17 @@
+---
+name: 🐞 General bugs with staticcheck
+about: Something in staticcheck isn't working as it should.
+labels: bug, needs-triage
+title: ""
+---
+
diff --git a/.github/ISSUE_TEMPLATE/4_other.md b/.github/ISSUE_TEMPLATE/4_other.md
new file mode 100644
index 000000000..a2b3089ec
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/4_other.md
@@ -0,0 +1,6 @@
+---
+name: 🛠 Other
+about: Ideas, feature requests, and all other issues not fitting into another category.
+labels: needs-triage
+title: ""
+---
From 91b34a6dad6c3aabea21270b2953579b48458732 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Fri, 7 Jun 2019 02:21:14 +0200
Subject: [PATCH 242/254] Improve issue templates

---
 .github/ISSUE_TEMPLATE/1_false_positive.md | 2 ++
 .github/ISSUE_TEMPLATE/2_false_negative.md | 2 ++
 .github/ISSUE_TEMPLATE/3_bug.md            | 2 ++
 3 files changed, 6 insertions(+)

diff --git a/.github/ISSUE_TEMPLATE/1_false_positive.md b/.github/ISSUE_TEMPLATE/1_false_positive.md
index 5e87614cd..e63f37301 100644
--- a/.github/ISSUE_TEMPLATE/1_false_positive.md
+++ b/.github/ISSUE_TEMPLATE/1_false_positive.md
@@ -9,6 +9,8 @@ Please make sure to include the following information in your issue report:
 - The output of 'staticcheck -version'
 - The output of 'staticcheck -debug.version' (it is fine if this command fails)
+- The output of 'go version'
+- The output of 'go env'
 - Exactly which command you ran
 - Output of the command and what's wrong with the output
 - Where we can read the code you're running staticcheck on
diff --git a/.github/ISSUE_TEMPLATE/2_false_negative.md b/.github/ISSUE_TEMPLATE/2_false_negative.md
index ba290a33e..57f53cdf8 100644
--- a/.github/ISSUE_TEMPLATE/2_false_negative.md
+++ b/.github/ISSUE_TEMPLATE/2_false_negative.md
@@ -9,6 +9,8 @@ Please make sure to include the following information in your issue report:
 - The output of 'staticcheck -version'
 - The output of 'staticcheck -debug.version' (it is fine if this command fails)
+- The output of 'go version'
+- The output of 'go env'
 - Exactly which command you ran
 - Output of the command and what's wrong with the output
 - Where we can read the code you're running staticcheck on
diff --git a/.github/ISSUE_TEMPLATE/3_bug.md b/.github/ISSUE_TEMPLATE/3_bug.md
index 852721e76..d8fd62fe1 100644
--- a/.github/ISSUE_TEMPLATE/3_bug.md
+++ b/.github/ISSUE_TEMPLATE/3_bug.md
@@ -9,6 +9,8 @@ Please make sure to include the following information in your issue report:
 - The output of 'staticcheck -version'
 - The output of 'staticcheck -debug.version' (it is fine if this command fails)
+- The output of 'go version'
+- The output of 'go env'
 - Exactly which command you ran
 - Output of the command and what's wrong with the output
 - Where we can read the code you're running staticcheck on
From 314105b5afc713036eb02c7f8676843bc8fb55a2 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Fri, 7 Jun 2019 19:00:52 +0200
Subject: [PATCH 243/254] stylecheck: don't flag receiver names in generated code

Generators may not be able to make sensible choices for receiver names
and default to undescriptive names such as "self". Also, it makes
little sense to special case "_" in code generation.
---
 stylecheck/analysis.go | 2 +-
 stylecheck/lint.go     | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/stylecheck/analysis.go b/stylecheck/analysis.go
index 5d8373f71..f252487f7 100644
--- a/stylecheck/analysis.go
+++ b/stylecheck/analysis.go
@@ -50,7 +50,7 @@ var Analyzers = map[string]*analysis.Analyzer{
 		Name:     "ST1006",
 		Run:      CheckReceiverNames,
 		Doc:      Docs["ST1006"].String(),
-		Requires: []*analysis.Analyzer{buildssa.Analyzer},
+		Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
 		Flags:    newFlagSet(),
 	},
 	"ST1008": {
diff --git a/stylecheck/lint.go b/stylecheck/lint.go
index 8b0031b3a..1699d5898 100644
--- a/stylecheck/lint.go
+++ b/stylecheck/lint.go
@@ -233,10 +233,10 @@ func CheckReceiverNames(pass *analysis.Pass) (interface{}, error) {
 				continue
 			}
 			if recv.Name() == "self" || recv.Name() == "this" {
-				pass.Reportf(recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
+				ReportfFG(pass, recv.Pos(), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`)
 			}
 			if recv.Name() == "_" {
-				pass.Reportf(recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused")
+				ReportfFG(pass, recv.Pos(), "receiver name should not be an underscore, omit the name if it is unused")
 			}
 		}
 	}
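To illustrate the kind of code this change is aimed at, here is a hypothetical generated file (the generator and type are invented for the example). Because the file carries the standard code-generation marker, ST1006 now stays quiet about the "self" receiver:

```go
// Code generated by hypothetical-gen. DO NOT EDIT.

package pkg

type Widget struct {
	name string
}

// In hand-written code ST1006 would ask for a descriptive receiver
// name, but files recognized as generated are no longer flagged.
func (self *Widget) Name() string {
	return self.name
}
```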
From 2ec2889002ffebf138e131649cb7d58c08b0cc61 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Fri, 7 Jun 2019 20:05:27 +0200
Subject: [PATCH 244/254] Add ability to detect specific code generators

---
 facts/generated.go      | 42 ++++++++++++++++++++++++++++++++---------
 lint/lint.go            |  5 ++++-
 lint/lintdsl/lintdsl.go | 20 ++++++++++++++------
 lint/runner.go          |  4 ++--
 4 files changed, 53 insertions(+), 18 deletions(-)

diff --git a/facts/generated.go b/facts/generated.go
index 9da7b4d86..1ed9563a3 100644
--- a/facts/generated.go
+++ b/facts/generated.go
@@ -6,10 +6,21 @@ import (
 	"io"
 	"os"
 	"reflect"
+	"strings"
 
 	"golang.org/x/tools/go/analysis"
 )
 
+type Generator int
+
+// A list of known generators we can detect
+const (
+	Unknown Generator = iota
+	Goyacc
+	Cgo
+	Stringer
+)
+
 var (
 	// used by cgo before Go 1.11
 	oldCgo = []byte("// Created by cgo - DO NOT EDIT")
@@ -19,44 +30,57 @@ var (
 	crnl = []byte("\r\n")
 )
 
-func isGenerated(path string) bool {
+func isGenerated(path string) (Generator, bool) {
 	f, err := os.Open(path)
 	if err != nil {
-		return false
+		return 0, false
 	}
 	defer f.Close()
 	br := bufio.NewReader(f)
 	for {
 		s, err := br.ReadBytes('\n')
 		if err != nil && err != io.EOF {
-			return false
+			return 0, false
 		}
 		s = bytes.TrimSuffix(s, crnl)
 		s = bytes.TrimSuffix(s, nl)
 		if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) {
-			return true
+			text := string(s[len(prefix) : len(s)-len(suffix)])
+			switch text {
+			case "by goyacc.":
+				return Goyacc, true
+			case "by cmd/cgo;":
+				return Cgo, true
+			}
+			if strings.HasPrefix(text, `by "stringer `) {
+				return Stringer, true
+			}
+			return Unknown, true
 		}
 		if bytes.Equal(s, oldCgo) {
-			return true
+			return Cgo, true
 		}
 		if err == io.EOF {
 			break
 		}
 	}
-	return false
+	return 0, false
 }
 
 var Generated = &analysis.Analyzer{
 	Name: "isgenerated",
 	Doc:  "annotate file names that have been code generated",
 	Run: func(pass *analysis.Pass) (interface{}, error) {
-		m := map[string]bool{}
+		m := map[string]Generator{}
 		for _, f := range pass.Files {
 			path := pass.Fset.PositionFor(f.Pos(), false).Filename
-			m[path] = isGenerated(path)
+			g, ok := isGenerated(path)
+			if ok {
+				m[path] = g
+			}
 		}
 		return m, nil
 	},
 	RunDespiteErrors: true,
-	ResultType:       reflect.TypeOf(map[string]bool{}),
+	ResultType:       reflect.TypeOf(map[string]Generator{}),
 }
diff --git a/lint/lint.go b/lint/lint.go
index d86d321b2..dfd8bbb76 100644
--- a/lint/lint.go
+++ b/lint/lint.go
@@ -254,7 +254,10 @@ func (l *Linter) Lint(cfg *packages.Config, patterns []string) ([]Problem, error
 			allowedChecks := FilterChecks(analyzers, pkg.cfg.Merge(l.Config).Checks)
 			if allowedChecks[cum.Analyzer().Name] {
 				pos := DisplayPosition(pkg.Fset, res.Pos())
-				if pkg.gen[pos.Filename] {
+				// FIXME(dh): why are we ignoring generated files
+				// here? Surely this is specific to 'unused', not all
+				// cumulative checkers
+				if _, ok := pkg.gen[pos.Filename]; ok {
 					continue
 				}
 				p := cum.ProblemObject(pkg.Fset, res)
diff --git a/lint/lintdsl/lintdsl.go b/lint/lintdsl/lintdsl.go
index df802cb41..3b939e95f 100644
--- a/lint/lintdsl/lintdsl.go
+++ b/lint/lintdsl/lintdsl.go
@@ -363,15 +363,23 @@ func File(pass *analysis.Pass, node lint.Positioner) *ast.File {
 // IsGenerated reports whether pos is in a generated file, It ignores
 // //line directives.
 func IsGenerated(pass *analysis.Pass, pos token.Pos) bool {
+	_, ok := Generator(pass, pos)
+	return ok
+}
+
+// Generator returns the generator that generated the file containing
+// pos. It ignores //line directives.
+func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) {
 	file := pass.Fset.PositionFor(pos, false).Filename
-	m := pass.ResultOf[facts.Generated].(map[string]bool)
-	return m[file]
+	m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+	g, ok := m[file]
+	return g, ok
 }
 
 func ReportfFG(pass *analysis.Pass, pos token.Pos, f string, args ...interface{}) {
 	file := lint.DisplayPosition(pass.Fset, pos).Filename
-	m := pass.ResultOf[facts.Generated].(map[string]bool)
-	if m[file] {
+	m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+	if _, ok := m[file]; ok {
 		return
 	}
 	pass.Reportf(pos, f, args...)
@@ -384,8 +392,8 @@ func ReportNodef(pass *analysis.Pass, node ast.Node, format string, args ...inte
 
 func ReportNodefFG(pass *analysis.Pass, node ast.Node, format string, args ...interface{}) {
 	file := lint.DisplayPosition(pass.Fset, node.Pos()).Filename
-	m := pass.ResultOf[facts.Generated].(map[string]bool)
-	if m[file] {
+	m := pass.ResultOf[facts.Generated].(map[string]facts.Generator)
+	if _, ok := m[file]; ok {
 		return
 	}
 	ReportNodef(pass, node, format, args...)
diff --git a/lint/runner.go b/lint/runner.go
index dcc6e2b21..7f5fd4539 100644
--- a/lint/runner.go
+++ b/lint/runner.go
@@ -71,7 +71,7 @@ type Package struct {
 	results  []*result
 	cfg      *config.Config
-	gen      map[string]bool
+	gen      map[string]facts.Generator
 	problems []Problem
 	ignores  []Ignore
 	errs     []error
@@ -812,7 +812,7 @@ func (r *Runner) processPkg(pkg *Package, analyzers []*analysis.Analyzer) {
 	if pkg.results[r.analyzerIDs.get(config.Analyzer)].v != nil {
 		pkg.cfg = pkg.results[r.analyzerIDs.get(config.Analyzer)].v.(*config.Config)
 	}
-	pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]bool)
+	pkg.gen = pkg.results[r.analyzerIDs.get(facts.Generated)].v.(map[string]facts.Generator)
 
 	// In a previous version of the code, we would throw away all type
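A minimal sketch of how a check can now distinguish which tool generated a file, using the new Generator helper and the facts.Generator values introduced above. The surrounding check is invented for illustration, and it assumes an analyzer that lists facts.Generated among its Requires so the generator map is available:

```go
package mycheck

import (
	"golang.org/x/tools/go/analysis"

	"honnef.co/go/tools/facts"
	"honnef.co/go/tools/lint/lintdsl"
)

func run(pass *analysis.Pass) (interface{}, error) {
	for _, f := range pass.Files {
		// Skip files generated by goyacc; keep analyzing files from
		// other generators as well as hand-written code.
		if gen, ok := lintdsl.Generator(pass, f.Pos()); ok && gen == facts.Goyacc {
			continue
		}
		_ = f // the actual analysis of f would go here
	}
	return nil, nil
}
```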
From 497c8f037f5a2754b53ea5031e9d4e4cdea503c1 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Fri, 7 Jun 2019 20:18:01 +0200
Subject: [PATCH 245/254] staticcheck: don't flag unused values in code generated by goyacc

There may be hundreds of those due to the way the state machine is
constructed.
---
 staticcheck/analysis.go | 2 +-
 staticcheck/lint.go     | 6 ++++++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/staticcheck/analysis.go b/staticcheck/analysis.go
index 633a4973f..442aebe5a 100644
--- a/staticcheck/analysis.go
+++ b/staticcheck/analysis.go
@@ -284,7 +284,7 @@ var Analyzers = map[string]*analysis.Analyzer{
 		Name:     "SA4006",
 		Run:      CheckUnreadVariableValues,
 		Doc:      Docs["SA4006"].String(),
-		Requires: []*analysis.Analyzer{buildssa.Analyzer},
+		Requires: []*analysis.Analyzer{buildssa.Analyzer, facts.Generated},
 		Flags:    newFlagSet(),
 	},
 	"SA4008": {
diff --git a/staticcheck/lint.go b/staticcheck/lint.go
index fee2d7860..96e217d73 100644
--- a/staticcheck/lint.go
+++ b/staticcheck/lint.go
@@ -1494,6 +1494,12 @@ func CheckUnreadVariableValues(pass *analysis.Pass) (interface{}, error) {
 		if node == nil {
 			continue
 		}
+		if gen, ok := Generator(pass, node.Pos()); ok && gen == facts.Goyacc {
+			// Don't flag unused values in code generated by goyacc.
+			// There may be hundreds of those due to the way the state
+			// machine is constructed.
+			continue
+		}
 		ast.Inspect(node, func(node ast.Node) bool {
 			assign, ok := node.(*ast.AssignStmt)
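For context, this is the kind of pattern SA4006 reports and which is now deliberately skipped in goyacc output. The snippet below is a hand-written illustration, not actual goyacc output:

```go
package pkg

func step(state int) int {
	next := state + 1 // SA4006 would normally flag this: the value is
	next = 0          // never read before being overwritten here
	return next
}
```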
From c4f8a89809225f1c6b343295afce3b78b071a745 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sun, 9 Jun 2019 10:59:03 +0200
Subject: [PATCH 246/254] doc: update changelog with recent changes

---
 doc/2019.2.html | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/2019.2.html b/doc/2019.2.html
index 3b54d14c5..f6c8a7175 100644
--- a/doc/2019.2.html
+++ b/doc/2019.2.html
@@ -339,7 +339,9 @@
 
 Changed checks
 
 • {{ check "SA1019" }} now flags imports of deprecated packages.
 • {{ check "SA4000" }} no longer flags comparisons between custom float types. Additionally, it avoids a false positive caused by cgo.
+• {{ check "SA4006" }} no longer flags unused values in code generated by goyacc. This avoids noise caused by the nature of the generated state machine.
 • {{ check "ST1005" }} no longer flags error messages that start with capitalized type names.
+• {{ check "ST1006" }} no longer flags receiver names in generated code.
 • {{ check "SA5002" }} no longer suggests replacing for false { with for {.
 • Added "SIP" and "RTP" as default initialisms to {{ check "ST1003" }}.
 • {{ check "SA1006" }}, {{ check "SA4003" }}, {{ check "S1017" }}, and {{ check "S1020" }} match more code patterns.
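As a small illustration of the first entry in the list above (the package name and its deprecation notice are invented for the example), a package whose package comment carries a "Deprecated:" paragraph is now reported at the import site even when none of its deprecated identifiers are referenced:

```go
package pkg

// Assume example.com/legacy's package documentation ends with a
// paragraph beginning "Deprecated: use example.com/modern instead."
// SA1019 would now report this import line itself.
import _ "example.com/legacy"
```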
From cb51c254f01bf832acf2b42c428c6219d5b83408 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Fri, 14 Jun 2019 02:24:13 +0200
Subject: [PATCH 247/254] lint: give malformed linter directives the "compile" check

The "compile" check is our catch-all for problems affecting the
analysis itself: type errors, invalid configuration, and so on.

We forgot to give malformed linter directives the "compile" check. This
caused them to never be shown, because "" isn't on the list of allowed
checks.

Fixes gh-515
---
 lint/runner.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lint/runner.go b/lint/runner.go
index 7f5fd4539..faa6b64dd 100644
--- a/lint/runner.go
+++ b/lint/runner.go
@@ -893,7 +893,7 @@ func parseDirectives(pkg *packages.Package) ([]Ignore, []Problem) {
 					Pos:      DisplayPosition(pkg.Fset, c.Pos()),
 					Message:  "malformed linter directive; missing the required reason field?",
 					Severity: Error,
-					Check:    "",
+					Check:    "compile",
 				}
 				problems = append(problems, p)
 				continue
From 0d05180ad8c0c505d5f6c6d4ec9a9188213dbe4b Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sat, 22 Jun 2019 18:14:25 +0200
Subject: [PATCH 248/254] Update golang.org/x/tools

---
 go.mod | 2 +-
 go.sum | 7 ++-----
 2 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/go.mod b/go.mod
index 3bbe16aa8..670b23bd7 100644
--- a/go.mod
+++ b/go.mod
@@ -8,5 +8,5 @@ require (
 	github.com/kisielk/gotool v1.0.0
 	github.com/rogpeppe/go-internal v1.3.0
 	golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e
-	golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb
+	golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac
 )
diff --git a/go.sum b/go.sum
index e56784c91..ac4afcae1 100644
--- a/go.sum
+++ b/go.sum
@@ -24,10 +24,7 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d h1:+R4KGOnez64A81RvjARKc4UT5
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd h1:7E3PabyysDSEjnaANKBgums/hyvMI/HoHQ50qZEzTrg=
-golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
-golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb h1:mnQlcVx8Qq8L70HV0DxUGuiuAtiEHTwF1gYJE/EL9nU=
-golang.org/x/tools v0.0.0-20190530171427-2b03ca6e44eb/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-golang.org/x/tools v0.0.0-20190601110225-0abef6e9ecb8 h1:KFgOV120pDm8h0MBnt26wwMmwdhSXE+K+G9jg1ZjxbE=
+golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac h1:MQEvx39qSf8vyrx3XRaOe+j1UDIzKwkYOVObRgGPVqI=
+golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
From 65e0b96f7690c2ae71cc5ae10ae4b0bb90dff39b Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sat, 22 Jun 2019 19:10:27 +0200
Subject: [PATCH 249/254] dist: update build script for Go modules

---
 dist/build.sh | 29 +++++++++++++++++------------
 1 file changed, 17 insertions(+), 12 deletions(-)

diff --git a/dist/build.sh b/dist/build.sh
index 5aace124e..83fce429a 100755
--- a/dist/build.sh
+++ b/dist/build.sh
@@ -3,19 +3,23 @@
 SYSTEMS=(windows linux freebsd darwin)
 ARCHS=(amd64 386)
 
-clean=$(git status --porcelain --untracked-files=no)
-if [ -n "$clean" ]; then
-	echo "There are uncommited changes"
-	exit 1
+rev="$1"
+if [ -z "$rev" ]; then
+	echo "Usage: $0 "
+	exit 1
 fi
 
-rev=$(git describe --tags --always)
-if [ -e "$rev" ]; then
-	rm -rf "$rev"
-fi
+
 mkdir "$rev"
+d=$(realpath "$rev")
+
+wrk=$(mktemp -d)
+trap "{ rm -rf \"$wrk\"; }" EXIT
+cd "$wrk"
+
+go mod init foo
+GO111MODULE=on go get -d honnef.co/go/tools/cmd/staticcheck@"$rev"
 
-echo "Revision is ${rev}"
 for os in ${SYSTEMS[@]}; do
 	for arch in ${ARCHS[@]}; do
 		echo "Building GOOS=$os GOARCH=$arch..."
@@ -23,15 +27,16 @@ for os in ${SYSTEMS[@]}; do
 		if [ $os = "windows" ]; then
 			out="${out}.exe"
 		fi
-		CGO_ENABLED=0 GOOS=$os GOARCH=$arch go build -o "${rev}/${out}" honnef.co/go/tools/cmd/staticcheck
+
+		CGO_ENABLED=0 GOOS=$os GOARCH=$arch GO111MODULE=on go build -o "$d/$out" honnef.co/go/tools/cmd/staticcheck
 		(
-			cd "$rev"
+			cd "$d"
 			sha256sum "$out" > "$out".sha256
 		)
 	done
 done
 
 (
-	cd "$rev"
+	cd "$d"
 	sha256sum -c --strict *.sha256
 )
From 880d9ef1ee4983204154d3a718e25c137ae91aa6 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sun, 23 Jun 2019 16:02:43 +0200
Subject: [PATCH 250/254] Update notes on sponsorship

---
 README.md | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index 1c71a715e..b490e9421 100644
--- a/README.md
+++ b/README.md
@@ -4,10 +4,9 @@
 working with Go code, including linters and static analysis, most
 prominently staticcheck.
 
-**These tools are supported by
-[patrons on Patreon](https://www.patreon.com/dominikh) and
-[sponsors](#sponsors). If you use these tools at your company,
-consider supporting open source by [becoming a sponsor!](mailto:dominik@honnef.co?subject=Staticcheck%20sponsorship)**
+**These tools are financially supported by [private and corporate sponsors](http://staticcheck.io/sponsors) to ensure its continued development.
+Please consider [becoming a sponsor](https://github.com/users/dominikh/sponsorship) if you or your company relies on the tools.**
+
 
 ## Installation
 
@@ -70,11 +69,13 @@
 You can find extensive documentation on
 
 ## Sponsors
 
-This project is sponsored by:
+This project is sponsored by the following companies
 
 [Fastly](https://fastly.com)
 [Uber](https://uber.com)
 
+as well as many generous people.
+
 ## Licenses
 
 All original code in this repository is licensed under the following
From 76a54248f37faaeb66e94b0dc6435886bb3cc6b5 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sun, 23 Jun 2019 16:03:11 +0200
Subject: [PATCH 251/254] Trim down install instructions

---
 README.md | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/README.md b/README.md
index b490e9421..cf1d1f111 100644
--- a/README.md
+++ b/README.md
@@ -29,11 +29,6 @@
 while the master branch is usually stable, it may still contain new checks or
 backwards incompatible changes that break your build. By using the
 master branch you agree to become a beta tester.
 
-To use the master branch, a simple `go get -u
-honnef.co/go/tools/cmd/...` suffices. You can also install a subset of
-the commands, for example only staticcheck with `go get -u
-honnef.co/go/tools/cmd/staticcheck`.
-
 ## Tools
 
 All of the following tools can be found in the cmd/ directory. Each
From 4cc7fa21be3cd45133573476c2f572468593c766 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sun, 23 Jun 2019 16:03:19 +0200
Subject: [PATCH 252/254] Move link to website closer to the top of README

---
 README.md | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index cf1d1f111..5dcbce38e 100644
--- a/README.md
+++ b/README.md
@@ -8,6 +8,11 @@
 prominently staticcheck.
 Please consider [becoming a sponsor](https://github.com/users/dominikh/sponsorship) if you or your company relies on the tools.**
 
+## Documentation
+
+You can find extensive documentation on these tools, in particular staticcheck, on [staticcheck.io](https://staticcheck.io/docs/).
+
+
 ## Installation
 
 ### Releases
@@ -57,11 +62,6 @@
 backwards-incompatible changes.
 
 We support the last two versions of Go.
 
-## Documentation
-
-You can find extensive documentation on
-[staticcheck.io](https://staticcheck.io).
-
 ## Sponsors
 
 This project is sponsored by the following companies
From 86b08b5391b71f2d6a05d15701c9726ce96330e9 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Sun, 23 Jun 2019 21:05:02 +0200
Subject: [PATCH 253/254] Limit parallelism of tests in CI

CircleCI provides us with a container that is limited to 2 cores and
4 GB of memory, but runtime.NumCPU sees all cores of the machine. Test
parallelism defaults to runtime.NumCPU, and test parallelism affects
memory usage. The runtime.NumCPU value may be large (we've seen 36),
which results in memory usage higher than 4 GB, ultimately resulting
in our tests being killed, leading to flaky tests.

Provide an explicit, sensible value for parallelism.
---
 .circleci/config.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index bb0943f01..3df7d582f 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -33,7 +33,7 @@ test: &test
           at: "/go/"
       - run:
           name: "Run tests"
-          command: "go test -v ./... >/tmp/test.out"
+          command: "go test -parallel=2 -v ./... >/tmp/test.out"
      - run:
          name: "Process test results"
          when: always
From 6c4852bd92a8ec927b5c5876e93d3d157d03a666 Mon Sep 17 00:00:00 2001
From: Dominik Honnef
Date: Mon, 24 Jun 2019 17:19:00 +0200
Subject: [PATCH 254/254] Version 2019.2

---
 version/version.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/version/version.go b/version/version.go
index a12f70fb4..6467c206f 100644
--- a/version/version.go
+++ b/version/version.go
@@ -7,7 +7,7 @@ import (
 	"runtime"
 )
 
-const Version = "devel"
+const Version = "2019.2"
 
 // version returns a version descriptor and reports whether the
 // version is a known release.