cmd/fix: add modernize and inline analyzers

We ran 'go mod vendor' to pull in the newly used packages.

Also, add a cmd/go script test that minimally
exercises each analyzer, analogous to the cmd/vet test.

For #75266
For #75267
For #71859

Change-Id: I334daea048e3d2f614a1788292a3175acf173932
Reviewed-on: https://go-review.googlesource.com/c/go/+/710995
Reviewed-by: Michael Matloob <matloob@golang.org>
Auto-Submit: Alan Donovan <adonovan@google.com>
TryBot-Bypass: Alan Donovan <adonovan@google.com>
Reviewed-by: Michael Matloob <matloob@google.com>
This commit is contained in:
Alan Donovan 2025-10-10 15:49:53 -04:00
parent 80876f4b42
commit a5f55a441e
45 changed files with 14862 additions and 12 deletions

View file

@ -22,10 +22,13 @@ package main
import (
"cmd/internal/objabi"
"cmd/internal/telemetry/counter"
"slices"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildtag"
"golang.org/x/tools/go/analysis/passes/hostport"
"golang.org/x/tools/go/analysis/passes/inline"
"golang.org/x/tools/go/analysis/passes/modernize"
"golang.org/x/tools/go/analysis/unitchecker"
)
@ -38,22 +41,23 @@ func main() {
unitchecker.Main(suite...) // (never returns)
}
// The fix suite analyzers produce fixes that are safe to apply.
// (Diagnostics may not describe actual problems,
// but their fixes must be unambiguously safe to apply.)
var suite = []*analysis.Analyzer{
// The fix suite analyzers produce fixes that are unambiguously safe to apply,
// even if the diagnostics might not describe actual problems.
var suite = slices.Concat(
[]*analysis.Analyzer{
buildtag.Analyzer,
hostport.Analyzer,
// TODO(adonovan): now the modernize (proposal #75266) and
// inline (proposal #75267) analyzers are published, revendor
// x/tools and add them here.
//
inline.Analyzer,
},
modernize.Suite,
// TODO(adonovan): add any other vet analyzers whose fixes are always safe.
// Candidates to audit: sigchanyzer, printf, assign, unreachable.
// Many of staticcheck's analyzers would make good candidates
// (e.g. rewriting WriteString(fmt.Sprintf()) to Fprintf.)
// Rejected:
// - composites: some types (e.g. PointXY{1,2}) don't want field names.
// - timeformat: flipping MM/DD is a behavior change, but the code
// could potentially be a workaround for another bug.
// - stringintconv: offers two fixes, user input required to choose.
// - fieldalignment: poor signal/noise; fix could be a regression.
}
)

View file

@ -0,0 +1,53 @@
# Elementary test of each analyzer in the "go fix" suite.
# This is simply to prove that they are running at all;
# detailed behavior is tested in x/tools.
#
# Each assertion matches the expected diff.
#
# Tip: to see the actual stdout,
# temporarily prefix the go command with "! ".
go fix -diff example.com/x
# buildtag
stdout '-// \+build go1.26'
# hostport
stdout 'net.Dial.*net.JoinHostPort'
# inline
stdout 'var three = 1 \+ 2'
# newexpr (proxy for whole modernize suite)
stdout 'var _ = new\(123\)'
-- go.mod --
module example.com/x
go 1.26
-- x.go --
//go:build go1.26
// +build go1.26
// ↑ buildtag
package x
import (
"fmt"
"net"
)
// hostport
var s string
var _, _ = net.Dial("tcp", fmt.Sprintf("%s:%d", s, 80))
//go:fix inline
func add(x, y int) int { return x + y }
// inline
var three = add(1, 2)
// newexpr
func varOf(x int) *int { return &x }
var _ = varOf(123)

View file

@ -0,0 +1,109 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package inline defines an analyzer that inlines calls to functions
and uses of constants marked with a "//go:fix inline" directive.
# Analyzer inline
inline: apply fixes based on 'go:fix inline' comment directives
The inline analyzer inlines functions and constants that are marked for inlining.
## Functions
Given a function that is marked for inlining, like this one:
//go:fix inline
func Square(x int) int { return Pow(x, 2) }
this analyzer will recommend that calls to the function elsewhere, in the same
or other packages, should be inlined.
Inlining can be used to move off of a deprecated function:
// Deprecated: prefer Pow(x, 2).
//go:fix inline
func Square(x int) int { return Pow(x, 2) }
It can also be used to move off of an obsolete package,
as when the import path has changed or a higher major version is available:
package pkg
import pkg2 "pkg/v2"
//go:fix inline
func F() { pkg2.F(nil) }
Replacing a call pkg.F() by pkg2.F(nil) can have no effect on the program,
so this mechanism provides a low-risk way to update large numbers of calls.
We recommend, where possible, expressing the old API in terms of the new one
to enable automatic migration.
The inliner takes care to avoid behavior changes, even subtle ones,
such as changes to the order in which argument expressions are
evaluated. When it cannot safely eliminate all parameter variables,
it may introduce a "binding declaration" of the form
var params = args
to evaluate argument expressions in the correct order and bind them to
parameter variables. Since the resulting code transformation may be
stylistically suboptimal, such inlinings may be disabled by specifying
the -inline.allow_binding_decl=false flag to the analyzer driver.
(In cases where it is not safe to "reduce" a call—that is, to replace
a call f(x) by the body of function f, suitably substituted—the
inliner machinery is capable of replacing f by a function literal,
func(){...}(). However, the inline analyzer discards all such
"literalizations" unconditionally, again on grounds of style.)
## Constants
Given a constant that is marked for inlining, like this one:
//go:fix inline
const Ptr = Pointer
this analyzer will recommend that uses of Ptr should be replaced with Pointer.
As with functions, inlining can be used to replace deprecated constants and
constants in obsolete packages.
A constant definition can be marked for inlining only if it refers to another
named constant.
The "//go:fix inline" comment must appear before a single const declaration on its own,
as above; before a const declaration that is part of a group, as in this case:
const (
C = 1
//go:fix inline
Ptr = Pointer
)
or before a group, applying to every constant in the group:
//go:fix inline
const (
Ptr = Pointer
Val = Value
)
The proposal https://go.dev/issue/32816 introduces the "//go:fix inline" directives.
You can use this command to apply inline fixes en masse:
$ go run golang.org/x/tools/go/analysis/passes/inline/cmd/inline@latest -fix ./...
# Analyzer gofixdirective
gofixdirective: validate uses of //go:fix comment directives
The gofixdirective analyzer checks "//go:fix inline" directives for correctness.
See the documentation for the inline analyzer for more about "//go:fix inline".
*/
package inline

View file

@ -0,0 +1,537 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inline
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"slices"
"strings"
_ "embed"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/analysis/passes/internal/gofixdirective"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/diff"
"golang.org/x/tools/internal/packagepath"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/refactor/inline"
"golang.org/x/tools/internal/typesinternal"
)
// The analyzer documentation is kept in doc.go and embedded here so
// that MustExtractDoc can extract the section for this analyzer.
//
//go:embed doc.go
var doc string

// Analyzer is the inline analyzer. It suggests fixes that replace
// calls and references to functions, constants, and type aliases
// marked with "//go:fix inline" by their definitions.
var Analyzer = &analysis.Analyzer{
	Name: "inline",
	Doc:  analysisinternal.MustExtractDoc(doc, "inline"),
	URL:  "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inline",
	Run:  run,
	// Facts are exported for marked declarations so that downstream
	// packages can inline uses of them too.
	FactTypes: []analysis.Fact{
		(*goFixInlineFuncFact)(nil),
		(*goFixInlineConstFact)(nil),
		(*goFixInlineAliasFact)(nil),
	},
	Requires: []*analysis.Analyzer{inspect.Analyzer},
}

// allowBindingDecl controls whether inlinings that require a
// "var params = args" binding declaration are offered;
// set by the -inline.allow_binding_decl flag.
var allowBindingDecl bool

func init() {
	Analyzer.Flags.BoolVar(&allowBindingDecl, "allow_binding_decl", false,
		"permit inlinings that require a 'var params = args' declaration")
}
// analyzer holds the state for this analysis.
type analyzer struct {
	pass *analysis.Pass
	root inspector.Cursor // root cursor of the package's syntax trees

	// memoization of repeated calls for same file.
	fileContent map[string][]byte

	// memoization of fact imports (nil => no fact)
	inlinableFuncs   map[*types.Func]*inline.Callee
	inlinableConsts  map[*types.Const]*goFixInlineConstFact
	inlinableAliases map[*types.TypeName]*goFixInlineAliasFact
}
// run is the analysis entry point: it gathers the package's
// "//go:fix inline" declarations (exporting facts for them),
// then suggests an inlining at each use.
func run(pass *analysis.Pass) (any, error) {
	root := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Root()
	a := &analyzer{
		pass:             pass,
		root:             root,
		fileContent:      map[string][]byte{},
		inlinableFuncs:   map[*types.Func]*inline.Callee{},
		inlinableConsts:  map[*types.Const]*goFixInlineConstFact{},
		inlinableAliases: map[*types.TypeName]*goFixInlineAliasFact{},
	}
	gofixdirective.Find(pass, root, a)
	a.inline()
	return nil, nil
}
// HandleFunc implements gofixdirective.Handler. It analyzes a function
// marked "//go:fix inline", records it as an inlinable callee, and
// exports a fact so that other packages can inline calls to it.
func (a *analyzer) HandleFunc(decl *ast.FuncDecl) {
	// bad reports a diagnostic at the directive explaining why
	// the declaration cannot be used as an inlining candidate.
	bad := func(format string, args ...any) {
		a.pass.Reportf(decl.Doc.Pos(), "invalid inlining candidate: "+format, args...)
	}

	src, err := a.readFile(decl)
	if err != nil {
		bad("cannot read source file: %v", err)
		return
	}
	callee, err := inline.AnalyzeCallee(discard, a.pass.Fset, a.pass.Pkg, a.pass.TypesInfo, decl, src)
	if err != nil {
		bad("%v", err)
		return
	}
	fn := a.pass.TypesInfo.Defs[decl.Name].(*types.Func)
	a.inlinableFuncs[fn] = callee
	a.pass.ExportObjectFact(fn, &goFixInlineFuncFact{callee})
}
// HandleAlias implements gofixdirective.Handler. It records a type
// alias marked "//go:fix inline" and, when the alias is an exported
// package-level name, exports a fact so other packages can inline it.
func (a *analyzer) HandleAlias(spec *ast.TypeSpec) {
	// Remember that this is an inlinable alias.
	fact := new(goFixInlineAliasFact)
	lhs := a.pass.TypesInfo.Defs[spec.Name].(*types.TypeName)
	a.inlinableAliases[lhs] = fact

	// A fact is created even if the RHS is non-exported,
	// so uses in other packages can be flagged;
	// but only for exported, package-level names.
	if lhs.Exported() && typesinternal.IsPackageLevel(lhs) {
		a.pass.ExportObjectFact(lhs, fact)
	}
}
// HandleConst implements gofixdirective.Handler. It records a constant
// "const LHS = RHS" marked "//go:fix inline" and, when the LHS is an
// exported package-level name, exports a fact describing the RHS.
func (a *analyzer) HandleConst(nameIdent, rhsIdent *ast.Ident) {
	var (
		info = a.pass.TypesInfo
		lhs  = info.Defs[nameIdent].(*types.Const)
		rhs  = info.Uses[rhsIdent].(*types.Const) // must be so in a well-typed program
	)
	fact := &goFixInlineConstFact{
		RHSName:    rhs.Name(),
		RHSPkgName: rhs.Pkg().Name(),
		RHSPkgPath: rhs.Pkg().Path(),
	}
	// Keep the RHS object only for same-package uses;
	// it is not serializable across packages.
	if rhs.Pkg() == a.pass.Pkg {
		fact.rhsObj = rhs
	}
	a.inlinableConsts[lhs] = fact

	// A fact is created even if the RHS is non-exported,
	// so uses in other packages can be flagged;
	// but only for exported, package-level names.
	if lhs.Exported() && typesinternal.IsPackageLevel(lhs) {
		a.pass.ExportObjectFact(lhs, fact)
	}
}
// inline inlines each static call to an inlinable function
// and each reference to an inlinable constant or type alias.
//
// TODO(adonovan): handle multiple diffs that each add the same import.
func (a *analyzer) inline() {
	for cur := range a.root.Preorder((*ast.CallExpr)(nil), (*ast.Ident)(nil)) {
		if call, ok := cur.Node().(*ast.CallExpr); ok {
			a.inlineCall(call, cur)
			continue
		}
		// Otherwise the cursor is at an identifier; dispatch
		// on the kind of object it refers to (if any).
		id := cur.Node().(*ast.Ident)
		switch obj := a.pass.TypesInfo.Uses[id].(type) {
		case *types.TypeName:
			a.inlineAlias(obj, cur)
		case *types.Const:
			a.inlineConst(obj, cur)
		}
	}
}
// If call is a call to an inlinable func, suggest inlining its use at cur.
//
// The fix's text edits are computed by diffing the file content
// before and after the inliner's rewrite.
func (a *analyzer) inlineCall(call *ast.CallExpr, cur inspector.Cursor) {
	if fn := typeutil.StaticCallee(a.pass.TypesInfo, call); fn != nil {
		// Inlinable? Check the local memo first, then imported facts.
		callee, ok := a.inlinableFuncs[fn]
		if !ok {
			var fact goFixInlineFuncFact
			if a.pass.ImportObjectFact(fn, &fact) {
				callee = fact.Callee
				a.inlinableFuncs[fn] = callee
			}
		}
		if callee == nil {
			return // nope
		}

		// Inline the call.
		content, err := a.readFile(call)
		if err != nil {
			a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err)
			return
		}
		curFile := astutil.EnclosingFile(cur)
		caller := &inline.Caller{
			Fset:    a.pass.Fset,
			Types:   a.pass.Pkg,
			Info:    a.pass.TypesInfo,
			File:    curFile,
			Call:    call,
			Content: content,
		}
		res, err := inline.Inline(caller, callee, &inline.Options{Logf: discard})
		if err != nil {
			a.pass.Reportf(call.Lparen, "%v", err)
			return
		}
		if res.Literalized {
			// Users are not fond of inlinings that literalize
			// f(x) to func() { ... }(), so avoid them.
			//
			// (Unfortunately the inliner is very timid,
			// and often literalizes when it cannot prove that
			// reducing the call is safe; the user of this tool
			// has no indication of what the problem is.)
			return
		}
		if res.BindingDecl && !allowBindingDecl {
			// When applying fix en masse, users are similarly
			// unenthusiastic about inlinings that cannot
			// entirely eliminate the parameters and
			// insert a 'var params = args' declaration.
			// The flag allows them to decline such fixes.
			return
		}
		got := res.Content

		// Suggest the "fix": convert the before/after file diff
		// into TextEdits with file-relative positions.
		var textEdits []analysis.TextEdit
		for _, edit := range diff.Bytes(content, got) {
			textEdits = append(textEdits, analysis.TextEdit{
				Pos:     curFile.FileStart + token.Pos(edit.Start),
				End:     curFile.FileStart + token.Pos(edit.End),
				NewText: []byte(edit.New),
			})
		}
		a.pass.Report(analysis.Diagnostic{
			Pos:     call.Pos(),
			End:     call.End(),
			Message: fmt.Sprintf("Call of %v should be inlined", callee),
			SuggestedFixes: []analysis.SuggestedFix{{
				Message:   fmt.Sprintf("Inline call of %v", callee),
				TextEdits: textEdits,
			}},
		})
	}
}
// If tn is the TypeName of an inlinable alias, suggest inlining its use at cur.
func (a *analyzer) inlineAlias(tn *types.TypeName, curId inspector.Cursor) {
	// Inlinable? Check the local memo first, then imported facts.
	inalias, ok := a.inlinableAliases[tn]
	if !ok {
		var fact goFixInlineAliasFact
		if a.pass.ImportObjectFact(tn, &fact) {
			inalias = &fact
			a.inlinableAliases[tn] = inalias
		}
	}
	if inalias == nil {
		return // nope
	}

	alias := tn.Type().(*types.Alias)
	// Remember the names of the alias's type params. When we check for shadowing
	// later, we'll ignore these because they won't appear in the replacement text.
	typeParamNames := map[*types.TypeName]bool{}
	for tp := range alias.TypeParams().TypeParams() {
		typeParamNames[tp.Obj()] = true
	}
	rhs := alias.Rhs()
	curPath := a.pass.Pkg.Path()
	curFile := astutil.EnclosingFile(curId)
	id := curId.Node().(*ast.Ident)
	// We have an identifier A here (n), possibly qualified by a package
	// identifier (sel.n), and an inlinable "type A = rhs" elsewhere.
	//
	// We can replace A with rhs if no name in rhs is shadowed at n's position,
	// and every package in rhs is importable by the current package.

	var (
		importPrefixes = map[string]string{curPath: ""} // from pkg path to prefix
		edits          []analysis.TextEdit
	)
	for _, tn := range typenames(rhs) {
		// Ignore the type parameters of the alias: they won't appear in the result.
		if typeParamNames[tn] {
			continue
		}
		var pkgPath, pkgName string
		if pkg := tn.Pkg(); pkg != nil {
			pkgPath = pkg.Path()
			pkgName = pkg.Name()
		}
		if pkgPath == "" || pkgPath == curPath {
			// The name is in the current package or the universe scope, so no import
			// is required. Check that it is not shadowed (that is, that the type
			// it refers to in rhs is the same one it refers to at n).
			scope := a.pass.TypesInfo.Scopes[curFile].Innermost(id.Pos()) // n's scope
			_, obj := scope.LookupParent(tn.Name(), id.Pos())             // what tn.Name() means in n's scope
			if obj != tn {
				return
			}
		} else if !packagepath.CanImport(a.pass.Pkg.Path(), pkgPath) {
			// If this package can't see the package of this part of rhs, we can't inline.
			return
		} else if _, ok := importPrefixes[pkgPath]; !ok {
			// Use AddImport to add pkgPath if it's not there already. Associate the prefix it assigns
			// with the package path for use by the TypeString qualifier below.
			prefix, eds := refactor.AddImport(
				a.pass.TypesInfo, curFile, pkgName, pkgPath, tn.Name(), id.Pos())
			importPrefixes[pkgPath] = strings.TrimSuffix(prefix, ".")
			edits = append(edits, eds...)
		}
	}
	// Find the complete identifier, which may take any of these forms:
	//
	//	Id
	//	Id[T]
	//	Id[K, V]
	//	pkg.Id
	//	pkg.Id[T]
	//	pkg.Id[K, V]
	var expr ast.Expr = id
	if astutil.IsChildOf(curId, edge.SelectorExpr_Sel) {
		curId = curId.Parent()
		expr = curId.Node().(ast.Expr)
	}
	// If expr is part of an IndexExpr or IndexListExpr, we'll need that node.
	// Given C[int], TypeOf(C) is generic but TypeOf(C[int]) is instantiated.
	switch ek, _ := curId.ParentEdge(); ek {
	case edge.IndexExpr_X:
		expr = curId.Parent().Node().(*ast.IndexExpr)
	case edge.IndexListExpr_X:
		expr = curId.Parent().Node().(*ast.IndexListExpr)
	}
	t := a.pass.TypesInfo.TypeOf(expr).(*types.Alias) // type of entire identifier
	if targs := t.TypeArgs(); targs.Len() > 0 {
		// Instantiate the alias with the type args from this use.
		// For example, given type A = M[K, V], compute the type of the use
		// A[int, Foo] as M[int, Foo].
		//
		// Don't validate instantiation: it can't panic unless we have a bug,
		// in which case seeing the stack trace via telemetry would be helpful.
		instAlias, _ := types.Instantiate(nil, alias, slices.Collect(targs.Types()), false)
		rhs = instAlias.(*types.Alias).Rhs()
	}
	// To get the replacement text, render the alias RHS using the package prefixes
	// we assigned above.
	newText := types.TypeString(rhs, func(p *types.Package) string {
		if p == a.pass.Pkg {
			return ""
		}
		if prefix, ok := importPrefixes[p.Path()]; ok {
			return prefix
		}
		panic(fmt.Sprintf("in %q, package path %q has no import prefix", rhs, p.Path()))
	})
	a.reportInline("type alias", "Type alias", expr, edits, newText)
}
// typenames returns the TypeNames for types within t (including t itself) that have
// them: basic types, named types and alias types.
// The same name may appear more than once.
func typenames(t types.Type) []*types.TypeName {
var tns []*types.TypeName
var visit func(types.Type)
visit = func(t types.Type) {
if hasName, ok := t.(interface{ Obj() *types.TypeName }); ok {
tns = append(tns, hasName.Obj())
}
switch t := t.(type) {
case *types.Basic:
tns = append(tns, types.Universe.Lookup(t.Name()).(*types.TypeName))
case *types.Named:
for t := range t.TypeArgs().Types() {
visit(t)
}
case *types.Alias:
for t := range t.TypeArgs().Types() {
visit(t)
}
case *types.TypeParam:
tns = append(tns, t.Obj())
case *types.Pointer:
visit(t.Elem())
case *types.Slice:
visit(t.Elem())
case *types.Array:
visit(t.Elem())
case *types.Chan:
visit(t.Elem())
case *types.Map:
visit(t.Key())
visit(t.Elem())
case *types.Struct:
for i := range t.NumFields() {
visit(t.Field(i).Type())
}
case *types.Signature:
// Ignore the receiver: although it may be present, it has no meaning
// in a type expression.
// Ditto for receiver type params.
// Also, function type params cannot appear in a type expression.
if t.TypeParams() != nil {
panic("Signature.TypeParams in type expression")
}
visit(t.Params())
visit(t.Results())
case *types.Interface:
for i := range t.NumEmbeddeds() {
visit(t.EmbeddedType(i))
}
for i := range t.NumExplicitMethods() {
visit(t.ExplicitMethod(i).Type())
}
case *types.Tuple:
for v := range t.Variables() {
visit(v.Type())
}
case *types.Union:
panic("Union in type expression")
default:
panic(fmt.Sprintf("unknown type %T", t))
}
}
visit(t)
return tns
}
// If con is an inlinable constant, suggest inlining its use at cur.
func (a *analyzer) inlineConst(con *types.Const, cur inspector.Cursor) {
	// Inlinable? Check the local memo first, then imported facts.
	incon, ok := a.inlinableConsts[con]
	if !ok {
		var fact goFixInlineConstFact
		if a.pass.ImportObjectFact(con, &fact) {
			incon = &fact
			a.inlinableConsts[con] = incon
		}
	}
	if incon == nil {
		return // nope
	}

	curFile := astutil.EnclosingFile(cur)
	n := cur.Node().(*ast.Ident)

	// We have an identifier A here (n), possibly qualified by a package
	// identifier (sel.X, where sel is the parent of n), and an inlinable
	// "const A = B" elsewhere (incon).
	// Consider replacing A with B.

	// Check that the expression we are inlining (B) means the same thing
	// (refers to the same object) in n's scope as it does in A's scope.
	// If the RHS is not in the current package, AddImport will handle
	// shadowing, so we only need to worry about when both expressions
	// are in the current package.
	if a.pass.Pkg.Path() == incon.RHSPkgPath {
		// incon.rhsObj is the object referred to by B in the definition of A.
		scope := a.pass.TypesInfo.Scopes[curFile].Innermost(n.Pos()) // n's scope
		_, obj := scope.LookupParent(incon.RHSName, n.Pos())         // what "B" means in n's scope
		if obj == nil {
			// Should be impossible: if code at n can refer to the LHS,
			// it can refer to the RHS.
			panic(fmt.Sprintf("no object for inlinable const %s RHS %s", n.Name, incon.RHSName))
		}
		if obj != incon.rhsObj {
			// "B" means something different here than at the inlinable const's scope.
			return
		}
	} else if !packagepath.CanImport(a.pass.Pkg.Path(), incon.RHSPkgPath) {
		// If this package can't see the RHS's package, we can't inline.
		return
	}

	// For a cross-package RHS, add an import of its package.
	var (
		importPrefix string
		edits        []analysis.TextEdit
	)
	if incon.RHSPkgPath != a.pass.Pkg.Path() {
		importPrefix, edits = refactor.AddImport(
			a.pass.TypesInfo, curFile, incon.RHSPkgName, incon.RHSPkgPath, incon.RHSName, n.Pos())
	}
	// If n is qualified by a package identifier, we'll need the full selector expression.
	var expr ast.Expr = n
	if astutil.IsChildOf(cur, edge.SelectorExpr_Sel) {
		expr = cur.Parent().Node().(ast.Expr)
	}
	a.reportInline("constant", "Constant", expr, edits, importPrefix+incon.RHSName)
}
// reportInline reports a diagnostic for fixing an inlinable name.
// kind and capKind are lower- and capitalized descriptions of the
// entity (e.g. "constant"/"Constant"); edits are any import edits,
// to which the replacement of ident by newText is appended.
func (a *analyzer) reportInline(kind, capKind string, ident ast.Expr, edits []analysis.TextEdit, newText string) {
	name := astutil.Format(a.pass.Fset, ident)
	edits = append(edits, analysis.TextEdit{
		Pos:     ident.Pos(),
		End:     ident.End(),
		NewText: []byte(newText),
	})
	a.pass.Report(analysis.Diagnostic{
		Pos:     ident.Pos(),
		End:     ident.End(),
		Message: fmt.Sprintf("%s %s should be inlined", capKind, name),
		SuggestedFixes: []analysis.SuggestedFix{{
			Message:   fmt.Sprintf("Inline %s %s", kind, name),
			TextEdits: edits,
		}},
	})
}
// readFile returns the content of the file enclosing node,
// caching it so repeated requests for the same file are cheap.
func (a *analyzer) readFile(node ast.Node) ([]byte, error) {
	filename := a.pass.Fset.File(node.Pos()).Name()
	if cached, ok := a.fileContent[filename]; ok {
		return cached, nil
	}
	content, err := a.pass.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	a.fileContent[filename] = content
	return content, nil
}
// A goFixInlineFuncFact is exported for each function marked "//go:fix inline".
// It holds information about the callee to support inlining.
type goFixInlineFuncFact struct{ Callee *inline.Callee }

func (*goFixInlineFuncFact) AFact() {}
func (f *goFixInlineFuncFact) String() string {
	return fmt.Sprintf("goFixInline %s", f.Callee)
}

// A goFixInlineConstFact is exported for each constant marked "//go:fix inline".
// It holds information about an inlinable constant. Gob-serializable.
type goFixInlineConstFact struct {
	// Information about "const LHSName = RHSName".
	RHSName    string
	RHSPkgPath string
	RHSPkgName string
	rhsObj     types.Object // for current package
}

func (*goFixInlineConstFact) AFact() {}
func (c *goFixInlineConstFact) String() string {
	return fmt.Sprintf("goFixInline const %q.%s", c.RHSPkgPath, c.RHSName)
}

// A goFixInlineAliasFact is exported for each type alias marked "//go:fix inline".
// It holds no information; its mere existence demonstrates that an alias is inlinable.
type goFixInlineAliasFact struct{}

func (*goFixInlineAliasFact) AFact()          {}
func (c *goFixInlineAliasFact) String() string { return "goFixInline alias" }

// discard is a no-op logger passed to the inliner machinery.
func discard(string, ...any) {}

View file

@ -0,0 +1,143 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gofixdirective searches for and validates go:fix directives. The
// go/analysis/passes/inline package uses gofixdirective to perform inlining.
// The go/analysis/passes/gofix package uses gofixdirective to check for problems
// with go:fix directives.
//
// gofixdirective is separate from gofix to avoid depending on refactor/inline,
// which is large.
package gofixdirective
// This package is tested by go/analysis/passes/inline.
import (
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/inspector"
internalastutil "golang.org/x/tools/internal/astutil"
)
// A Handler handles language entities with go:fix directives.
// Find calls the appropriate method for each marked declaration it encounters.
type Handler interface {
	// HandleFunc is called for each function declaration marked "//go:fix inline".
	HandleFunc(*ast.FuncDecl)
	// HandleAlias is called for each type alias marked "//go:fix inline".
	HandleAlias(*ast.TypeSpec)
	// HandleConst is called for each marked constant, with the
	// identifiers of its name and of its RHS constant.
	HandleConst(name, rhs *ast.Ident)
}
// Find finds functions and constants annotated with an appropriate "//go:fix"
// comment (the syntax proposed by #32816), and calls handler methods for each one.
// h may be nil.
func Find(pass *analysis.Pass, root inspector.Cursor, h Handler) {
	for cur := range root.Preorder((*ast.FuncDecl)(nil), (*ast.GenDecl)(nil)) {
		switch decl := cur.Node().(type) {
		case *ast.FuncDecl:
			findFunc(decl, h)

		case *ast.GenDecl:
			if decl.Tok != token.CONST && decl.Tok != token.TYPE {
				continue
			}
			// A directive on the whole decl applies to every spec;
			// individual specs may also carry their own.
			wholeDecl := hasFixInline(decl.Doc)
			for _, spec := range decl.Specs {
				if ts, ok := spec.(*ast.TypeSpec); ok { // Tok == TYPE
					findAlias(pass, ts, wholeDecl, h)
				} else if vs, ok := spec.(*ast.ValueSpec); ok { // Tok == CONST
					findConst(pass, vs, wholeDecl, h)
				}
			}
		}
	}
}
// findFunc calls h.HandleFunc for a function declaration that
// carries a "//go:fix inline" directive in its doc comment.
func findFunc(decl *ast.FuncDecl, h Handler) {
	if hasFixInline(decl.Doc) && h != nil {
		h.HandleFunc(decl)
	}
}
// findAlias validates a type spec marked for inlining (either directly
// or via declInline on its enclosing decl) and calls h.HandleAlias.
// Only true aliases are accepted, and aliases whose RHS contains a
// non-literal array length are rejected.
func findAlias(pass *analysis.Pass, spec *ast.TypeSpec, declInline bool, h Handler) {
	if !declInline && !hasFixInline(spec.Doc) {
		return
	}
	if !spec.Assign.IsValid() {
		pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: not a type alias")
		return
	}

	// Disallow inlines of type expressions containing array types.
	// Given an array type like [N]int where N is a named constant, go/types provides
	// only the value of the constant as an int64. So inlining A in this code:
	//
	//	const N = 5
	//	type A = [N]int
	//
	// would result in [5]int, breaking the connection with N.
	for n := range ast.Preorder(spec.Type) {
		arr, ok := n.(*ast.ArrayType)
		if !ok || arr.Len == nil {
			continue // not an array type, or a slice
		}
		// Make an exception when the array length is a literal int.
		if lit, ok := ast.Unparen(arr.Len).(*ast.BasicLit); ok && lit.Kind == token.INT {
			continue
		}
		pass.Reportf(spec.Pos(), "invalid //go:fix inline directive: array types not supported")
		return
	}
	if h != nil {
		h.HandleAlias(spec)
	}
}
// findConst validates a const spec marked for inlining (either directly
// or via declInline on its enclosing decl) and calls h.HandleConst for
// each name whose RHS is another named constant. Invalid values (iota,
// or non-identifier expressions) are reported and abort the whole spec.
func findConst(pass *analysis.Pass, spec *ast.ValueSpec, declInline bool, h Handler) {
	specInline := hasFixInline(spec.Doc)
	if declInline || specInline {
		for i, nameIdent := range spec.Names {
			if i >= len(spec.Values) {
				// Possible following an iota.
				break
			}
			var rhsIdent *ast.Ident
			switch val := spec.Values[i].(type) {
			case *ast.Ident:
				// Constants defined with the predeclared iota cannot be inlined.
				if pass.TypesInfo.Uses[val] == builtinIota {
					pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is iota")
					return
				}
				rhsIdent = val
			case *ast.SelectorExpr:
				rhsIdent = val.Sel
			default:
				pass.Reportf(val.Pos(), "invalid //go:fix inline directive: const value is not the name of another constant")
				return
			}
			if h != nil {
				h.HandleConst(nameIdent, rhsIdent)
			}
		}
	}
}
// hasFixInline reports the presence of a "//go:fix inline" directive
// in the comments.
func hasFixInline(cg *ast.CommentGroup) bool {
	for _, dir := range internalastutil.Directives(cg) {
		match := dir.Tool == "go" && dir.Name == "fix" && dir.Args == "inline"
		if match {
			return true
		}
	}
	return false
}

// builtinIota is the predeclared iota object, used to reject
// "//go:fix inline" on iota-based constants.
var builtinIota = types.Universe.Lookup("iota")

View file

@ -0,0 +1,61 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
)
// AnyAnalyzer reports opportunities to replace the empty interface
// type interface{} with its alias 'any' (added in go1.18).
var AnyAnalyzer = &analysis.Analyzer{
	Name: "any",
	Doc:  analysisinternal.MustExtractDoc(doc, "any"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
	},
	Run: runAny,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any",
}
// The any pass replaces interface{} with go1.18's 'any'.
func runAny(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.18") {
		for curIface := range curFile.Preorder((*ast.InterfaceType)(nil)) {
			iface := curIface.Node().(*ast.InterfaceType)
			if iface.Methods.NumFields() != 0 {
				continue // not the empty interface
			}
			// Check that 'any' is not shadowed.
			if lookup(pass.TypesInfo, curIface, "any") != builtinAny {
				continue
			}
			pass.Report(analysis.Diagnostic{
				Pos:     iface.Pos(),
				End:     iface.End(),
				Message: "interface{} can be replaced by any",
				SuggestedFixes: []analysis.SuggestedFix{{
					Message: "Replace interface{} by any",
					TextEdits: []analysis.TextEdit{{
						Pos:     iface.Pos(),
						End:     iface.End(),
						NewText: []byte("any"),
					}},
				}},
			})
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,250 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/moreiters"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// BLoopAnalyzer reports benchmark loops over b.N that can be
// modernized to use go1.24's (*testing.B).Loop.
var BLoopAnalyzer = &analysis.Analyzer{
	Name: "bloop",
	Doc:  analysisinternal.MustExtractDoc(doc, "bloop"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: bloop,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#bloop",
}
// bloop updates benchmarks that use "for range b.N", replacing it
// with go1.24's b.Loop() and eliminating any preceding
// b.{Start,Stop,Reset}Timer calls.
//
// Variants:
//
// for i := 0; i < b.N; i++ {} => for b.Loop() {}
// for range b.N {}
func bloop(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Only packages that import "testing" can contain benchmarks.
	if !typesinternal.Imports(pass.Pkg, "testing") {
		return nil, nil
	}

	var (
		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info    = pass.TypesInfo
	)

	// edits computes the text edits for a matched for/range loop
	// at the specified cursor. b is the *testing.B value, and
	// (start, end) is the portion using b.N to delete.
	edits := func(curLoop inspector.Cursor, b ast.Expr, start, end token.Pos) (edits []analysis.TextEdit) {
		curFn, _ := enclosingFunc(curLoop)
		// Within the same function, delete all calls to
		// b.{Start,Stop,Reset}Timer that precede the loop,
		// as b.Loop() subsumes their effect.
		filter := []ast.Node{(*ast.ExprStmt)(nil), (*ast.FuncLit)(nil)}
		curFn.Inspect(filter, func(cur inspector.Cursor) (descend bool) {
			node := cur.Node()
			if is[*ast.FuncLit](node) {
				return false // don't descend into FuncLits (e.g. sub-benchmarks)
			}
			stmt := node.(*ast.ExprStmt)
			if stmt.Pos() > start {
				return false // not preceding: stop
			}
			if call, ok := stmt.X.(*ast.CallExpr); ok {
				obj := typeutil.Callee(info, call)
				if typesinternal.IsMethodNamed(obj, "testing", "B", "StopTimer", "StartTimer", "ResetTimer") {
					// Delete call statement.
					// TODO(adonovan): delete following newline, or
					// up to start of next stmt? (May delete a comment.)
					edits = append(edits, analysis.TextEdit{
						Pos: stmt.Pos(),
						End: stmt.End(),
					})
				}
			}
			return true
		})

		// Replace ...b.N... with b.Loop().
		return append(edits, analysis.TextEdit{
			Pos:     start,
			End:     end,
			NewText: fmt.Appendf(nil, "%s.Loop()", astutil.Format(pass.Fset, b)),
		})
	}

	// Find all for/range statements.
	loops := []ast.Node{
		(*ast.ForStmt)(nil),
		(*ast.RangeStmt)(nil),
	}
	for curFile := range filesUsing(inspect, info, "go1.24") {
		for curLoop := range curFile.Preorder(loops...) {
			switch n := curLoop.Node().(type) {
			case *ast.ForStmt:
				// for _; i < b.N; _ {}
				if cmp, ok := n.Cond.(*ast.BinaryExpr); ok && cmp.Op == token.LSS {
					if sel, ok := cmp.Y.(*ast.SelectorExpr); ok &&
						sel.Sel.Name == "N" &&
						typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") && usesBenchmarkNOnce(curLoop, info) {
						delStart, delEnd := n.Cond.Pos(), n.Cond.End()

						// Eliminate variable i if no longer needed:
						//	for i := 0; i < b.N; i++ {
						//		...no references to i...
						//	}
						body, _ := curLoop.LastChild()
						if v := isIncrementLoop(info, n); v != nil &&
							!uses(index, body, v) {
							delStart, delEnd = n.Init.Pos(), n.Post.End()
						}

						pass.Report(analysis.Diagnostic{
							// Highlight "i < b.N".
							Pos:     n.Cond.Pos(),
							End:     n.Cond.End(),
							Message: "b.N can be modernized using b.Loop()",
							SuggestedFixes: []analysis.SuggestedFix{{
								Message:   "Replace b.N with b.Loop()",
								TextEdits: edits(curLoop, sel.X, delStart, delEnd),
							}},
						})
					}
				}

			case *ast.RangeStmt:
				// for range b.N {} -> for b.Loop() {}
				//
				// TODO(adonovan): handle "for i := range b.N".
				if sel, ok := n.X.(*ast.SelectorExpr); ok &&
					n.Key == nil &&
					n.Value == nil &&
					sel.Sel.Name == "N" &&
					typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") && usesBenchmarkNOnce(curLoop, info) {
					pass.Report(analysis.Diagnostic{
						// Highlight "range b.N".
						Pos:     n.Range,
						End:     n.X.End(),
						Message: "b.N can be modernized using b.Loop()",
						SuggestedFixes: []analysis.SuggestedFix{{
							Message:   "Replace b.N with b.Loop()",
							TextEdits: edits(curLoop, sel.X, n.Range, n.X.End()),
						}},
					})
				}
			}
		}
	}
	return nil, nil
}
// uses reports whether any use of obj occurs within the subtree
// rooted at cur.
func uses(index *typeindex.Index, cur inspector.Cursor, obj types.Object) bool {
	found := false
	for use := range index.Uses(obj) {
		if cur.Contains(use) {
			found = true
			break
		}
	}
	return found
}
// enclosingFunc returns the cursor of the innermost function
// declaration or literal (Func{Decl,Lit}) enclosing c, if any.
func enclosingFunc(c inspector.Cursor) (inspector.Cursor, bool) {
	funcs := c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))
	return moreiters.First(funcs)
}
// usesBenchmarkNOnce reports whether a b.N loop at cursor c should be
// modernized to b.Loop(). A loop qualifies only if it is:
//
//  1. directly inside a benchmark function, not a nested function
//     literal, because b.Loop() must run in the benchmark's own
//     goroutine and literals are often launched as goroutines
//     (go func(){...}); and
//  2. the only b.N reference in that benchmark function, because
//     b.Loop() may be called just once per benchmark execution;
//     further calls fail with "B.Loop called with timer stopped".
func usesBenchmarkNOnce(c inspector.Cursor, info *types.Info) bool {
	// The loop must be lexically within a Benchmark* FuncDecl.
	curFn, ok := enclosingFunc(c)
	if !ok {
		return false // not in a function at all
	}
	decl, ok := curFn.Node().(*ast.FuncDecl)
	if !ok || !isBenchmarkFunc(decl) {
		return false // inside a FuncLit, or not a benchmark function
	}

	// Count b.N selections in the benchmark's body,
	// skipping any nested function literals.
	nrefs := 0
	filter := []ast.Node{(*ast.SelectorExpr)(nil), (*ast.FuncLit)(nil)}
	curFn.Inspect(filter, func(cur inspector.Cursor) bool {
		switch n := cur.Node().(type) {
		case *ast.FuncLit:
			return false // opaque: don't descend
		case *ast.SelectorExpr:
			if n.Sel.Name == "N" && typesinternal.IsPointerToNamed(info.TypeOf(n.X), "testing", "B") {
				nrefs++
			}
		}
		return true
	})

	// Modernize only when there is exactly one b.N reference.
	return nrefs == 1
}
// isBenchmarkFunc reports whether f looks like a benchmark function:
// a named, receiver-less, exported function whose name begins with
// "Benchmark" and that takes exactly one parameter (the *testing.B).
func isBenchmarkFunc(f *ast.FuncDecl) bool {
	if f.Recv != nil || f.Name == nil {
		return false // a method, or unnamed
	}
	if !f.Name.IsExported() || !strings.HasPrefix(f.Name.Name, "Benchmark") {
		return false // not Benchmark*
	}
	return f.Type.Params != nil && len(f.Type.Params.List) == 1
}
// isIncrementLoop reports whether loop has the form
// "for i := 0; ...; i++ { ... }", and if so returns the symbol of
// the index variable i; otherwise it returns nil.
func isIncrementLoop(info *types.Info, loop *ast.ForStmt) *types.Var {
	assign, ok := loop.Init.(*ast.AssignStmt)
	if !ok ||
		assign.Tok != token.DEFINE ||
		len(assign.Rhs) != 1 ||
		!isZeroIntLiteral(info, assign.Rhs[0]) {
		return nil // init is not "i := 0"
	}
	inc, ok := loop.Post.(*ast.IncDecStmt)
	if !ok ||
		inc.Tok != token.INC ||
		!astutil.EqualSyntax(inc.X, assign.Lhs[0]) {
		return nil // post is not "i++" for the same i
	}
	return info.Defs[assign.Lhs[0].(*ast.Ident)].(*types.Var)
}

View file

@ -0,0 +1,455 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package modernize provides a suite of analyzers that suggest
simplifications to Go code, using modern language and library
features.
Each diagnostic provides a fix. Our intent is that these fixes may
be safely applied en masse without changing the behavior of your
program. In some cases the suggested fixes are imperfect and may
lead to (for example) unused imports or unused local variables,
causing build breakage. However, these problems are generally
trivial to fix. We regard any modernizer whose fix changes program
behavior to have a serious bug and will endeavor to fix it.
To apply all modernization fixes en masse, you can use the
following command:
$ go run golang.org/x/tools/go/analysis/passes/modernize/cmd/modernize@latest -fix ./...
(Do not use "go get -tool" to add gopls as a dependency of your
module; gopls commands must be built from their release branch.)
If the tool warns of conflicting fixes, you may need to run it more
than once until it has applied all fixes cleanly. This command is
not an officially supported interface and may change in the future.
Changes produced by this tool should be reviewed as usual before
being merged. In some cases, a loop may be replaced by a simple
function call, causing comments within the loop to be discarded.
Human judgment may be required to avoid losing comments of value.
The modernize suite contains many analyzers. Diagnostics from some,
such as "any" (which replaces "interface{}" with "any" where it
is safe to do so), are particularly numerous. It may ease the burden of
code review to apply fixes in two steps, the first consisting only of
fixes from the "any" analyzer, the second consisting of all
other analyzers. This can be achieved using flags, as in this example:
$ modernize -any=true -fix ./...
$ modernize -any=false -fix ./...
# Analyzer appendclipped
appendclipped: simplify append chains using slices.Concat
The appendclipped analyzer suggests replacing chains of append calls with a
single call to slices.Concat, which was added in Go 1.21. For example,
append(append(s, s1...), s2...) would be simplified to slices.Concat(s, s1, s2).
In the simple case of appending to a newly allocated slice, such as
append([]T(nil), s...), the analyzer suggests the more concise slices.Clone(s).
For byte slices, it will prefer bytes.Clone if the "bytes" package is
already imported.
This fix is only applied when the base of the append tower is a
"clipped" slice, meaning its length and capacity are equal (e.g.
x[:0:0] or []T{}). This is to avoid changing program behavior by
eliminating intended side effects on the base slice's underlying
array.
This analyzer is currently disabled by default as the
transformation does not preserve the nilness of the base slice in
all cases; see https://go.dev/issue/73557.
# Analyzer bloop
bloop: replace for-range over b.N with b.Loop
The bloop analyzer suggests replacing benchmark loops of the form
`for i := 0; i < b.N; i++` or `for range b.N` with the more modern
`for b.Loop()`, which was added in Go 1.24.
This change makes benchmark code more readable and also removes the need for
manual timer control, so any preceding calls to b.StartTimer, b.StopTimer,
or b.ResetTimer within the same function will also be removed.
Caveats: The b.Loop() method is designed to prevent the compiler from
optimizing away the benchmark loop, which can occasionally result in
slower execution due to increased allocations in some specific cases.
# Analyzer any
any: replace interface{} with any
The any analyzer suggests replacing uses of the empty interface type,
`interface{}`, with the `any` alias, which was introduced in Go 1.18.
This is a purely stylistic change that makes code more readable.
# Analyzer errorsastype
errorsastype: replace errors.As with errors.AsType[T]
This analyzer suggests fixes to simplify uses of [errors.As] of
this form:
var myerr *MyErr
if errors.As(err, &myerr) {
handle(myerr)
}
by using the less error-prone generic [errors.AsType] function,
introduced in Go 1.26:
if myerr, ok := errors.AsType[*MyErr](err); ok {
handle(myerr)
}
The fix is only offered if the var declaration has the form shown and
there are no uses of myerr outside the if statement.
# Analyzer fmtappendf
fmtappendf: replace []byte(fmt.Sprintf) with fmt.Appendf
The fmtappendf analyzer suggests replacing `[]byte(fmt.Sprintf(...))` with
`fmt.Appendf(nil, ...)`. This avoids the intermediate allocation of a string
by Sprintf, making the code more efficient. The suggestion also applies to
fmt.Sprint and fmt.Sprintln.
# Analyzer forvar
forvar: remove redundant re-declaration of loop variables
The forvar analyzer removes unnecessary shadowing of loop variables.
Before Go 1.22, it was common to write `for _, x := range s { x := x ... }`
to create a fresh variable for each iteration. Go 1.22 changed the semantics
of `for` loops, making this pattern redundant. This analyzer removes the
unnecessary `x := x` statement.
This fix only applies to `range` loops.
# Analyzer mapsloop
mapsloop: replace explicit loops over maps with calls to maps package
The mapsloop analyzer replaces loops of the form
for k, v := range x { m[k] = v }
with a single call to a function from the `maps` package, added in Go 1.23.
Depending on the context, this could be `maps.Copy`, `maps.Insert`,
`maps.Clone`, or `maps.Collect`.
The transformation to `maps.Clone` is applied conservatively, as it
preserves the nilness of the source map, which may be a subtle change in
behavior if the original code did not handle a nil map in the same way.
# Analyzer minmax
minmax: replace if/else statements with calls to min or max
The minmax analyzer simplifies conditional assignments by suggesting the use
of the built-in `min` and `max` functions, introduced in Go 1.21. For example,
if a < b { x = a } else { x = b }
is replaced by
x = min(a, b).
This analyzer avoids making suggestions for floating-point types,
as the behavior of `min` and `max` with NaN values can differ from
the original if/else statement.
# Analyzer newexpr
newexpr: simplify code by using go1.26's new(expr)
This analyzer finds declarations of functions of this form:
func varOf(x int) *int { return &x }
and suggests a fix to turn them into inlinable wrappers around
go1.26's built-in new(expr) function:
func varOf(x int) *int { return new(x) }
In addition, this analyzer suggests a fix for each call
to one of the functions before it is transformed, so that
use(varOf(123))
is replaced by:
use(new(123))
(Wrapper functions such as varOf are common when working with Go
serialization packages such as for JSON or protobuf, where pointers
are often used to express optionality.)
# Analyzer omitzero
omitzero: suggest replacing omitempty with omitzero for struct fields
The omitzero analyzer identifies uses of the `omitempty` JSON struct tag on
fields that are themselves structs. The `omitempty` tag has no effect on
struct-typed fields. The analyzer offers two suggestions: either remove the
tag, or replace it with `omitzero` (added in Go 1.24), which correctly
omits the field if the struct value is zero.
Replacing `omitempty` with `omitzero` is a change in behavior. The
original code would always encode the struct field, whereas the
modified code will omit it if it is a zero-value.
# Analyzer plusbuild
plusbuild: remove obsolete //+build comments
The plusbuild analyzer suggests a fix to remove obsolete build tags
of the form:
//+build linux,amd64
in files that also contain a Go 1.18-style tag such as:
//go:build linux && amd64
(It does not check that the old and new tags are consistent;
that is the job of the 'buildtag' analyzer in the vet suite.)
# Analyzer rangeint
rangeint: replace 3-clause for loops with for-range over integers
The rangeint analyzer suggests replacing traditional for loops such
as
for i := 0; i < n; i++ { ... }
with the more idiomatic Go 1.22 style:
for i := range n { ... }
This transformation is applied only if (a) the loop variable is not
modified within the loop body and (b) the loop's limit expression
is not modified within the loop, as `for range` evaluates its
operand only once.
# Analyzer reflecttypefor
reflecttypefor: replace reflect.TypeOf(x) with TypeFor[T]()
This analyzer suggests fixes to replace uses of reflect.TypeOf(x) with
reflect.TypeFor, introduced in go1.22, when the desired runtime type
is known at compile time, for example:
reflect.TypeOf(uint32(0)) -> reflect.TypeFor[uint32]()
reflect.TypeOf((*ast.File)(nil)) -> reflect.TypeFor[*ast.File]()
It also offers a fix to simplify the construction below, which uses
reflect.TypeOf to return the runtime type for an interface type,
reflect.TypeOf((*io.Reader)(nil)).Elem()
to:
reflect.TypeFor[io.Reader]()
No fix is offered in cases when the runtime type is dynamic, such as:
var r io.Reader = ...
reflect.TypeOf(r)
or when the operand has potential side effects.
# Analyzer slicescontains
slicescontains: replace loops with slices.Contains or slices.ContainsFunc
The slicescontains analyzer simplifies loops that check for the existence of
an element in a slice. It replaces them with calls to `slices.Contains` or
`slices.ContainsFunc`, which were added in Go 1.21.
If the expression for the target element has side effects, this
transformation will cause those effects to occur only once, not
once per tested slice element.
# Analyzer slicesdelete
slicesdelete: replace append-based slice deletion with slices.Delete
The slicesdelete analyzer suggests replacing the idiom
s = append(s[:i], s[j:]...)
with the more explicit
s = slices.Delete(s, i, j)
introduced in Go 1.21.
This analyzer is disabled by default. The `slices.Delete` function
zeros the elements between the new length and the old length of the
slice to prevent memory leaks, which is a subtle difference in
behavior compared to the append-based idiom; see https://go.dev/issue/73686.
# Analyzer slicessort
slicessort: replace sort.Slice with slices.Sort for basic types
The slicessort analyzer simplifies sorting slices of basic ordered
types. It replaces
sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
with the simpler `slices.Sort(s)`, which was added in Go 1.21.
# Analyzer stditerators
stditerators: use iterators instead of Len/At-style APIs
This analyzer suggests a fix to replace each loop of the form:
for i := 0; i < x.Len(); i++ {
use(x.At(i))
}
or its "for elem := range x.Len()" equivalent by a range loop over an
iterator offered by the same data type:
for elem := range x.All() {
	use(elem)
}
where x is one of various well-known types in the standard library.
# Analyzer stringscutprefix
stringscutprefix: replace HasPrefix/TrimPrefix with CutPrefix
The stringscutprefix analyzer simplifies a common pattern where code first
checks for a prefix with `strings.HasPrefix` and then removes it with
`strings.TrimPrefix`. It replaces this two-step process with a single call
to `strings.CutPrefix`, introduced in Go 1.20. The analyzer also handles
the equivalent functions in the `bytes` package.
For example, this input:
if strings.HasPrefix(s, prefix) {
use(strings.TrimPrefix(s, prefix))
}
is fixed to:
if after, ok := strings.CutPrefix(s, prefix); ok {
use(after)
}
The analyzer also offers fixes to use CutSuffix in a similar way.
This input:
if strings.HasSuffix(s, suffix) {
use(strings.TrimSuffix(s, suffix))
}
is fixed to:
if before, ok := strings.CutSuffix(s, suffix); ok {
use(before)
}
# Analyzer stringsseq
stringsseq: replace ranging over Split/Fields with SplitSeq/FieldsSeq
The stringsseq analyzer improves the efficiency of iterating over substrings.
It replaces
for range strings.Split(...)
with the more efficient
for range strings.SplitSeq(...)
which was added in Go 1.24 and avoids allocating a slice for the
substrings. The analyzer also handles strings.Fields and the
equivalent functions in the bytes package.
# Analyzer stringsbuilder
stringsbuilder: replace += with strings.Builder
This analyzer replaces repeated string += string concatenation
operations with calls to Go 1.10's strings.Builder.
For example:
var s = "["
for x := range seq {
s += x
s += "."
}
s += "]"
use(s)
is replaced by:
var s strings.Builder
s.WriteString("[")
for x := range seq {
s.WriteString(x)
s.WriteString(".")
}
s.WriteString("]")
use(s.String())
This avoids quadratic memory allocation and improves performance.
The analyzer requires that all references to s except the final one
are += operations. To avoid warning about trivial cases, at least one
must appear within a loop. The variable s must be a local
variable, not a global or parameter.
The sole use of the finished string must be the last reference to the
variable s. (It may appear within an intervening loop or function literal;
even if s.String() is called repeatedly, it does not allocate memory.)
# Analyzer testingcontext
testingcontext: replace context.WithCancel with t.Context in tests
The testingcontext analyzer simplifies context management in tests. It
replaces the manual creation of a cancellable context,
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
with a single call to t.Context(), which was added in Go 1.24.
This change is only suggested if the `cancel` function is not used
for any other purpose.
# Analyzer waitgroup
waitgroup: replace wg.Add(1)/go/wg.Done() with wg.Go
The waitgroup analyzer simplifies goroutine management with `sync.WaitGroup`.
It replaces the common pattern
wg.Add(1)
go func() {
defer wg.Done()
...
}()
with a single call to
wg.Go(func(){ ... })
which was added in Go 1.25.
*/
package modernize

View file

@ -0,0 +1,243 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"go/token"
"go/types"
"fmt"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/goplsexport"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// errorsastypeAnalyzer defines the "errorsastype" analyzer, which
// offers fixes to replace errors.As with go1.26's generic
// errors.AsType[T]. It is unexported because it is not yet a
// published modernizer; see init, which exports it to gopls.
var errorsastypeAnalyzer = &analysis.Analyzer{
	Name:     "errorsastype",
	Doc:      analysisinternal.MustExtractDoc(doc, "errorsastype"),
	URL:      "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#errorsastype",
	Requires: []*analysis.Analyzer{generated.Analyzer, typeindexanalyzer.Analyzer},
	Run:      errorsastype,
}
func init() {
	// Export to gopls until this is a published modernizer.
	// (goplsexport holds hooks for analyzers that gopls uses
	// before they are added to the public modernize suite.)
	goplsexport.ErrorsAsTypeModernizer = errorsastypeAnalyzer
}
// errorsastype offers a fix to replace error.As with the newer
// errors.AsType[T] following this pattern:
//
// var myerr *MyErr
// if errors.As(err, &myerr) { ... }
//
// =>
//
// if myerr, ok := errors.AsType[*MyErr](err); ok { ... }
//
// (In principle several of these can then be chained using if/else,
// but we don't attempt that.)
//
// We offer the fix only within an if statement, but not within a
// switch case such as:
//
// var myerr *MyErr
// switch {
// case errors.As(err, &myerr):
// }
//
// because the transformation in that case would be ungainly.
//
// Note that the cmd/vet suite includes the "errorsas" analyzer, which
// detects actual mistakes in the use of errors.As. This logic does
// not belong in errorsas because the problems it fixes are merely
// stylistic.
//
// TODO(adonovan): support more cases:
//
// - Negative cases
// var myerr E
// if !errors.As(err, &myerr) { ... }
// =>
// myerr, ok := errors.AsType[E](err)
// if !ok { ... }
//
// - if myerr := new(E); errors.As(err, myerr); { ... }
//
// - if errors.As(err, myerr) && othercond { ... }
func errorsastype(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info  = pass.TypesInfo
	)
	// Examine every call to errors.As in the package.
	for curCall := range index.Calls(index.Object("errors", "As")) {
		call := curCall.Node().(*ast.CallExpr)
		if len(call.Args) < 2 {
			continue // spread call: errors.As(pair())
		}
		v, curDeclStmt := canUseErrorsAsType(info, index, curCall)
		if v == nil {
			continue // the required var+if pattern does not apply
		}
		file := astutil.EnclosingFile(curDeclStmt)
		if !fileUses(info, file, "go1.26") {
			continue // errors.AsType is too new
		}

		// Locate identifier "As" in errors.As.
		var asIdent *ast.Ident
		switch n := ast.Unparen(call.Fun).(type) {
		case *ast.Ident:
			asIdent = n // "errors" was dot-imported
		case *ast.SelectorExpr:
			asIdent = n.Sel
		default:
			panic("no Ident for errors.As")
		}

		// Format the type as valid Go syntax.
		// TODO(adonovan): fix: FileQualifier needs to respect
		// visibility at the current point, and either fail
		// or edit the imports as needed.
		// TODO(adonovan): fix: TypeString is not a sound way
		// to print types as Go syntax as it does not respect
		// symbol visibility, etc. We need something loosely
		// integrated with FileQualifier that accumulates
		// import edits, and may fail (e.g. for unexported
		// type or field names from other packages).
		// See https://go.dev/issues/75604.
		qual := typesinternal.FileQualifier(file, pass.Pkg)
		errtype := types.TypeString(v.Type(), qual)

		// Choose a name for the "ok" variable.
		okName := "ok"
		if okVar := lookup(info, curCall, "ok"); okVar != nil {
			// The name 'ok' is already declared, but
			// don't choose a fresh name unless okVar
			// is also used within the if-statement.
			curIf := curCall.Parent()
			for curUse := range index.Uses(okVar) {
				if curIf.Contains(curUse) {
					// Clash: pick a name unused in the if statement's scope.
					scope := info.Scopes[curIf.Node().(*ast.IfStmt)]
					okName = refactor.FreshName(scope, v.Pos(), "ok")
					break
				}
			}
		}

		pass.Report(analysis.Diagnostic{
			Pos:     call.Fun.Pos(),
			End:     call.Fun.End(),
			Message: fmt.Sprintf("errors.As can be simplified using AsType[%s]", errtype),
			SuggestedFixes: []analysis.SuggestedFix{{
				Message: fmt.Sprintf("Replace errors.As with AsType[%s]", errtype),
				TextEdits: append(
					// delete "var myerr *MyErr"
					refactor.DeleteStmt(pass.Fset.File(call.Fun.Pos()), curDeclStmt),
					// if               errors.As     (err, &myerr);     { ... }
					// -------------    ------------- --------  ----
					// if myerr, ok :=  errors.AsType[*MyErr](err ); ok  { ... }
					analysis.TextEdit{
						// insert "myerr, ok := "
						Pos:     call.Pos(),
						End:     call.Pos(),
						NewText: fmt.Appendf(nil, "%s, %s := ", v.Name(), okName),
					},
					analysis.TextEdit{
						// replace As with AsType[T]
						Pos:     asIdent.Pos(),
						End:     asIdent.End(),
						NewText: fmt.Appendf(nil, "AsType[%s]", errtype),
					},
					analysis.TextEdit{
						// delete ", &myerr"
						Pos: call.Args[0].End(),
						End: call.Args[1].End(),
					},
					analysis.TextEdit{
						// insert "; ok"
						Pos:     call.End(),
						End:     call.End(),
						NewText: fmt.Appendf(nil, "; %s", okName),
					},
				),
			}},
		})
	}
	return nil, nil
}
// canUseErrorsAsType reports whether curCall is a call to errors.As
// forming the condition of an if statement, preceded by a simple
// "var v T" declaration of the typed error variable, with no uses of
// v outside the if statement. On success it returns the variable and
// the cursor of its enclosing DeclStmt; on failure, zero values.
func canUseErrorsAsType(info *types.Info, index *typeindex.Index, curCall inspector.Cursor) (_ *types.Var, _ inspector.Cursor) {
	var zero inspector.Cursor

	if !astutil.IsChildOf(curCall, edge.IfStmt_Cond) {
		return nil, zero // not the condition of an if statement
	}
	curIfStmt := curCall.Parent()
	if curIfStmt.Node().(*ast.IfStmt).Init != nil {
		return nil, zero // if statement already has an init part
	}
	unary, ok := curCall.Node().(*ast.CallExpr).Args[1].(*ast.UnaryExpr)
	if !ok || unary.Op != token.AND {
		return nil, zero // 2nd arg is not &var
	}
	id, ok := unary.X.(*ast.Ident)
	if !ok {
		return nil, zero // not a simple ident (local var)
	}
	v := info.Uses[id].(*types.Var)
	curDef, ok := index.Def(v)
	if !ok {
		return nil, zero // var is not local (e.g. dot-imported)
	}

	// Have: if errors.As(err, &v) { ... }
	// Reject if v is used outside (before or after) the
	// IfStmt, since that will become its new scope.
	for curUse := range index.Uses(v) {
		if !curIfStmt.Contains(curUse) {
			return nil, zero // v used before/after if statement
		}
	}

	if !astutil.IsChildOf(curDef, edge.ValueSpec_Names) {
		return nil, zero // v not declared by "var v T"
	}
	curSpec := curDef.Parent() // ValueSpec
	curDecl := curSpec.Parent() // GenDecl
	spec := curSpec.Node().(*ast.ValueSpec)
	if len(spec.Names) != 1 || len(spec.Values) != 0 ||
		len(curDecl.Node().(*ast.GenDecl).Specs) != 1 {
		return nil, zero // not a simple "var v T" decl
	}

	// Have:
	//	var v *MyErr
	//	...
	//	if errors.As(err, &v) { ... }
	// with no uses of v outside the IfStmt.
	return v, curDecl.Parent() // DeclStmt
}

View file

@ -0,0 +1,115 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/types"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// FmtAppendfAnalyzer defines the "fmtappendf" modernizer, which
// replaces []byte(fmt.Sprint{,f,ln}(...)) conversions with the
// corresponding fmt.Append{,f,ln}(nil, ...) call.
var FmtAppendfAnalyzer = &analysis.Analyzer{
	Name: "fmtappendf",
	Doc:  analysisinternal.MustExtractDoc(doc, "fmtappendf"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: fmtappendf,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#fmtappendf",
}
// The fmtappendf function replaces []byte(fmt.Sprintf(...)) by
// fmt.Appendf(nil, ...), and similarly for Sprint, Sprintln.
func fmtappendf(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
	for _, fn := range []types.Object{
		index.Object("fmt", "Sprintf"),
		index.Object("fmt", "Sprintln"),
		index.Object("fmt", "Sprint"),
	} {
		for curCall := range index.Calls(fn) {
			call := curCall.Node().(*ast.CallExpr)
			if ek, idx := curCall.ParentEdge(); ek == edge.CallExpr_Args && idx == 0 {
				// Is parent a T(fmt.SprintX(...)) conversion?
				conv := curCall.Parent().Node().(*ast.CallExpr)
				tv := pass.TypesInfo.Types[conv.Fun]
				if tv.IsType() && types.Identical(tv.Type, byteSliceType) &&
					fileUses(pass.TypesInfo, astutil.EnclosingFile(curCall), "go1.19") {
					// Have: []byte(fmt.SprintX(...))

					// Find "Sprint" identifier.
					var id *ast.Ident
					switch e := ast.Unparen(call.Fun).(type) {
					case *ast.SelectorExpr:
						id = e.Sel // "fmt.Sprint"
					case *ast.Ident:
						id = e // "Sprint" after `import . "fmt"`
					}

					// e.g. old="Sprintf", new="Appendf".
					old, new := fn.Name(), strings.Replace(fn.Name(), "Sprint", "Append", 1)
					edits := []analysis.TextEdit{
						{
							// delete "[]byte("
							Pos: conv.Pos(),
							End: conv.Lparen + 1,
						},
						{
							// remove ")"
							Pos: conv.Rparen,
							End: conv.Rparen + 1,
						},
						{
							// replace SprintX identifier with AppendX
							Pos:     id.Pos(),
							End:     id.End(),
							NewText: []byte(new),
						},
						{
							// insert "nil, " as the new first argument
							Pos:     call.Lparen + 1,
							NewText: []byte("nil, "),
						},
					}
					if len(conv.Args) == 1 {
						arg := conv.Args[0]
						// Determine if we have T(fmt.SprintX(...)<non-args,
						// like a space or a comma>). If so, delete the non-args
						// that come before the right parenthesis. Leaving an
						// extra comma here produces invalid code. (See
						// golang/go#74709)
						if arg.End() < conv.Rparen {
							edits = append(edits, analysis.TextEdit{
								Pos: arg.End(),
								End: conv.Rparen,
							})
						}
					}
					pass.Report(analysis.Diagnostic{
						Pos:     conv.Pos(),
						End:     conv.End(),
						Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new),
						SuggestedFixes: []analysis.SuggestedFix{{
							Message:   fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new),
							TextEdits: edits,
						}},
					})
				}
			}
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,94 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"go/token"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
)
// ForVarAnalyzer defines the "forvar" modernizer, which removes the
// redundant "x := x" re-declaration of range-loop variables made
// unnecessary by Go 1.22's per-iteration loop variable semantics.
var ForVarAnalyzer = &analysis.Analyzer{
	Name: "forvar",
	Doc:  analysisinternal.MustExtractDoc(doc, "forvar"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
	},
	Run: forvar,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar",
}
// forvar offers to remove the unnecessary copying of a range-loop
// variable:
//
//	for _, x := range foo {
//		x := x // offer to remove this superfluous assignment
//	}
//
// Prerequisites:
// The statement must be <ident> := <ident> where the two idents are
// the same, and the ident is declared (:=) as a variable by the range
// statement itself.
// (Note that this 'fix' does not work for three clause loops
// because the Go specification says "The variable used by each subsequent iteration
// is declared implicitly before executing the post statement and initialized to the
// value of the previous iteration's variable at that moment.")
func forvar(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	ins := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(ins, pass.TypesInfo, "go1.22") {
		for curLoop := range curFile.Preorder((*ast.RangeStmt)(nil)) {
			rng := curLoop.Node().(*ast.RangeStmt)
			if rng.Tok != token.DEFINE {
				continue // not a := range loop
			}

			// redeclaresLoopVars reports whether assign has one of the
			// forms k := k; v := v; k, v := k, v; v, k := v, k, where
			// k and v are the loop variables.
			redeclaresLoopVars := func(assign *ast.AssignStmt) bool {
				for i, lhs := range assign.Lhs {
					sameAsRHS := astutil.EqualSyntax(lhs, assign.Rhs[i])
					isLoopVar := astutil.EqualSyntax(lhs, rng.Key) || astutil.EqualSyntax(lhs, rng.Value)
					if !sameAsRHS || !isLoopVar {
						return false
					}
				}
				return true
			}

			// Have: for k, v := range x { stmts }
			//
			// Delete the prefix of stmts consisting of
			// loop-variable re-declarations.
			for _, stmt := range rng.Body.List {
				assign, ok := stmt.(*ast.AssignStmt)
				if !ok ||
					assign.Tok != token.DEFINE ||
					len(assign.Lhs) != len(assign.Rhs) ||
					!redeclaresLoopVars(assign) {
					break // stop at first other statement
				}
				curStmt, _ := curLoop.FindNode(stmt)
				if edits := refactor.DeleteStmt(pass.Fset.File(stmt.Pos()), curStmt); len(edits) > 0 {
					pass.Report(analysis.Diagnostic{
						Pos:     stmt.Pos(),
						End:     stmt.End(),
						Message: "copying variable is unneeded",
						SuggestedFixes: []analysis.SuggestedFix{{
							Message:   "Remove unneeded redeclaration",
							TextEdits: edits,
						}},
					})
				}
			}
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,280 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
// This file defines modernizers that use the "maps" package.
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal"
)
// MapsLoopAnalyzer defines the "mapsloop" modernizer, which replaces
// explicit map-insertion loops with calls to go1.23's maps package
// (Copy, Insert, Clone, or Collect); see mapsloop below.
var MapsLoopAnalyzer = &analysis.Analyzer{
	Name: "mapsloop",
	Doc:  analysisinternal.MustExtractDoc(doc, "mapsloop"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
	},
	Run: mapsloop,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop",
}
// The mapsloop pass offers to simplify a loop of map insertions:
//
//	for k, v := range x {
//		m[k] = v
//	}
//
// by a call to go1.23's maps package. There are four variants, the
// product of two axes: whether the source x is a map or an iter.Seq2,
// and whether the destination m is a newly created map:
//
//	maps.Copy(m, x)		(x is map)
//	maps.Insert(m, x)	(x is iter.Seq2)
//	m = maps.Clone(x)	(x is a non-nil map, m is a new map)
//	m = maps.Collect(x)	(x is iter.Seq2, m is a new map)
//
// A map is newly created if the preceding statement has one of these
// forms, where M is a map type:
//
//	m = make(M)
//	m = M{}
func mapsloop(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Skip the analyzer in packages where its
	// fixes would create an import cycle.
	if within(pass, "maps", "bytes", "runtime") {
		return nil, nil
	}

	info := pass.TypesInfo

	// check is called for each statement of this form:
	//	for k, v := range x { m[k] = v }
	check := func(file *ast.File, curRange inspector.Cursor, assign *ast.AssignStmt, m, x ast.Expr) {

		// Is x a map or iter.Seq2?
		tx := types.Unalias(info.TypeOf(x))
		var xmap bool
		switch typeparams.CoreType(tx).(type) {
		case *types.Map:
			xmap = true

		case *types.Signature:
			k, v, ok := assignableToIterSeq2(tx)
			if !ok {
				return // a named isomer of Seq2
			}
			xmap = false

			// Record in tx the unnamed map[K]V type
			// derived from the yield function.
			// This is the type of maps.Collect(x).
			tx = types.NewMap(k, v)

		default:
			return // e.g. slice, channel (or no core type!)
		}

		// Is the preceding statement of the form
		//	m = make(M) or M{}
		// and can we replace its RHS with maps.{Clone,Collect}?
		//
		// Beware: if x may be nil, we cannot use Clone as it preserves nilness.
		var mrhs ast.Expr // make(M) or M{}, or nil
		if curPrev, ok := curRange.PrevSibling(); ok {
			// Note: this inner assign shadows the outer
			// parameter; it is the *preceding* statement.
			if assign, ok := curPrev.Node().(*ast.AssignStmt); ok &&
				len(assign.Lhs) == 1 &&
				len(assign.Rhs) == 1 &&
				astutil.EqualSyntax(assign.Lhs[0], m) {
				// Have: m = rhs; for k, v := range x { m[k] = v }
				var newMap bool
				rhs := assign.Rhs[0]
				switch rhs := ast.Unparen(rhs).(type) {
				case *ast.CallExpr:
					if id, ok := ast.Unparen(rhs.Fun).(*ast.Ident); ok &&
						info.Uses[id] == builtinMake {
						// Have: m = make(...)
						newMap = true
					}
				case *ast.CompositeLit:
					if len(rhs.Elts) == 0 {
						// Have m = M{}
						newMap = true
					}
				}

				// Take care not to change type of m's RHS expression.
				if newMap {
					trhs := info.TypeOf(rhs)

					// Inv: tx is the type of maps.F(x)
					// - maps.Clone(x) has the same type as x.
					// - maps.Collect(x) returns an unnamed map type.

					if assign.Tok == token.DEFINE {
						// DEFINE (:=): we must not
						// change the type of RHS.
						if types.Identical(tx, trhs) {
							mrhs = rhs
						}
					} else {
						// ASSIGN (=): the types of LHS
						// and RHS may differ in namedness.
						if types.AssignableTo(tx, trhs) {
							mrhs = rhs
						}
					}

					// Temporarily disable the transformation to the
					// (nil-preserving) maps.Clone until we can prove
					// that x is non-nil. This is rarely possible,
					// and may require control flow analysis
					// (e.g. a dominating "if len(x)" check).
					// See #71844.
					if xmap {
						mrhs = nil
					}
				}
			}
		}

		// Choose function.
		var funcName string
		if mrhs != nil {
			funcName = cond(xmap, "Clone", "Collect")
		} else {
			funcName = cond(xmap, "Copy", "Insert")
		}

		// Report diagnostic, and suggest fix.
		rng := curRange.Node()
		prefix, importEdits := refactor.AddImport(info, file, "maps", "maps", funcName, rng.Pos())
		var (
			newText    []byte
			start, end token.Pos
		)
		if mrhs != nil {
			// Replace assignment and loop with expression.
			//
			//	m = make(...)
			//	for k, v := range x { /* comments */ m[k] = v }
			//
			// ->
			//
			//	/* comments */
			//	m = maps.Copy(x)
			curPrev, _ := curRange.PrevSibling()
			start, end = curPrev.Node().Pos(), rng.End()
			newText = fmt.Appendf(nil, "%s%s = %s%s(%s)",
				allComments(file, start, end),
				astutil.Format(pass.Fset, m),
				prefix,
				funcName,
				astutil.Format(pass.Fset, x))
		} else {
			// Replace loop with call statement.
			//
			//	for k, v := range x { /* comments */ m[k] = v }
			//
			// ->
			//
			//	/* comments */
			//	maps.Copy(m, x)
			start, end = rng.Pos(), rng.End()
			newText = fmt.Appendf(nil, "%s%s%s(%s, %s)",
				allComments(file, start, end),
				prefix,
				funcName,
				astutil.Format(pass.Fset, m),
				astutil.Format(pass.Fset, x))
		}
		pass.Report(analysis.Diagnostic{
			Pos:     assign.Lhs[0].Pos(),
			End:     assign.Lhs[0].End(),
			Message: "Replace m[k]=v loop with maps." + funcName,
			SuggestedFixes: []analysis.SuggestedFix{{
				Message: "Replace m[k]=v loop with maps." + funcName,
				TextEdits: append(importEdits, []analysis.TextEdit{{
					Pos:     start,
					End:     end,
					NewText: newText,
				}}...),
			}},
		})
	}

	// Find all range loops around m[k] = v.
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.23") {
		file := curFile.Node().(*ast.File)

		for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
			rng := curRange.Node().(*ast.RangeStmt)

			if rng.Tok == token.DEFINE &&
				rng.Key != nil &&
				rng.Value != nil &&
				isAssignBlock(rng.Body) {
				// Have: for k, v := range x { lhs = rhs }

				assign := rng.Body.List[0].(*ast.AssignStmt)
				if index, ok := assign.Lhs[0].(*ast.IndexExpr); ok &&
					astutil.EqualSyntax(rng.Key, index.Index) &&
					astutil.EqualSyntax(rng.Value, assign.Rhs[0]) &&
					is[*types.Map](typeparams.CoreType(info.TypeOf(index.X))) &&
					types.Identical(info.TypeOf(index), info.TypeOf(rng.Value)) { // m[k], v
					// Have: for k, v := range x { m[k] = v }
					// where there is no implicit conversion.
					check(file, curRange, assign, index.X, rng.X)
				}
			}
		}
	}
	return nil, nil
}
// assignableToIterSeq2 reports whether t is assignable to
// iter.Seq[K, V] and returns K and V if so.
func assignableToIterSeq2(t types.Type) (k, v types.Type, ok bool) {
// The only named type assignable to iter.Seq2 is iter.Seq2.
if is[*types.Named](t) {
if !typesinternal.IsTypeNamed(t, "iter", "Seq2") {
return
}
t = t.Underlying()
}
if t, ok := t.(*types.Signature); ok {
// func(yield func(K, V) bool)?
if t.Params().Len() == 1 && t.Results().Len() == 0 {
if yield, ok := t.Params().At(0).Type().(*types.Signature); ok { // sic, no Underlying/CoreType
if yield.Params().Len() == 2 &&
yield.Results().Len() == 1 &&
types.Identical(yield.Results().At(0).Type(), builtinBool.Type()) {
return yield.Params().At(0).Type(), yield.Params().At(1).Type(), true
}
}
}
}
return
}

View file

@ -0,0 +1,440 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// MinMaxAnalyzer defines the "minmax" modernizer, which replaces
// if/else statements by calls to go1.21's built-in min/max functions,
// and flags user-defined min/max functions that the built-ins subsume;
// see [minmax] for details.
var MinMaxAnalyzer = &analysis.Analyzer{
	Name: "minmax",
	Doc:  analysisinternal.MustExtractDoc(doc, "minmax"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,        // suppresses diagnostics in generated files
		inspect.Analyzer,          // provides the AST inspector
		typeindexanalyzer.Analyzer, // maps objects to their declarations
	},
	Run: minmax,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#minmax",
}
// The minmax pass replaces if/else statements with calls to min or max,
// and removes user-defined min/max functions that are equivalent to built-ins.
//
// If/else replacement patterns:
//
//  1. if a < b { x = a } else { x = b }  =>  x = min(a, b)
//  2. x = a; if a < b { x = b }          =>  x = max(a, b)
//
// Pattern 1 requires that a is not NaN, and pattern 2 requires that b
// is not NaN. Since this is hard to prove, we reject floating-point
// numbers.
//
// Function removal:
// User-defined min/max functions are suggested for removal if they may
// be safely replaced by their built-in namesake.
//
// Variants:
//   - all four ordered comparisons
//   - "x := a" or "x = a" or "var x = a" in pattern 2
//   - "x < b" or "a < b" in pattern 2
func minmax(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Check for user-defined min/max functions that can be removed
	checkUserDefinedMinMax(pass)

	// check is called for all statements of this form:
	//	if a < b { lhs = rhs }
	check := func(file *ast.File, curIfStmt inspector.Cursor, compare *ast.BinaryExpr) {
		var (
			ifStmt  = curIfStmt.Node().(*ast.IfStmt)
			tassign = ifStmt.Body.List[0].(*ast.AssignStmt)
			a       = compare.X
			b       = compare.Y
			lhs     = tassign.Lhs[0]
			rhs     = tassign.Rhs[0]
			sign    = isInequality(compare.Op)

			// callArg formats a call argument, preserving comments from [start-end).
			callArg = func(arg ast.Expr, start, end token.Pos) string {
				comments := allComments(file, start, end)
				return cond(arg == b, ", ", "") + // second argument needs a comma
					cond(comments != "", "\n", "") + // comments need their own line
					comments +
					astutil.Format(pass.Fset, arg)
			}
		)

		if fblock, ok := ifStmt.Else.(*ast.BlockStmt); ok && isAssignBlock(fblock) {
			fassign := fblock.List[0].(*ast.AssignStmt)

			// Have: if a < b { lhs = rhs } else { lhs2 = rhs2 }
			lhs2 := fassign.Lhs[0]
			rhs2 := fassign.Rhs[0]

			// For pattern 1, check that:
			// - lhs = lhs2
			// - {rhs,rhs2} = {a,b}
			if astutil.EqualSyntax(lhs, lhs2) {
				if astutil.EqualSyntax(rhs, a) && astutil.EqualSyntax(rhs2, b) {
					sign = +sign
				} else if astutil.EqualSyntax(rhs2, a) && astutil.EqualSyntax(rhs, b) {
					sign = -sign
				} else {
					return
				}

				sym := cond(sign < 0, "min", "max")

				if !is[*types.Builtin](lookup(pass.TypesInfo, curIfStmt, sym)) {
					return // min/max function is shadowed
				}

				// pattern 1
				//
				// TODO(adonovan): if lhs is declared "var lhs T" on preceding line,
				// simplify the whole thing to "lhs := min(a, b)".
				pass.Report(analysis.Diagnostic{
					// Highlight the condition a < b.
					Pos:     compare.Pos(),
					End:     compare.End(),
					Message: fmt.Sprintf("if/else statement can be modernized using %s", sym),
					SuggestedFixes: []analysis.SuggestedFix{{
						// NOTE(review): this fix message says "if statement" and the
						// pattern-2 one below says "if/else"; they look swapped
						// relative to the shapes matched — confirm against upstream.
						Message: fmt.Sprintf("Replace if statement with %s", sym),
						TextEdits: []analysis.TextEdit{{
							// Replace IfStmt with lhs = min(a, b).
							Pos: ifStmt.Pos(),
							End: ifStmt.End(),
							NewText: fmt.Appendf(nil, "%s = %s(%s%s)",
								astutil.Format(pass.Fset, lhs),
								sym,
								callArg(a, ifStmt.Pos(), ifStmt.Else.Pos()),
								callArg(b, ifStmt.Else.Pos(), ifStmt.End()),
							),
						}},
					}},
				})
			}

		} else if prev, ok := curIfStmt.PrevSibling(); ok && isSimpleAssign(prev.Node()) && ifStmt.Else == nil {
			fassign := prev.Node().(*ast.AssignStmt)

			// Have: lhs0 = rhs0; if a < b { lhs = rhs }
			//
			// For pattern 2, check that
			// - lhs = lhs0
			// - {a,b} = {rhs,rhs0} or {rhs,lhs0}
			//   The replacement must use rhs0 not lhs0 though.
			//   For example, we accept this variant:
			//      lhs = x; if lhs < y { lhs = y }  =>  lhs = min(x, y), not min(lhs, y)
			//
			// TODO(adonovan): accept "var lhs0 = rhs0" form too.
			lhs0 := fassign.Lhs[0]
			rhs0 := fassign.Rhs[0]

			if astutil.EqualSyntax(lhs, lhs0) {
				if astutil.EqualSyntax(rhs, a) && (astutil.EqualSyntax(rhs0, b) || astutil.EqualSyntax(lhs0, b)) {
					sign = +sign
				} else if (astutil.EqualSyntax(rhs0, a) || astutil.EqualSyntax(lhs0, a)) && astutil.EqualSyntax(rhs, b) {
					sign = -sign
				} else {
					return
				}
				sym := cond(sign < 0, "min", "max")

				if !is[*types.Builtin](lookup(pass.TypesInfo, curIfStmt, sym)) {
					return // min/max function is shadowed
				}

				// Permit lhs0 to stand for rhs0 in the matching,
				// but don't actually reduce to lhs0 = min(lhs0, rhs)
				// since the "=" could be a ":=". Use min(rhs0, rhs).
				if astutil.EqualSyntax(lhs0, a) {
					a = rhs0
				} else if astutil.EqualSyntax(lhs0, b) {
					b = rhs0
				}

				// pattern 2
				pass.Report(analysis.Diagnostic{
					// Highlight the condition a < b.
					Pos:     compare.Pos(),
					End:     compare.End(),
					Message: fmt.Sprintf("if statement can be modernized using %s", sym),
					SuggestedFixes: []analysis.SuggestedFix{{
						Message: fmt.Sprintf("Replace if/else with %s", sym),
						TextEdits: []analysis.TextEdit{{
							Pos: fassign.Pos(),
							End: ifStmt.End(),
							// Replace "x := a; if ... {}" with "x = min(...)", preserving comments.
							NewText: fmt.Appendf(nil, "%s %s %s(%s%s)",
								astutil.Format(pass.Fset, lhs),
								fassign.Tok.String(),
								sym,
								callArg(a, fassign.Pos(), ifStmt.Pos()),
								callArg(b, ifStmt.Pos(), ifStmt.End()),
							),
						}},
					}},
				})
			}
		}
	}

	// Find all "if a < b { lhs = rhs }" statements.
	info := pass.TypesInfo
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(inspect, info, "go1.21") {
		astFile := curFile.Node().(*ast.File)
		for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) {
			ifStmt := curIfStmt.Node().(*ast.IfStmt)

			// Don't bother handling "if a < b { lhs = rhs }" when it appears
			// as the "else" branch of another if-statement.
			//    if cond { ... } else if a < b { lhs = rhs }
			// (This case would require introducing another block
			//    if cond { ... } else { if a < b { lhs = rhs } }
			// and checking that there is no following "else".)
			if astutil.IsChildOf(curIfStmt, edge.IfStmt_Else) {
				continue
			}

			if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok &&
				ifStmt.Init == nil &&
				isInequality(compare.Op) != 0 &&
				isAssignBlock(ifStmt.Body) {
				// a blank var has no type.
				if tLHS := info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0]); tLHS != nil && !maybeNaN(tLHS) {
					// Have: if a < b { lhs = rhs }
					check(astFile, curIfStmt, compare)
				}
			}
		}
	}
	return nil, nil
}
// allComments returns the text of every comment in file within the
// interval [start, end), each followed by a newline.
func allComments(file *ast.File, start, end token.Pos) string {
	var sb strings.Builder
	for c := range astutil.Comments(file, start, end) {
		sb.WriteString(c.Text)
		sb.WriteByte('\n')
	}
	return sb.String()
}
// isInequality reports non-zero if tok is one of < <= => >:
// +1 for > and -1 for <.
func isInequality(tok token.Token) int {
switch tok {
case token.LEQ, token.LSS:
return -1
case token.GEQ, token.GTR:
return +1
}
return 0
}
// isAssignBlock reports whether b is a block containing exactly one
// statement of the form "lhs = rhs".
// (Inv: in context the sole statement is never "lhs := rhs".)
func isAssignBlock(b *ast.BlockStmt) bool {
	return len(b.List) == 1 && isSimpleAssign(b.List[0])
}
// isSimpleAssign reports whether n has the form "lhs = rhs" or "lhs := rhs".
func isSimpleAssign(n ast.Node) bool {
assign, ok := n.(*ast.AssignStmt)
return ok &&
(assign.Tok == token.ASSIGN || assign.Tok == token.DEFINE) &&
len(assign.Lhs) == 1 &&
len(assign.Rhs) == 1
}
// maybeNaN reports whether t is (or may be) a floating-point type,
// whose values can include NaN.
func maybeNaN(t types.Type) bool {
	// For now, we rely on core types.
	// TODO(adonovan): In the post-core-types future,
	// follow the approach of types.Checker.applyTypeFunc.
	core := typeparams.CoreType(t)
	if core == nil {
		return true // fail safe
	}
	basic, ok := core.(*types.Basic)
	return ok && basic.Info()&types.IsFloat != 0
}
// checkUserDefinedMinMax looks for user-defined min/max functions that are
// equivalent to the built-in functions and suggests removing them.
//
// The suggested fix deletes the whole declaration (including its doc
// comment); the inline analyzer or the built-ins then serve callers.
func checkUserDefinedMinMax(pass *analysis.Pass) {
	index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)

	// Look up min and max functions by name in package scope
	for _, funcName := range []string{"min", "max"} {
		if fn, ok := pass.Pkg.Scope().Lookup(funcName).(*types.Func); ok {
			// Use typeindex to get the FuncDecl directly
			if def, ok := index.Def(fn); ok {
				decl := def.Parent().Node().(*ast.FuncDecl)

				// Check if this function matches the built-in min/max signature and behavior
				if canUseBuiltinMinMax(fn, decl.Body) {
					// Expand to include leading doc comment
					// (pos is used only for the deletion edit;
					// the diagnostic highlights the decl itself).
					pos := decl.Pos()
					if docs := astutil.DocComment(decl); docs != nil {
						pos = docs.Pos()
					}

					pass.Report(analysis.Diagnostic{
						Pos:     decl.Pos(),
						End:     decl.End(),
						Message: fmt.Sprintf("user-defined %s function is equivalent to built-in %s and can be removed", funcName, funcName),
						SuggestedFixes: []analysis.SuggestedFix{{
							Message: fmt.Sprintf("Remove user-defined %s function", funcName),
							TextEdits: []analysis.TextEdit{{
								// Empty NewText: pure deletion of [pos, decl.End).
								Pos: pos,
								End: decl.End(),
							}},
						}},
					})
				}
			}
		}
	}
}
// canUseBuiltinMinMax reports whether it is safe to replace a call
// to this min or max function by its built-in namesake.
func canUseBuiltinMinMax(fn *types.Func, body *ast.BlockStmt) bool {
sig := fn.Type().(*types.Signature)
// Only consider the most common case: exactly 2 parameters
if sig.Params().Len() != 2 {
return false
}
// Check if any parameter might be floating-point
for param := range sig.Params().Variables() {
if maybeNaN(param.Type()) {
return false // Don't suggest removal for float types due to NaN handling
}
}
// Must have exactly one return value
if sig.Results().Len() != 1 {
return false
}
// Check that the function body implements the expected min/max logic
if body == nil {
return false
}
return hasMinMaxLogic(body, fn.Name())
}
// hasMinMaxLogic reports whether the function body implements simple
// min/max logic: a lone if/else where each branch returns one of the
// compared operands, or an if followed by a return.
func hasMinMaxLogic(body *ast.BlockStmt, funcName string) bool {
	switch len(body.List) {
	case 1:
		// Pattern 1: if x OP y { return a } else { return b }
		ifStmt, ok := body.List[0].(*ast.IfStmt)
		if !ok {
			return false
		}
		elseBlock, ok := ifStmt.Else.(*ast.BlockStmt)
		if !ok || len(elseBlock.List) != 1 {
			return false
		}
		ret, ok := elseBlock.List[0].(*ast.ReturnStmt)
		if !ok || len(ret.Results) != 1 {
			return false
		}
		return checkMinMaxPattern(ifStmt, ret.Results[0], funcName)

	case 2:
		// Pattern 2: if x OP y { return a }; return b
		ifStmt, ok := body.List[0].(*ast.IfStmt)
		if !ok || ifStmt.Else != nil {
			return false
		}
		ret, ok := body.List[1].(*ast.ReturnStmt)
		if !ok || len(ret.Results) != 1 {
			return false
		}
		return checkMinMaxPattern(ifStmt, ret.Results[0], funcName)
	}
	return false
}
// checkMinMaxPattern reports whether ifStmt implements the logic of
// the built-in named funcName ("min" or "max"), where falseResult is
// the expression returned when the condition is false.
func checkMinMaxPattern(ifStmt *ast.IfStmt, falseResult ast.Expr, funcName string) bool {
	// The condition must be an ordered comparison.
	cmp, ok := ifStmt.Cond.(*ast.BinaryExpr)
	if !ok {
		return false
	}
	sign := isInequality(cmp.Op)
	if sign == 0 {
		return false // not < <= >= >
	}

	// The then branch must be a single return of one operand.
	if len(ifStmt.Body.List) != 1 {
		return false
	}
	thenRet, ok := ifStmt.Body.List[0].(*ast.ReturnStmt)
	if !ok || len(thenRet.Results) != 1 {
		return false
	}
	trueResult := thenRet.Results[0]

	// Match {trueResult,falseResult} against the comparison's
	// operands {x,y}, flipping the sign if they are swapped.
	switch {
	case astutil.EqualSyntax(trueResult, cmp.X) && astutil.EqualSyntax(falseResult, cmp.Y):
		// sign unchanged
	case astutil.EqualSyntax(trueResult, cmp.Y) && astutil.EqualSyntax(falseResult, cmp.X):
		sign = -sign
	default:
		return false
	}

	// A negative sign (returning the lesser value) means min.
	return cond(sign < 0, "min", "max") == funcName
}
// -- utils --
// is reports whether the dynamic type of x is T.
func is[T any](x any) bool {
	switch x.(type) {
	case T:
		return true
	default:
		return false
	}
}
// cond returns t if cond is true, and f otherwise.
// (A generic analogue of C's ?: operator.)
func cond[T any](cond bool, t, f T) T {
	if !cond {
		return f
	}
	return t
}

View file

@ -0,0 +1,162 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
_ "embed"
"go/ast"
"go/constant"
"go/format"
"go/token"
"go/types"
"iter"
"regexp"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal/generated"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/moreiters"
"golang.org/x/tools/internal/packagepath"
"golang.org/x/tools/internal/stdlib"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/versions"
)
// doc holds the contents of doc.go, from which each analyzer's Doc
// string is extracted by analysisinternal.MustExtractDoc.
//
//go:embed doc.go
var doc string
// Suite lists all modernize analyzers.
//
// Entries that are commented out produce fixes that do not preserve
// the nilness of slices/maps and are therefore excluded; see the
// inline notes.
var Suite = []*analysis.Analyzer{
	AnyAnalyzer,
	// AppendClippedAnalyzer, // not nil-preserving!
	BLoopAnalyzer,
	FmtAppendfAnalyzer,
	ForVarAnalyzer,
	MapsLoopAnalyzer,
	MinMaxAnalyzer,
	NewExprAnalyzer,
	OmitZeroAnalyzer,
	plusBuildAnalyzer, // unexported: not yet published standalone
	RangeIntAnalyzer,
	ReflectTypeForAnalyzer,
	SlicesContainsAnalyzer,
	// SlicesDeleteAnalyzer, // not nil-preserving!
	SlicesSortAnalyzer,
	stditeratorsAnalyzer, // unexported: not yet published standalone
	StringsCutPrefixAnalyzer,
	StringsSeqAnalyzer,
	StringsBuilderAnalyzer,
	TestingContextAnalyzer,
	WaitGroupAnalyzer,
}
// -- helpers --
// skipGenerated decorates pass.Report so that diagnostics whose
// position falls in a generated file are silently discarded.
func skipGenerated(pass *analysis.Pass) {
	underlying := pass.Report
	pass.Report = func(diag analysis.Diagnostic) {
		res := pass.ResultOf[generated.Analyzer].(*generated.Result)
		if !res.IsGenerated(diag.Pos) {
			underlying(diag)
		}
	}
}
// formatExprs formats a comma-separated list of expressions.
func formatExprs(fset *token.FileSet, exprs []ast.Expr) string {
var buf strings.Builder
for i, e := range exprs {
if i > 0 {
buf.WriteString(", ")
}
format.Node(&buf, fset, e) // ignore errors
}
return buf.String()
}
// isZeroIntLiteral reports whether e is an integer constant
// expression whose value is 0.
func isZeroIntLiteral(info *types.Info, e ast.Expr) bool {
	return isIntLiteral(info, e, 0)
}
// isIntLiteral reports whether e is an integer with given value.
func isIntLiteral(info *types.Info, e ast.Expr, n int64) bool {
return info.Types[e].Value == constant.MakeInt64(n)
}
// filesUsing returns a cursor for each *ast.File in the inspector
// that uses at least the specified version of Go (e.g. "go1.24").
//
// TODO(adonovan): opt: eliminate this function, instead following the
// approach of [fmtappendf], which uses typeindex and [fileUses].
// See "Tip" at [fileUses] for motivation.
func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[inspector.Cursor] {
	return func(yield func(inspector.Cursor) bool) {
		for cur := range inspect.Root().Children() {
			f := cur.Node().(*ast.File)
			if versions.Before(info.FileVersions[f], version) {
				continue // file predates the required version
			}
			if !yield(cur) {
				return
			}
		}
	}
}
// fileUses reports whether the specified file uses at least the
// specified version of Go (e.g. "go1.24").
//
// Tip: we recommend using this check "late", just before calling
// pass.Report, rather than "early" (when entering each ast.File, or
// each candidate node of interest, during the traversal), because the
// operation is not free, yet is not a highly selective filter: the
// fraction of files that pass most version checks is high and
// increases over time.
func fileUses(info *types.Info, file *ast.File, version string) bool {
	// The file's effective version comes from types.Info.FileVersions.
	return !versions.Before(info.FileVersions[file], version)
}
// within reports whether the current pass is analyzing one of the
// specified standard packages or their dependencies.
func within(pass *analysis.Pass, pkgs ...string) bool {
	path := pass.Pkg.Path()
	if !packagepath.IsStdPackage(path) {
		return false
	}
	return moreiters.Contains(stdlib.Dependencies(pkgs...), path)
}
// unparenEnclosing ascends past any enclosing ParenExprs around cur,
// in preparation for a call to [Cursor.ParentEdge].
func unparenEnclosing(cur inspector.Cursor) inspector.Cursor {
	for {
		if !astutil.IsChildOf(cur, edge.ParenExpr_X) {
			return cur
		}
		cur = cur.Parent()
	}
}
// Frequently used universe-scope objects, cached for cheap identity
// comparisons (e.g. info.Uses[id] == builtinMake).
var (
	builtinAny    = types.Universe.Lookup("any")
	builtinAppend = types.Universe.Lookup("append")
	builtinBool   = types.Universe.Lookup("bool")
	builtinInt    = types.Universe.Lookup("int")
	builtinFalse  = types.Universe.Lookup("false")
	builtinLen    = types.Universe.Lookup("len")
	builtinMake   = types.Universe.Lookup("make")
	builtinNew    = types.Universe.Lookup("new")
	builtinNil    = types.Universe.Lookup("nil")
	builtinString = types.Universe.Lookup("string")
	builtinTrue   = types.Universe.Lookup("true")

	// byteSliceType is the unnamed type []byte.
	byteSliceType = types.NewSlice(types.Typ[types.Byte])

	// omitemptyRegex matches a ",omitempty" option within a json
	// struct-tag value; submatch 1 spans the ",omitempty" text.
	omitemptyRegex = regexp.MustCompile(`(?:^json| json):"[^"]*(,omitempty)(?:"|,[^"]*")\s?`)
)
// lookup returns the symbol denoted by name at the position of the
// cursor, honoring lexical scoping (and thus shadowing).
// It returns nil if the name is not found.
func lookup(info *types.Info, cur inspector.Cursor, name string) types.Object {
	// Resolve name outward from the innermost scope enclosing cur,
	// at the cursor's position.
	scope := typesinternal.EnclosingScope(info, cur)
	_, obj := scope.LookupParent(name, cur.Node().Pos())
	return obj
}

View file

@ -0,0 +1,208 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
	_ "embed"
	"fmt"
	"go/ast"
	"go/token"
	"go/types"
	"strings"

	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
	"golang.org/x/tools/go/ast/inspector"
	"golang.org/x/tools/go/types/typeutil"
	"golang.org/x/tools/internal/analysisinternal"
	"golang.org/x/tools/internal/astutil"
)
// NewExprAnalyzer defines the "newexpr" modernizer, which marks
// "new-like" wrapper functions (func f(x T) *T { return &x }) and
// simplifies calls to them to new(x) (go1.26); see [run] for details.
var NewExprAnalyzer = &analysis.Analyzer{
	Name: "newexpr",
	Doc:  analysisinternal.MustExtractDoc(doc, "newexpr"),
	// Fix: point at this package's published location, consistent with
	// the other analyzers here, not the obsolete gopls-internal path.
	URL:       "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#newexpr",
	Requires:  []*analysis.Analyzer{inspect.Analyzer},
	Run:       run,
	FactTypes: []analysis.Fact{&newLike{}},
}
// run implements the newexpr pass. It exports a "new-like" fact for
// each function of the form
//
//	func f(x T) *T { return &x }
//
// offers to rewrite the body of such functions to use new(expr)
// (go1.26), and simplifies calls f(e) to new(e) where that is safe.
func run(pass *analysis.Pass) (any, error) {
	var (
		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
		info    = pass.TypesInfo
	)

	// Detect functions that are new-like, i.e. have the form:
	//
	//	func f(x T) *T { return &x }
	//
	// meaning that it is equivalent to new(x), if x has type T.
	for curFuncDecl := range inspect.Root().Preorder((*ast.FuncDecl)(nil)) {
		decl := curFuncDecl.Node().(*ast.FuncDecl)
		fn := info.Defs[decl.Name].(*types.Func)
		if decl.Body != nil && len(decl.Body.List) == 1 {
			if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
				if unary, ok := ret.Results[0].(*ast.UnaryExpr); ok && unary.Op == token.AND {
					if id, ok := unary.X.(*ast.Ident); ok {
						if v, ok := info.Uses[id].(*types.Var); ok {
							sig := fn.Signature()
							if sig.Results().Len() == 1 &&
								is[*types.Pointer](sig.Results().At(0).Type()) && // => no iface conversion
								sig.Params().Len() == 1 &&
								sig.Params().At(0) == v {

								// Export a fact for each one.
								pass.ExportObjectFact(fn, &newLike{})

								// Check file version.
								file := astutil.EnclosingFile(curFuncDecl)
								if !fileUses(info, file, "go1.26") {
									continue // new(expr) not available in this file
								}

								var edits []analysis.TextEdit

								// If 'new' is not shadowed, replace func body: &x -> new(x).
								// This makes it safely and cleanly inlinable.
								curRet, _ := curFuncDecl.FindNode(ret)
								if lookup(info, curRet, "new") == builtinNew {
									edits = []analysis.TextEdit{
										// return &x
										// ->
										// return new(x)
										{
											Pos:     unary.OpPos,
											End:     unary.OpPos + token.Pos(len("&")),
											NewText: []byte("new("),
										},
										{
											Pos:     unary.X.End(),
											End:     unary.X.End(),
											NewText: []byte(")"),
										},
									}
								}

								// Disabled until we resolve https://go.dev/issue/75726
								// (Go version skew between caller and callee in inliner.)
								// TODO(adonovan): fix and reenable.
								//
								// Also, restore these lines to our section of doc.go:
								//	//go:fix inline
								//	...
								// (The directive comment causes the inline analyzer to suggest
								// that calls to such functions are inlined.)
								if false {
									// Add a //go:fix inline annotation, if not already present.
									// TODO(adonovan): use ast.ParseDirective when go1.26 is assured.
									// (decl.Doc.Text() is nil-safe.)
									if !strings.Contains(decl.Doc.Text(), "go:fix inline") {
										edits = append(edits, analysis.TextEdit{
											Pos:     decl.Pos(),
											End:     decl.Pos(),
											NewText: []byte("//go:fix inline\n"),
										})
									}
								}

								if len(edits) > 0 {
									pass.Report(analysis.Diagnostic{
										Pos:     decl.Name.Pos(),
										End:     decl.Name.End(),
										Message: fmt.Sprintf("%s can be an inlinable wrapper around new(expr)", decl.Name),
										SuggestedFixes: []analysis.SuggestedFix{
											{
												// Fix: the message previously contained a literal
												// "%s" because the Sprintf call was missing.
												Message:   fmt.Sprintf("Make %s an inlinable wrapper around new(expr)", decl.Name),
												TextEdits: edits,
											},
										},
									})
								}
							}
						}
					}
				}
			}
		}
	}

	// Report and transform calls, when safe.
	// In effect, this is inlining the new-like function
	// even before we have marked the callee with //go:fix inline.
	for curCall := range inspect.Root().Preorder((*ast.CallExpr)(nil)) {
		call := curCall.Node().(*ast.CallExpr)
		var fact newLike
		if fn, ok := typeutil.Callee(info, call).(*types.Func); ok &&
			pass.ImportObjectFact(fn, &fact) {
			// Check file version.
			file := astutil.EnclosingFile(curCall)
			if !fileUses(info, file, "go1.26") {
				continue // new(expr) not available in this file
			}

			// Check new is not shadowed.
			if lookup(info, curCall, "new") != builtinNew {
				continue
			}

			// The return type *T must exactly match the argument type T.
			// (We formulate it this way--not in terms of the parameter
			// type--to support generics.)
			var targ types.Type
			{
				arg := call.Args[0]
				tvarg := info.Types[arg]

				// Constants: we must work around the type checker
				// bug that causes info.Types to wrongly report the
				// "typed" type for an untyped constant.
				// (See "historical reasons" in issue go.dev/issue/70638.)
				//
				// We don't have a reliable way to do this but we can attempt
				// to re-typecheck the constant expression on its own, in
				// the original lexical environment but not as a part of some
				// larger expression that implies a conversion to some "typed" type.
				// (For the genesis of this idea see (*state).arguments
				// in ../../../../internal/refactor/inline/inline.go.)
				if tvarg.Value != nil {
					info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
					if err := types.CheckExpr(token.NewFileSet(), pass.Pkg, token.NoPos, arg, info2); err != nil {
						continue // unexpected error
					}
					tvarg = info2.Types[arg]
				}
				targ = types.Default(tvarg.Type)
			}
			if !types.Identical(types.NewPointer(targ), info.TypeOf(call)) {
				continue
			}

			pass.Report(analysis.Diagnostic{
				Pos:     call.Pos(),
				End:     call.End(),
				Message: fmt.Sprintf("call of %s(x) can be simplified to new(x)", fn.Name()),
				SuggestedFixes: []analysis.SuggestedFix{{
					Message: fmt.Sprintf("Simplify %s(x) to new(x)", fn.Name()),
					TextEdits: []analysis.TextEdit{{
						Pos:     call.Fun.Pos(),
						End:     call.Fun.End(),
						NewText: []byte("new"),
					}},
				}},
			})
		}
	}
	return nil, nil
}
// A newLike fact records that its associated function is "new-like":
// it has the form "func f(x T) *T { return &x }", so a call f(e) is
// equivalent to new(e).
type newLike struct{}

// AFact marks newLike as an analysis.Fact.
func (*newLike) AFact() {}

// String returns a short label for debugging output.
func (*newLike) String() string { return "newlike" }

View file

@ -0,0 +1,119 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"go/types"
"reflect"
"strconv"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
"golang.org/x/tools/internal/astutil"
)
// OmitZeroAnalyzer defines the "omitzero" modernizer, which flags
// ineffective "omitempty" json tags on struct-typed fields and offers
// to delete them or replace them with go1.24's "omitzero"; see
// [omitzero] for details.
var OmitZeroAnalyzer = &analysis.Analyzer{
	Name: "omitzero",
	Doc:  analysisinternal.MustExtractDoc(doc, "omitzero"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer, // suppresses diagnostics in generated files
		inspect.Analyzer,   // provides the AST inspector
	},
	Run: omitzero,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero",
}
// checkOmitEmptyField reports a diagnostic for curField if it is a
// struct-typed field whose json tag carries an ineffective "omitempty"
// option, offering fixes to delete the option or replace it with
// "omitzero".
func checkOmitEmptyField(pass *analysis.Pass, info *types.Info, curField *ast.Field) {
	typ := info.TypeOf(curField.Type)
	_, ok := typ.Underlying().(*types.Struct)
	if !ok {
		// Not a struct
		return
	}
	tag := curField.Tag
	if tag == nil {
		// No tag to check
		return
	}
	// The omitempty tag may be used by other packages besides json, but we should only modify its use with json
	tagconv, _ := strconv.Unquote(tag.Value)
	// match[2:4] spans the ",omitempty" submatch within the unquoted tag;
	// match[0:2] spans the whole json:"..." clause.
	match := omitemptyRegex.FindStringSubmatchIndex(tagconv)
	if match == nil {
		// No omitempty in json tag
		return
	}
	// Map the submatch's string offsets back to token.Pos within the
	// original (quoted, possibly escaped) struct-tag literal.
	omitEmptyPos, omitEmptyEnd, err := astutil.RangeInStringLiteral(curField.Tag, match[2], match[3])
	if err != nil {
		return
	}
	removePos, removeEnd := omitEmptyPos, omitEmptyEnd

	jsonTag := reflect.StructTag(tagconv).Get("json")
	if jsonTag == ",omitempty" {
		// Remove the entire struct tag if json is the only package used
		if match[1]-match[0] == len(tagconv) {
			removePos = curField.Tag.Pos()
			removeEnd = curField.Tag.End()
		} else {
			// Remove the json tag if omitempty is the only field
			removePos, err = astutil.PosInStringLiteral(curField.Tag, match[0])
			if err != nil {
				return
			}
			removeEnd, err = astutil.PosInStringLiteral(curField.Tag, match[1])
			if err != nil {
				return
			}
		}
	}
	pass.Report(analysis.Diagnostic{
		Pos:     curField.Tag.Pos(),
		End:     curField.Tag.End(),
		Message: "Omitempty has no effect on nested struct fields",
		SuggestedFixes: []analysis.SuggestedFix{
			{
				// Deletion-only edit: no NewText.
				Message: "Remove redundant omitempty tag",
				TextEdits: []analysis.TextEdit{
					{
						Pos: removePos,
						End: removeEnd,
					},
				},
			},
			{
				Message: "Replace omitempty with omitzero (behavior change)",
				TextEdits: []analysis.TextEdit{
					{
						Pos:     omitEmptyPos,
						End:     omitEmptyEnd,
						NewText: []byte(",omitzero"),
					},
				},
			},
		}})
}
// The omitzero pass searches struct json field tags for "omitempty"
// options. Since "omitempty" has no effect on a field of struct type,
// it suggests either deleting the option or replacing it with
// "omitzero" (go1.24), which correctly excludes structs from a json
// encoding.
func omitzero(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
		info    = pass.TypesInfo
	)
	for curFile := range filesUsing(inspect, info, "go1.24") {
		for curStruct := range curFile.Preorder((*ast.StructType)(nil)) {
			st := curStruct.Node().(*ast.StructType)
			for _, field := range st.Fields.List {
				checkOmitEmptyField(pass, info, field)
			}
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,83 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"go/parser"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/goplsexport"
)
// plusBuildAnalyzer offers to delete obsolete "// +build" comment
// lines, which are redundant in files that also carry the equivalent
// //go:build directive (as all gofmt-formatted go1.18+ files do).
var plusBuildAnalyzer = &analysis.Analyzer{
	Name: "plusbuild",
	Doc:  analysisinternal.MustExtractDoc(doc, "plusbuild"),
	URL:  "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#plusbuild",
	Run:  plusbuild,
}
func init() {
	// Export to gopls until this is a published modernizer.
	// (gopls reads this variable via the goplsexport bridge package.)
	goplsexport.PlusBuildModernizer = plusBuildAnalyzer
}
// plusbuild reports, in each comment group, the first "// +build" line
// that follows a "//go:build" line, and offers to delete it.
func plusbuild(pass *analysis.Pass) (any, error) {
	checkFile := func(f *ast.File) {
		if !fileUses(pass.TypesInfo, f, "go1.18") {
			return
		}
		// gofmt inserts an equivalent //go:build directive before any
		// +build comment it sees, so in a formatted file a +build line
		// always belongs to a comment group that begins with a
		// //go:build line and is followed by a blank line.
		//
		// Deleting comments from an AST does not produce consistent
		// output in general, but this specific case--removing only
		// some lines from a comment block--does format correctly.
		for _, group := range f.Comments {
			afterGoBuild := false
			for _, comment := range group.List {
				if afterGoBuild && strings.HasPrefix(comment.Text, "// +build ") {
					pass.Report(analysis.Diagnostic{
						Pos:     comment.Pos(),
						End:     comment.End(),
						Message: "+build line is no longer needed",
						SuggestedFixes: []analysis.SuggestedFix{{
							Message: "Remove obsolete +build line",
							TextEdits: []analysis.TextEdit{{
								Pos: comment.Pos(),
								End: comment.End(),
							}},
						}},
					})
					break // at most one diagnostic per group
				}
				if strings.HasPrefix(comment.Text, "//go:build ") {
					afterGoBuild = true
				}
			}
		}
	}
	for _, f := range pass.Files {
		checkFile(f)
	}
	// Also check files excluded by the current build configuration.
	for _, name := range pass.IgnoredFiles {
		if !strings.HasSuffix(name, ".go") {
			continue
		}
		f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments|parser.SkipObjectResolution)
		if err != nil {
			continue // parse error: ignore
		}
		checkFile(f)
	}
	return nil, nil
}

View file

@ -0,0 +1,310 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// RangeIntAnalyzer is the rangeint analyzer, which offers to replace
// 3-clause loops of the form "for i := 0; i < limit; i++" by the
// go1.22 form "for i := range limit".
var RangeIntAnalyzer = &analysis.Analyzer{
	Name: "rangeint",
	Doc:  analysisinternal.MustExtractDoc(doc, "rangeint"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: rangeint,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#rangeint",
}
// rangeint offers a fix to replace a 3-clause 'for' loop:
//
//	for i := 0; i < limit; i++ {}
//
// by a range loop with an integer operand:
//
//	for i := range limit {}
//
// Variants:
//   - The ':=' may be replaced by '='.
//   - The fix may remove "i :=" if it would become unused.
//
// Restrictions:
//   - The variable i must not be assigned or address-taken within the
//     loop, because a "for range int" loop does not respect assignments
//     to the loop index.
//   - The limit must not be b.N, to avoid redundancy with bloop's fixes.
//
// Caveats:
//
// The fix causes the limit expression to be evaluated exactly once,
// instead of once per iteration. So, to avoid changing the
// cardinality of side effects, the limit expression must not involve
// function calls (e.g. seq.Len()) or channel receives. Moreover, the
// value of the limit expression must be loop invariant, which in
// practice means it must take one of the following forms:
//
//   - a local variable that is assigned only once and not address-taken;
//   - a constant; or
//   - len(s), where s has the above properties.
func rangeint(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	info := pass.TypesInfo
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	typeindex := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
	for curFile := range filesUsing(inspect, info, "go1.22") {
	nextLoop:
		for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) {
			loop := curLoop.Node().(*ast.ForStmt)
			if init, ok := loop.Init.(*ast.AssignStmt); ok &&
				isSimpleAssign(init) &&
				is[*ast.Ident](init.Lhs[0]) &&
				isZeroIntLiteral(info, init.Rhs[0]) {
				// Have: for i = 0; ... (or i := 0)
				index := init.Lhs[0].(*ast.Ident)
				if compare, ok := loop.Cond.(*ast.BinaryExpr); ok &&
					compare.Op == token.LSS &&
					astutil.EqualSyntax(compare.X, init.Lhs[0]) {
					// Have: for i = 0; i < limit; ... {}
					limit := compare.Y
					// If limit is "len(slice)", simplify it to "slice".
					//
					// (Don't replace "for i := 0; i < len(map); i++"
					// with "for range m" because it's too hard to prove
					// that len(m) is loop-invariant).
					if call, ok := limit.(*ast.CallExpr); ok &&
						typeutil.Callee(info, call) == builtinLen &&
						is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) {
						limit = call.Args[0]
					}
					// Check the form of limit: must be a constant,
					// or a local var that is not assigned or address-taken.
					limitOK := false
					if info.Types[limit].Value != nil {
						limitOK = true // constant
					} else if id, ok := limit.(*ast.Ident); ok {
						if v, ok := info.Uses[id].(*types.Var); ok &&
							!(v.Exported() && typesinternal.IsPackageLevel(v)) {
							// limit is a local or unexported global var.
							// (An exported global may have uses we can't see.)
							for cur := range typeindex.Uses(v) {
								if isScalarLvalue(info, cur) {
									// Limit var is assigned or address-taken.
									continue nextLoop
								}
							}
							limitOK = true
						}
					}
					if !limitOK {
						continue nextLoop
					}
					if inc, ok := loop.Post.(*ast.IncDecStmt); ok &&
						inc.Tok == token.INC &&
						astutil.EqualSyntax(compare.X, inc.X) {
						// Have: for i = 0; i < limit; i++ {}
						// Find references to i within the loop body.
						v := info.ObjectOf(index).(*types.Var)
						// TODO(adonovan): use go1.25 v.Kind() == types.PackageVar
						if typesinternal.IsPackageLevel(v) {
							continue nextLoop
						}
						used := false
						for curId := range curLoop.Child(loop.Body).Preorder((*ast.Ident)(nil)) {
							id := curId.Node().(*ast.Ident)
							if info.Uses[id] == v {
								used = true
								// Reject if any is an l-value (assigned or address-taken):
								// a "for range int" loop does not respect assignments to
								// the loop variable.
								if isScalarLvalue(info, curId) {
									continue nextLoop
								}
							}
						}
						// If i is no longer used, delete "i := ".
						var edits []analysis.TextEdit
						if !used && init.Tok == token.DEFINE {
							edits = append(edits, analysis.TextEdit{
								Pos: index.Pos(),
								End: init.Rhs[0].Pos(),
							})
						}
						// If i is used after the loop,
						// don't offer a fix, as a range loop
						// leaves i with a different final value (limit-1).
						if init.Tok == token.ASSIGN {
							for curId := range curLoop.Parent().Preorder((*ast.Ident)(nil)) {
								id := curId.Node().(*ast.Ident)
								if info.Uses[id] == v {
									// Is i used after loop?
									if id.Pos() > loop.End() {
										continue nextLoop
									}
									// Is i used within a defer statement
									// that is within the scope of i?
									//	var i int
									//	defer func() { print(i)}
									//	for i = ... { ... }
									for curDefer := range curId.Enclosing((*ast.DeferStmt)(nil)) {
										if curDefer.Node().Pos() > v.Pos() {
											continue nextLoop
										}
									}
								}
							}
						}
						// If limit is len(slice),
						// simplify "range len(slice)" to "range slice".
						//
						// NOTE(review): limit was already simplified this
						// way before the limitOK check above, and is not
						// reassigned in between, so this repeat looks like
						// a no-op -- confirm and consider removing.
						if call, ok := limit.(*ast.CallExpr); ok &&
							typeutil.Callee(info, call) == builtinLen &&
							is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) {
							limit = call.Args[0]
						}
						// If the limit is an untyped constant of non-integer type,
						// such as "const limit = 1e3", its effective type may
						// differ between the two forms.
						// In a for loop, it must be comparable with int i,
						//	for i := 0; i < limit; i++
						// but in a range loop it would become a float,
						//	for i := range limit {}
						// which is a type error. We need to convert it to int
						// in this case.
						//
						// Unfortunately go/types discards the untyped type
						// (but see Untyped in golang/go#70638) so we must
						// re-type check the expression to detect this case.
						var beforeLimit, afterLimit string
						if v := info.Types[limit].Value; v != nil {
							tVar := info.TypeOf(init.Rhs[0])
							file := curFile.Node().(*ast.File)
							// TODO(mkalil): use a types.Qualifier that respects the existing
							// imports of this file that are visible (not shadowed) at the current position.
							qual := typesinternal.FileQualifier(file, pass.Pkg)
							beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, qual)), ")"
							info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)}
							if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil {
								tLimit := types.Default(info2.TypeOf(limit))
								if types.AssignableTo(tLimit, tVar) {
									// The constant's default type is assignable
									// to the index type: no conversion needed.
									beforeLimit, afterLimit = "", ""
								}
							}
						}
						pass.Report(analysis.Diagnostic{
							Pos:     init.Pos(),
							End:     inc.End(),
							Message: "for loop can be modernized using range over int",
							SuggestedFixes: []analysis.SuggestedFix{{
								Message: fmt.Sprintf("Replace for loop with range %s",
									astutil.Format(pass.Fset, limit)),
								TextEdits: append(edits, []analysis.TextEdit{
									// for i := 0; i < limit; i++ {}
									//     -----              ---
									//          -------
									// for i := range limit {}
									// Delete init.
									{
										Pos:     init.Rhs[0].Pos(),
										End:     limit.Pos(),
										NewText: []byte("range "),
									},
									// Add "int(" before limit, if needed.
									{
										Pos:     limit.Pos(),
										End:     limit.Pos(),
										NewText: []byte(beforeLimit),
									},
									// Delete inc.
									{
										Pos: limit.End(),
										End: inc.End(),
									},
									// Add ")" after limit, if needed.
									{
										Pos:     limit.End(),
										End:     limit.End(),
										NewText: []byte(afterLimit),
									},
								}...),
							}},
						})
					}
				}
			}
		}
	}
	return nil, nil
}
// isScalarLvalue reports whether the identifier at curId is used as an
// l-value: address-taken, incremented/decremented, or appearing on the
// left side of an assignment.
//
// This function is valid only for scalars (x = ...),
// not for aggregates (x.a[i] = ...)
func isScalarLvalue(info *types.Info, curId inspector.Cursor) bool {
	// info.Types[e].Assignable() is true for any variable, even one
	// used only as an r-value, so instead we must examine the
	// enclosing syntax to see how the identifier is used.
	cur := curId
	ek, _ := cur.ParentEdge()
	for ek == edge.ParenExpr_X { // discard enclosing parens
		cur = cur.Parent()
		ek, _ = cur.ParentEdge()
	}
	switch ek {
	case edge.IncDecStmt_X:
		return true // i++, i--
	case edge.UnaryExpr_X:
		// &i takes the address.
		return cur.Parent().Node().(*ast.UnaryExpr).Op == token.AND
	case edge.AssignStmt_Lhs:
		assign := cur.Parent().Node().(*ast.AssignStmt)
		if assign.Tok != token.DEFINE {
			return true // i = j or i += j
		}
		// In "i, j := 1, 2", a previously declared i is
		// reassigned, not defined: its object position
		// differs from the identifier's.
		id := curId.Node().(*ast.Ident)
		v, ok := info.Defs[id]
		return ok && v.Pos() != id.Pos()
	}
	return false
}

View file

@ -0,0 +1,143 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
// This file defines modernizers that use the "reflect" package.
import (
"go/ast"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
"golang.org/x/tools/internal/versions"
)
// ReflectTypeForAnalyzer is the reflecttypefor analyzer, which offers
// to simplify reflect.TypeOf(expr) calls to the go1.22 form
// reflect.TypeFor[T]() when the operand's type T is statically known.
var ReflectTypeForAnalyzer = &analysis.Analyzer{
	Name: "reflecttypefor",
	Doc:  analysisinternal.MustExtractDoc(doc, "reflecttypefor"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: reflecttypefor,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#reflecttypefor",
}
// reflecttypefor offers to replace reflect.TypeOf(expr) by
// reflect.TypeFor[T](), where T is the static type of expr, provided
// expr has no side effects and its type is not an interface (in which
// case TypeOf is a dynamic operation). It also handles the
// TypeOf((*T)(nil)).Elem() idiom used for interface types.
func reflecttypefor(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		index         = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info          = pass.TypesInfo
		reflectTypeOf = index.Object("reflect", "TypeOf")
	)
	for curCall := range index.Calls(reflectTypeOf) {
		call := curCall.Node().(*ast.CallExpr)
		// Have: reflect.TypeOf(expr)
		expr := call.Args[0]
		if !typesinternal.NoEffects(info, expr) {
			continue // don't eliminate operand: may have effects
		}
		t := info.TypeOf(expr)
		var edits []analysis.TextEdit
		// Special case for TypeOf((*T)(nil)).Elem(),
		// needed when T is an interface type.
		if astutil.IsChildOf(curCall, edge.SelectorExpr_X) {
			curSel := unparenEnclosing(curCall).Parent()
			if astutil.IsChildOf(curSel, edge.CallExpr_Fun) {
				call2 := unparenEnclosing(curSel).Parent().Node().(*ast.CallExpr)
				obj := typeutil.Callee(info, call2)
				if typesinternal.IsMethodNamed(obj, "reflect", "Type", "Elem") {
					if ptr, ok := t.(*types.Pointer); ok {
						// Have: TypeOf(expr).Elem() where expr : *T
						t = ptr.Elem()
						// Delete the .Elem() suffix:
						// reflect.TypeOf(expr).Elem()
						//                     -------
						// reflect.TypeOf(expr)
						edits = []analysis.TextEdit{{
							Pos: call.End(),
							End: call2.End(),
						}}
					}
				}
			}
		}
		// TypeOf(x) where x has an interface type is a
		// dynamic operation; don't transform it to TypeFor.
		// (edits == nil means "not the Elem() special case".)
		if types.IsInterface(t) && edits == nil {
			continue
		}
		file := astutil.EnclosingFile(curCall)
		if versions.Before(info.FileVersions[file], "go1.22") {
			continue // TypeFor requires go1.22
		}
		tokFile := pass.Fset.File(file.Pos())
		// Format the type as valid Go syntax.
		// TODO(adonovan): FileQualifier needs to respect
		// visibility at the current point, and either fail
		// or edit the imports as needed.
		qual := typesinternal.FileQualifier(file, pass.Pkg)
		tstr := types.TypeString(t, qual)
		sel, ok := call.Fun.(*ast.SelectorExpr)
		if !ok {
			continue // e.g. reflect was dot-imported
		}
		// If the call argument contains the last use
		// of a variable, as in:
		//	var zero T
		//	reflect.TypeOf(zero)
		// remove the declaration of that variable.
		curArg0 := curCall.ChildAt(edge.CallExpr_Args, 0)
		edits = append(edits, refactor.DeleteUnusedVars(index, info, tokFile, curArg0)...)
		pass.Report(analysis.Diagnostic{
			Pos:     call.Fun.Pos(),
			End:     call.Fun.End(),
			Message: "reflect.TypeOf call can be simplified using TypeFor",
			SuggestedFixes: []analysis.SuggestedFix{{
				// reflect.TypeOf (...T value...)
				//        ------  -------------
				// reflect.TypeFor[T](           )
				Message: "Replace TypeOf by TypeFor",
				TextEdits: append([]analysis.TextEdit{
					{
						Pos:     sel.Sel.Pos(),
						End:     sel.Sel.End(),
						NewText: []byte("TypeFor[" + tstr + "]"),
					},
					// delete (pure) argument
					{
						Pos: call.Lparen + 1,
						End: call.Rparen,
					},
				}, edits...),
			}},
		})
	}
	return nil, nil
}

View file

@ -0,0 +1,300 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/types"
"slices"
"strconv"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
)
// AppendClippedAnalyzer is the appendclipped analyzer, which offers to
// replace towers of append calls on a clipped base slice by calls to
// go1.21's slices.Concat or slices.Clone (or bytes.Clone).
//
// Warning: this analyzer is not safe to enable by default.
var AppendClippedAnalyzer = &analysis.Analyzer{
	Name: "appendclipped",
	Doc:  analysisinternal.MustExtractDoc(doc, "appendclipped"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
	},
	Run: appendclipped,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped",
}
// The appendclipped pass offers to simplify a tower of append calls:
//
//	append(append(append(base, a...), b...), c...)
//
// with a call to go1.21's slices.Concat(base, a, b, c), or simpler
// replacements such as slices.Clone(a) in degenerate cases.
//
// We offer bytes.Clone in preference to slices.Clone where
// appropriate, if the package already imports "bytes";
// their behaviors are identical.
//
// The base expression must denote a clipped slice (see [isClipped]
// for definition), otherwise the replacement might eliminate intended
// side effects to the base slice's array.
//
// Examples:
//
//	append(append(append(x[:0:0], a...), b...), c...) -> slices.Concat(a, b, c)
//	append(append(slices.Clip(a), b...)               -> slices.Concat(a, b)
//	append([]T{}, a...)                               -> slices.Clone(a)
//	append([]string(nil), os.Environ()...)            -> os.Environ()
//
// The fix does not always preserve the nilness of the base slice when
// the addends (a, b, c) are all empty (see #73557).
func appendclipped(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	// Skip the analyzer in packages where its
	// fixes would create an import cycle.
	if within(pass, "slices", "bytes", "runtime") {
		return nil, nil
	}
	info := pass.TypesInfo
	// sliceArgs is a non-empty (reversed) list of slices to be concatenated.
	simplifyAppendEllipsis := func(file *ast.File, call *ast.CallExpr, base ast.Expr, sliceArgs []ast.Expr) {
		// Only appends whose base is a clipped slice can be simplified:
		// We must conservatively assume an append to an unclipped slice
		// such as append(y[:0], x...) is intended to have effects on y.
		clipped, empty := clippedSlice(info, base)
		if clipped == nil {
			return
		}
		// If any slice arg has a different type from the base
		// (and thus the result) don't offer a fix, to avoid
		// changing the return type, e.g:
		//
		//	type S []int
		//	- x := append([]int(nil), S{}...) // x : []int
		//	+ x := slices.Clone(S{})          // x : S
		//
		// We could do better by inserting an explicit generic
		// instantiation:
		//
		//	x := slices.Clone[[]int](S{})
		//
		// but this is often unnecessary and unwanted, such as
		// when the value is used in an assignment context that
		// provides an explicit type:
		//
		//	var x []int = slices.Clone(S{})
		baseType := info.TypeOf(base)
		for _, arg := range sliceArgs {
			if !types.Identical(info.TypeOf(arg), baseType) {
				return
			}
		}
		// If the (clipped) base is empty, it may be safely ignored.
		// Otherwise treat it (or its unclipped subexpression, if possible)
		// as just another arg (the first) to Concat.
		//
		// TODO(adonovan): not so fast! If all the operands
		// are empty, then the nilness of base matters, because
		// append preserves nilness whereas Concat does not (#73557).
		if !empty {
			sliceArgs = append(sliceArgs, clipped)
		}
		slices.Reverse(sliceArgs)
		// TODO(adonovan): simplify sliceArgs[0] further: slices.Clone(s) -> s
		// Concat of a single (non-trivial) slice degenerates to Clone.
		if len(sliceArgs) == 1 {
			s := sliceArgs[0]
			// Special case for common but redundant clone of os.Environ().
			// append(zerocap, os.Environ()...) -> os.Environ()
			if scall, ok := s.(*ast.CallExpr); ok {
				obj := typeutil.Callee(info, scall)
				if typesinternal.IsFunctionNamed(obj, "os", "Environ") {
					pass.Report(analysis.Diagnostic{
						Pos:     call.Pos(),
						End:     call.End(),
						Message: "Redundant clone of os.Environ()",
						SuggestedFixes: []analysis.SuggestedFix{{
							Message: "Eliminate redundant clone",
							TextEdits: []analysis.TextEdit{{
								Pos:     call.Pos(),
								End:     call.End(),
								NewText: []byte(astutil.Format(pass.Fset, s)),
							}},
						}},
					})
					return
				}
			}
			// If the slice type is []byte, and the file imports
			// "bytes" but not "slices", prefer the (behaviorally
			// identical) bytes.Clone for local consistency.
			// https://go.dev/issue/70815#issuecomment-2671572984
			fileImports := func(path string) bool {
				return slices.ContainsFunc(file.Imports, func(spec *ast.ImportSpec) bool {
					value, _ := strconv.Unquote(spec.Path.Value)
					return value == path
				})
			}
			clonepkg := cond(
				types.Identical(info.TypeOf(call), byteSliceType) &&
					!fileImports("slices") && fileImports("bytes"),
				"bytes",
				"slices")
			// append(zerocap, s...) -> slices.Clone(s) or bytes.Clone(s)
			//
			// This is unsound if s is empty and its nilness
			// differs from zerocap (#73557).
			prefix, importEdits := refactor.AddImport(info, file, clonepkg, clonepkg, "Clone", call.Pos())
			message := fmt.Sprintf("Replace append with %s.Clone", clonepkg)
			pass.Report(analysis.Diagnostic{
				Pos:     call.Pos(),
				End:     call.End(),
				Message: message,
				SuggestedFixes: []analysis.SuggestedFix{{
					Message: message,
					TextEdits: append(importEdits, []analysis.TextEdit{{
						Pos:     call.Pos(),
						End:     call.End(),
						NewText: fmt.Appendf(nil, "%sClone(%s)", prefix, astutil.Format(pass.Fset, s)),
					}}...),
				}},
			})
			return
		}
		// append(append(append(base, a...), b...), c...) -> slices.Concat(base, a, b, c)
		//
		// This is unsound if all slices are empty and base is non-nil (#73557).
		prefix, importEdits := refactor.AddImport(info, file, "slices", "slices", "Concat", call.Pos())
		pass.Report(analysis.Diagnostic{
			Pos:     call.Pos(),
			End:     call.End(),
			Message: "Replace append with slices.Concat",
			SuggestedFixes: []analysis.SuggestedFix{{
				Message: "Replace append with slices.Concat",
				TextEdits: append(importEdits, []analysis.TextEdit{{
					Pos:     call.Pos(),
					End:     call.End(),
					NewText: fmt.Appendf(nil, "%sConcat(%s)", prefix, formatExprs(pass.Fset, sliceArgs)),
				}}...),
			}},
		})
	}
	// Mark nested calls to append so that we don't emit diagnostics for them.
	skip := make(map[*ast.CallExpr]bool)
	// Visit calls of form append(x, y...).
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	for curFile := range filesUsing(inspect, info, "go1.21") {
		file := curFile.Node().(*ast.File)
		for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) {
			call := curCall.Node().(*ast.CallExpr)
			if skip[call] {
				continue
			}
			// Recursively unwrap ellipsis calls to append, so
			//	append(append(append(base, a...), b...), c...)
			// yields (base, [c b a]).
			// (Note: the local "slices" shadows the package here.)
			base, slices := ast.Expr(call), []ast.Expr(nil) // base case: (call, nil)
		again:
			if call, ok := base.(*ast.CallExpr); ok {
				if id, ok := call.Fun.(*ast.Ident); ok &&
					call.Ellipsis.IsValid() &&
					len(call.Args) == 2 &&
					info.Uses[id] == builtinAppend {
					// Have: append(base, s...)
					base, slices = call.Args[0], append(slices, call.Args[1])
					skip[call] = true
					goto again
				}
			}
			if len(slices) > 0 {
				simplifyAppendEllipsis(file, call, base, slices)
			}
		}
	}
	return nil, nil
}
// clippedSlice reports whether e denotes a slice that is definitely
// clipped, that is, one whose len(s) == cap(s); if so, res is non-nil.
//
// res is either e itself or a subexpression of e that denotes the
// same slice but without the clipping operation.
//
// The empty result reports whether the slice is also definitely empty.
//
// Examples of clipped slices:
//
//	x[:0:0]                 (empty)
//	[]T(nil)                (empty)
//	Slice{}                 (empty)
//	x[:len(x):len(x)]       (nonempty) res=x
//	x[:k:k]                 (nonempty)
//	slices.Clip(x)          (nonempty) res=x
//
// TODO(adonovan): Add a check that the expression x has no side effects in
// case x[:len(x):len(x)] -> x. Now the program behavior may change.
func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) {
	switch expr := e.(type) {
	case *ast.SliceExpr:
		// A three-index slice x[:h:m] is clipped when h and m
		// are syntactically equal: x[:0:0], x[:len(x):len(x)], x[:k:k].
		if !expr.Slice3 || expr.High == nil || expr.Max == nil || !astutil.EqualSyntax(expr.High, expr.Max) {
			return nil, false
		}
		res = expr
		empty = isZeroIntLiteral(info, expr.High) // x[:0:0]
		if call, ok := expr.High.(*ast.CallExpr); ok &&
			typeutil.Callee(info, call) == builtinLen &&
			astutil.EqualSyntax(call.Args[0], expr.X) {
			res = expr.X // x[:len(x):len(x)] -> x
		}
		return res, empty
	case *ast.CallExpr:
		// A conversion []T(nil) is a clipped empty slice.
		if info.Types[expr.Fun].IsType() &&
			is[*ast.Ident](expr.Args[0]) &&
			info.Uses[expr.Args[0].(*ast.Ident)] == builtinNil {
			return expr, true
		}
		// slices.Clip(x) is clipped; its argument denotes the slice.
		if typesinternal.IsFunctionNamed(typeutil.Callee(info, expr), "slices", "Clip") {
			return expr.Args[0], false // slices.Clip(x) -> x
		}
	case *ast.CompositeLit:
		// An empty composite literal Slice{} is clipped and empty.
		if len(expr.Elts) == 0 {
			return expr, true
		}
	}
	return nil, false
}

View file

@ -0,0 +1,433 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// SlicesContainsAnalyzer is the slicescontains analyzer, which offers
// to replace loops that search a slice for an element by calls to
// go1.21's slices.Contains or slices.ContainsFunc.
var SlicesContainsAnalyzer = &analysis.Analyzer{
	Name: "slicescontains",
	Doc:  analysisinternal.MustExtractDoc(doc, "slicescontains"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: slicescontains,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicescontains",
}
// The slicescontains pass identifies loops that can be replaced by a
// call to slices.Contains{,Func}. For example:
//
// for i, elem := range s {
// if elem == needle {
// ...
// break
// }
// }
//
// =>
//
// if slices.Contains(s, needle) { ... }
//
// Variants:
// - if the if-condition is f(elem), the replacement
// uses slices.ContainsFunc(s, f).
// - if the if-body is "return true" and the fallthrough
// statement is "return false" (or vice versa), the
// loop becomes "return [!]slices.Contains(...)".
// - if the if-body is "found = true" and the previous
// statement is "found = false" (or vice versa), the
// loop becomes "found = [!]slices.Contains(...)".
//
// It may change cardinality of effects of the "needle" expression.
// (Mostly this appears to be a desirable optimization, avoiding
// redundantly repeated evaluation.)
//
// TODO(adonovan): Add a check that needle/predicate expression from
// if-statement has no effects. Now the program behavior may change.
func slicescontains(pass *analysis.Pass) (any, error) {
skipGenerated(pass)
// Skip the analyzer in packages where its
// fixes would create an import cycle.
if within(pass, "slices", "runtime") {
return nil, nil
}
var (
inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
info = pass.TypesInfo
)
// check is called for each RangeStmt of this form:
// for i, elem := range s { if cond { ... } }
check := func(file *ast.File, curRange inspector.Cursor) {
rng := curRange.Node().(*ast.RangeStmt)
ifStmt := rng.Body.List[0].(*ast.IfStmt)
// isSliceElem reports whether e denotes the
// current slice element (elem or s[i]).
isSliceElem := func(e ast.Expr) bool {
if rng.Value != nil && astutil.EqualSyntax(e, rng.Value) {
return true // "elem"
}
if x, ok := e.(*ast.IndexExpr); ok &&
astutil.EqualSyntax(x.X, rng.X) &&
astutil.EqualSyntax(x.Index, rng.Key) {
return true // "s[i]"
}
return false
}
// Examine the condition for one of these forms:
//
// - if elem or s[i] == needle { ... } => Contains
// - if predicate(s[i] or elem) { ... } => ContainsFunc
var (
funcName string // "Contains" or "ContainsFunc"
arg2 ast.Expr // second argument to func (needle or predicate)
)
switch cond := ifStmt.Cond.(type) {
case *ast.BinaryExpr:
if cond.Op == token.EQL {
var elem ast.Expr
if isSliceElem(cond.X) {
funcName = "Contains"
elem = cond.X
arg2 = cond.Y // "if elem == needle"
} else if isSliceElem(cond.Y) {
funcName = "Contains"
elem = cond.Y
arg2 = cond.X // "if needle == elem"
}
// Reject if elem and needle have different types.
if elem != nil {
tElem := info.TypeOf(elem)
tNeedle := info.TypeOf(arg2)
if !types.Identical(tElem, tNeedle) {
// Avoid ill-typed slices.Contains([]error, any).
if !types.AssignableTo(tNeedle, tElem) {
return
}
// TODO(adonovan): relax this check to allow
// slices.Contains([]error, error(any)),
// inserting an explicit widening conversion
// around the needle.
return
}
}
}
case *ast.CallExpr:
if len(cond.Args) == 1 &&
isSliceElem(cond.Args[0]) &&
typeutil.Callee(info, cond) != nil { // not a conversion
// Attempt to get signature
sig, isSignature := info.TypeOf(cond.Fun).(*types.Signature)
if isSignature {
// skip variadic functions
if sig.Variadic() {
return
}
// Slice element type must match function parameter type.
var (
tElem = typeparams.CoreType(info.TypeOf(rng.X)).(*types.Slice).Elem()
tParam = sig.Params().At(0).Type()
)
if !types.Identical(tElem, tParam) {
return
}
}
funcName = "ContainsFunc"
arg2 = cond.Fun // "if predicate(elem)"
}
}
if funcName == "" {
return // not a candidate for Contains{,Func}
}
// body is the "true" body.
body := ifStmt.Body
if len(body.List) == 0 {
// (We could perhaps delete the loop entirely.)
return
}
// Reject if the body, needle or predicate references either range variable.
usesRangeVar := func(n ast.Node) bool {
cur, ok := curRange.FindNode(n)
if !ok {
panic(fmt.Sprintf("FindNode(%T) failed", n))
}
return uses(index, cur, info.Defs[rng.Key.(*ast.Ident)]) ||
rng.Value != nil && uses(index, cur, info.Defs[rng.Value.(*ast.Ident)])
}
if usesRangeVar(body) {
// Body uses range var "i" or "elem".
//
// (The check for "i" could be relaxed when we
// generalize this to support slices.Index;
// and the check for "elem" could be relaxed
// if "elem" can safely be replaced in the
// body by "needle".)
return
}
if usesRangeVar(arg2) {
return
}
// Prepare slices.Contains{,Func} call.
prefix, importEdits := refactor.AddImport(info, file, "slices", "slices", funcName, rng.Pos())
contains := fmt.Sprintf("%s%s(%s, %s)",
prefix,
funcName,
astutil.Format(pass.Fset, rng.X),
astutil.Format(pass.Fset, arg2))
report := func(edits []analysis.TextEdit) {
pass.Report(analysis.Diagnostic{
Pos: rng.Pos(),
End: rng.End(),
Message: fmt.Sprintf("Loop can be simplified using slices.%s", funcName),
SuggestedFixes: []analysis.SuggestedFix{{
Message: "Replace loop by call to slices." + funcName,
TextEdits: append(edits, importEdits...),
}},
})
}
// Last statement of body must return/break out of the loop.
//
// TODO(adonovan): opt:consider avoiding FindNode with new API of form:
// curRange.Get(edge.RangeStmt_Body, -1).
// Get(edge.BodyStmt_List, 0).
// Get(edge.IfStmt_Body)
curBody, _ := curRange.FindNode(body)
curLastStmt, _ := curBody.LastChild()
// Reject if any statement in the body except the
// last has a free continuation (continue or break)
// that might affected by melting down the loop.
//
// TODO(adonovan): relax check by analyzing branch target.
for curBodyStmt := range curBody.Children() {
if curBodyStmt != curLastStmt {
for range curBodyStmt.Preorder((*ast.BranchStmt)(nil), (*ast.ReturnStmt)(nil)) {
return
}
}
}
switch lastStmt := curLastStmt.Node().(type) {
case *ast.ReturnStmt:
// Have: for ... range seq { if ... { stmts; return x } }
// Special case:
// body={ return true } next="return false" (or negation)
// => return [!]slices.Contains(...)
if curNext, ok := curRange.NextSibling(); ok {
nextStmt := curNext.Node().(ast.Stmt)
tval := isReturnTrueOrFalse(info, lastStmt)
fval := isReturnTrueOrFalse(info, nextStmt)
if len(body.List) == 1 && tval*fval < 0 {
// for ... { if ... { return true/false } }
// => return [!]slices.Contains(...)
report([]analysis.TextEdit{
// Delete the range statement and following space.
{
Pos: rng.Pos(),
End: nextStmt.Pos(),
},
// Change return to [!]slices.Contains(...).
{
Pos: nextStmt.Pos(),
End: nextStmt.End(),
NewText: fmt.Appendf(nil, "return %s%s",
cond(tval > 0, "", "!"),
contains),
},
})
return
}
}
// General case:
// => if slices.Contains(...) { stmts; return x }
report([]analysis.TextEdit{
// Replace "for ... { if ... " with "if slices.Contains(...)".
{
Pos: rng.Pos(),
End: ifStmt.Body.Pos(),
NewText: fmt.Appendf(nil, "if %s ", contains),
},
// Delete '}' of range statement and preceding space.
{
Pos: ifStmt.Body.End(),
End: rng.End(),
},
})
return
case *ast.BranchStmt:
if lastStmt.Tok == token.BREAK && lastStmt.Label == nil { // unlabeled break
// Have: for ... { if ... { stmts; break } }
var prevStmt ast.Stmt // previous statement to range (if any)
if curPrev, ok := curRange.PrevSibling(); ok {
// If the RangeStmt's previous sibling is a Stmt,
// the RangeStmt must be among the Body list of
// a BlockStmt, CauseClause, or CommClause.
// In all cases, the prevStmt is the immediate
// predecessor of the RangeStmt during execution.
//
// (This is not true for Stmts in general;
// see [Cursor.Children] and #71074.)
prevStmt, _ = curPrev.Node().(ast.Stmt)
}
// Special case:
// prev="lhs = false" body={ lhs = true; break }
// => lhs = slices.Contains(...) (or negation)
if assign, ok := body.List[0].(*ast.AssignStmt); ok &&
len(body.List) == 2 &&
assign.Tok == token.ASSIGN &&
len(assign.Lhs) == 1 &&
len(assign.Rhs) == 1 {
// Have: body={ lhs = rhs; break }
if prevAssign, ok := prevStmt.(*ast.AssignStmt); ok &&
len(prevAssign.Lhs) == 1 &&
len(prevAssign.Rhs) == 1 &&
astutil.EqualSyntax(prevAssign.Lhs[0], assign.Lhs[0]) &&
is[*ast.Ident](assign.Rhs[0]) &&
info.Uses[assign.Rhs[0].(*ast.Ident)] == builtinTrue {
// Have:
// lhs = false
// for ... { if ... { lhs = true; break } }
// =>
// lhs = slices.Contains(...)
//
// TODO(adonovan):
// - support "var lhs bool = false" and variants.
// - support negation.
// Both these variants seem quite significant.
// - allow the break to be omitted.
report([]analysis.TextEdit{
// Replace "rhs" of previous assignment by slices.Contains(...)
{
Pos: prevAssign.Rhs[0].Pos(),
End: prevAssign.Rhs[0].End(),
NewText: []byte(contains),
},
// Delete the loop and preceding space.
{
Pos: prevAssign.Rhs[0].End(),
End: rng.End(),
},
})
return
}
}
// General case:
// for ... { if ... { stmts; break } }
// => if slices.Contains(...) { stmts }
report([]analysis.TextEdit{
// Replace "for ... { if ... " with "if slices.Contains(...)".
{
Pos: rng.Pos(),
End: ifStmt.Body.Pos(),
NewText: fmt.Appendf(nil, "if %s ", contains),
},
// Delete break statement and preceding space.
{
Pos: func() token.Pos {
if len(body.List) > 1 {
beforeBreak, _ := curLastStmt.PrevSibling()
return beforeBreak.Node().End()
}
return lastStmt.Pos()
}(),
End: lastStmt.End(),
},
// Delete '}' of range statement and preceding space.
{
Pos: ifStmt.Body.End(),
End: rng.End(),
},
})
return
}
}
}
for curFile := range filesUsing(inspect, info, "go1.21") {
file := curFile.Node().(*ast.File)
for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
rng := curRange.Node().(*ast.RangeStmt)
if is[*ast.Ident](rng.Key) &&
rng.Tok == token.DEFINE &&
len(rng.Body.List) == 1 &&
is[*types.Slice](typeparams.CoreType(info.TypeOf(rng.X))) {
// Have:
// - for _, elem := range s { S }
// - for i := range s { S }
if ifStmt, ok := rng.Body.List[0].(*ast.IfStmt); ok &&
ifStmt.Init == nil && ifStmt.Else == nil {
// Have: for i, elem := range s { if cond { ... } }
check(file, curRange)
}
}
}
}
return nil, nil
}
// -- helpers --
// isReturnTrueOrFalse classifies stmt: it returns +1 for "return true",
// -1 for "return false", and 0 for any other statement.
func isReturnTrueOrFalse(info *types.Info, stmt ast.Stmt) int {
	ret, ok := stmt.(*ast.ReturnStmt)
	if !ok || len(ret.Results) != 1 {
		return 0 // not a single-result return
	}
	id, ok := ret.Results[0].(*ast.Ident)
	if !ok {
		return 0 // result is not a bare identifier
	}
	// Resolve the identifier: only the universe true/false count.
	switch info.Uses[id] {
	case builtinTrue:
		return +1
	case builtinFalse:
		return -1
	default:
		return 0
	}
}

View file

@ -0,0 +1,184 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"go/constant"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
)
// Warning: this analyzer is not safe to enable by default (not nil-preserving).
//
// SlicesDeleteAnalyzer replaces append(s[:i], s[i+k:]...) with
// slices.Delete(s, i, i+k); see the slicesdelete run function for details.
var SlicesDeleteAnalyzer = &analysis.Analyzer{
	Name: "slicesdelete",
	Doc:  analysisinternal.MustExtractDoc(doc, "slicesdelete"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
	},
	Run: slicesdelete,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete",
}
// The slicesdelete pass attempts to replace instances of append(s[:i], s[i+k:]...)
// with slices.Delete(s, i, i+k) where k is some positive constant.
// Other variations that will also have suggested replacements include:
// append(s[:i-1], s[i:]...) and append(s[:i+k1], s[i+k2:]) where k2 > k1.
func slicesdelete(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Skip the analyzer in packages where its
	// fixes would create an import cycle.
	if within(pass, "slices", "runtime") {
		return nil, nil
	}

	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
	info := pass.TypesInfo

	// report emits a diagnostic with a fix rewriting
	// append(s[:a], s[b:]...) to slices.Delete(s, a, b).
	report := func(file *ast.File, call *ast.CallExpr, slice1, slice2 *ast.SliceExpr) {
		// insert returns an insertion (zero-width) edit of text at pos.
		insert := func(pos token.Pos, text string) analysis.TextEdit {
			return analysis.TextEdit{Pos: pos, End: pos, NewText: []byte(text)}
		}
		// isIntExpr reports whether e's default type is exactly int.
		isIntExpr := func(e ast.Expr) bool {
			return types.Identical(types.Default(info.TypeOf(e)), builtinInt.Type())
		}
		// isIntShadowed reports whether the name "int" resolves to
		// something other than the universe int at the call site,
		// in which case we cannot spell an int conversion.
		isIntShadowed := func() bool {
			scope := pass.TypesInfo.Scopes[file].Innermost(call.Lparen)
			if _, obj := scope.LookupParent("int", call.Lparen); obj != builtinInt {
				return true // int type is shadowed
			}
			return false
		}

		prefix, edits := refactor.AddImport(info, file, "slices", "slices", "Delete", call.Pos())

		// append's indices may be any integer type; slices.Delete requires int.
		// Insert int conversions as needed (and if possible).
		if isIntShadowed() && (!isIntExpr(slice1.High) || !isIntExpr(slice2.Low)) {
			return
		}
		if !isIntExpr(slice1.High) {
			edits = append(edits,
				insert(slice1.High.Pos(), "int("),
				insert(slice1.High.End(), ")"),
			)
		}
		if !isIntExpr(slice2.Low) {
			edits = append(edits,
				insert(slice2.Low.Pos(), "int("),
				insert(slice2.Low.End(), ")"),
			)
		}

		pass.Report(analysis.Diagnostic{
			Pos:     call.Pos(),
			End:     call.End(),
			Message: "Replace append with slices.Delete",
			SuggestedFixes: []analysis.SuggestedFix{{
				Message: "Replace append with slices.Delete",
				TextEdits: append(edits, []analysis.TextEdit{
					// Change name of called function.
					{
						Pos:     call.Fun.Pos(),
						End:     call.Fun.End(),
						NewText: []byte(prefix + "Delete"),
					},
					// Delete ellipsis.
					{
						Pos: call.Ellipsis,
						End: call.Ellipsis + token.Pos(len("...")), // delete ellipsis
					},
					// Remove second slice variable name.
					{
						Pos: slice2.X.Pos(),
						End: slice2.X.End(),
					},
					// Insert after first slice variable name.
					{
						Pos:     slice1.X.End(),
						NewText: []byte(", "),
					},
					// Remove brackets and colons.
					{
						Pos: slice1.Lbrack,
						End: slice1.High.Pos(),
					},
					{
						Pos: slice1.Rbrack,
						End: slice1.Rbrack + 1,
					},
					{
						Pos: slice2.Lbrack,
						End: slice2.Lbrack + 1,
					},
					{
						Pos: slice2.Low.End(),
						End: slice2.Rbrack + 1,
					},
				}...),
			}},
		})
	}

	// Scan each file new enough to import slices (go1.21) for
	// candidate append calls.
	for curFile := range filesUsing(inspect, info, "go1.21") {
		file := curFile.Node().(*ast.File)
		for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) {
			call := curCall.Node().(*ast.CallExpr)
			if id, ok := call.Fun.(*ast.Ident); ok && len(call.Args) == 2 {
				// Verify we have append with two slices and ... operator,
				// the first slice has no low index and second slice has no
				// high index, and not a three-index slice.
				if call.Ellipsis.IsValid() && info.Uses[id] == builtinAppend {
					slice1, ok1 := call.Args[0].(*ast.SliceExpr)
					slice2, ok2 := call.Args[1].(*ast.SliceExpr)
					if ok1 && slice1.Low == nil && !slice1.Slice3 &&
						ok2 && slice2.High == nil && !slice2.Slice3 &&
						astutil.EqualSyntax(slice1.X, slice2.X) &&
						typesinternal.NoEffects(info, slice1.X) &&
						increasingSliceIndices(info, slice1.High, slice2.Low) {
						// Have append(s[:a], s[b:]...) where we can verify a < b.
						report(file, call, slice1, slice2)
					}
				}
			}
		}
	}
	return nil, nil
}
// increasingSliceIndices reports whether we can verify that slice
// index a is strictly less than slice index b. It recognizes certain
// forms such as i+k1 < i+k2 where k1 < k2 are signed constants.
func increasingSliceIndices(info *types.Info, a, b ast.Expr) bool {
	// If either index is a constant, the comparison is decided
	// directly (and both must be constant for it to succeed).
	constA := info.Types[a].Value
	constB := info.Types[b].Value
	if constA != nil || constB != nil {
		return constA != nil && constB != nil && constant.Compare(constA, token.LSS, constB)
	}

	// decompose splits an expression of the form base±k, where k is a
	// constant, into (base, ±k); any other expression yields (e, 0).
	decompose := func(e ast.Expr) (ast.Expr, constant.Value) {
		if bin, ok := e.(*ast.BinaryExpr); ok && (bin.Op == token.ADD || bin.Op == token.SUB) {
			if k := info.Types[bin.Y].Value; k != nil {
				// The unary op applies the sign: k is negated for SUB.
				return bin.X, constant.UnaryOp(bin.Op, k, 0)
			}
		}
		return e, constant.MakeInt64(0)
	}

	// Otherwise both indices must share a syntactically identical
	// base and differ only by constant offsets with offA < offB.
	baseA, offA := decompose(a)
	baseB, offB := decompose(b)
	return astutil.EqualSyntax(baseA, baseB) && constant.Compare(offA, token.LSS, offB)
}

View file

@ -0,0 +1,124 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// (Not to be confused with go/analysis/passes/sortslice.)
//
// SlicesSortAnalyzer replaces sort.Slice(s, less) by slices.Sort(s)
// when less is the natural "<" ordering; see the slicessort run
// function for details.
var SlicesSortAnalyzer = &analysis.Analyzer{
	Name: "slicessort",
	Doc:  analysisinternal.MustExtractDoc(doc, "slicessort"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: slicessort,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicessort",
}
// The slicessort pass replaces sort.Slice(slice, less) with
// slices.Sort(slice) when slice is a []T and less is a FuncLit
// equivalent to cmp.Ordered[T].
//
//	sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })
//	=> slices.Sort(s)
//
// There is no slices.SortStable.
//
// TODO(adonovan): support
//
//   - sort.Slice(s, func(i, j int) bool { return s[i] ... s[j] })
//     -> slices.SortFunc(s, func(x, y T) int { return x ... y })
//     iff all uses of i, j can be replaced by s[i], s[j] and "<" can be replaced with cmp.Compare.
//
//   - As above for sort.SliceStable -> slices.SortStableFunc.
//
//   - sort.Sort(x) where x has a named slice type whose Less method is the natural order.
//     -> sort.Slice(x)
func slicessort(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Skip the analyzer in packages where its
	// fixes would create an import cycle.
	if within(pass, "slices", "sort", "runtime") {
		return nil, nil
	}

	var (
		info      = pass.TypesInfo
		index     = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		sortSlice = index.Object("sort", "Slice")
	)
	// Examine each call to sort.Slice in the package.
	for curCall := range index.Calls(sortSlice) {
		call := curCall.Node().(*ast.CallExpr)
		if lit, ok := call.Args[1].(*ast.FuncLit); ok && len(lit.Body.List) == 1 {
			sig := info.Types[lit.Type].Type.(*types.Signature)

			// Have: sort.Slice(s, func(i, j int) bool { return ... })
			s := call.Args[0]
			i := sig.Params().At(0)
			j := sig.Params().At(1)

			if ret, ok := lit.Body.List[0].(*ast.ReturnStmt); ok {
				if compare, ok := ret.Results[0].(*ast.BinaryExpr); ok && compare.Op == token.LSS {
					// isIndex reports whether e is s[v].
					isIndex := func(e ast.Expr, v *types.Var) bool {
						index, ok := e.(*ast.IndexExpr)
						return ok &&
							astutil.EqualSyntax(index.X, s) &&
							is[*ast.Ident](index.Index) &&
							info.Uses[index.Index.(*ast.Ident)] == v
					}
					file := astutil.EnclosingFile(curCall)
					// The fix requires go1.21 for the slices package.
					if isIndex(compare.X, i) && isIndex(compare.Y, j) &&
						fileUses(info, file, "go1.21") {
						// Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] })

						prefix, importEdits := refactor.AddImport(
							info, file, "slices", "slices", "Sort", call.Pos())

						pass.Report(analysis.Diagnostic{
							// Highlight "sort.Slice".
							Pos:     call.Fun.Pos(),
							End:     call.Fun.End(),
							Message: "sort.Slice can be modernized using slices.Sort",
							SuggestedFixes: []analysis.SuggestedFix{{
								Message: "Replace sort.Slice call by slices.Sort",
								TextEdits: append(importEdits, []analysis.TextEdit{
									{
										// Replace sort.Slice with slices.Sort.
										Pos:     call.Fun.Pos(),
										End:     call.Fun.End(),
										NewText: []byte(prefix + "Sort"),
									},
									{
										// Eliminate FuncLit.
										Pos: call.Args[0].End(),
										End: call.Rparen,
									},
								}...),
							}},
						})
					}
				}
			}
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,354 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/goplsexport"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/stdlib"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// stditeratorsAnalyzer suggests replacing Len/At-style loops over
// certain standard-library types by a range loop over the type's
// iterator method; the participating types and methods are listed
// in stditeratorsTable.
var stditeratorsAnalyzer = &analysis.Analyzer{
	Name: "stditerators",
	Doc:  analysisinternal.MustExtractDoc(doc, "stditerators"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: stditerators,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stditerators",
}
// init makes the (unexported) stditerators analyzer available to
// gopls via the goplsexport shim.
func init() {
	// Export to gopls until this is a published modernizer.
	goplsexport.StdIteratorsModernizer = stditeratorsAnalyzer
}
// stditeratorsTable records std types that have legacy T.{Len,At}
// iteration methods as well as a newer T.All method that returns an
// iter.Seq.
//
// Each row gives the package path and type name, the names of the
// Len-like and At-like methods, the iterator method that replaces
// them, and a preferred name for the new loop element variable.
var stditeratorsTable = [...]struct {
	pkgpath, typename, lenmethod, atmethod, itermethod, elemname string
}{
	// Example: in go/types, (*Tuple).Variables returns an
	// iterator that replaces a loop over (*Tuple).{Len,At}.
	// The loop variable is named "v".
	{"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp"},
	{"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method"},
	{"go/types", "Interface", "NumMethods", "Method", "Methods", "method"},
	{"go/types", "MethodSet", "Len", "At", "Methods", "method"},
	{"go/types", "Named", "NumMethods", "Method", "Methods", "method"},
	{"go/types", "Scope", "NumChildren", "Child", "Children", "child"},
	{"go/types", "Struct", "NumFields", "Field", "Fields", "field"},
	{"go/types", "Tuple", "Len", "At", "Variables", "v"},
	{"go/types", "TypeList", "Len", "At", "Types", "t"},
	{"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam"},
	{"go/types", "Union", "Len", "Term", "Terms", "term"},
	// TODO(adonovan): support Seq2. Bonus: transform uses of both key and value.
	// {"reflect", "Value", "NumFields", "Field", "Fields", "field"},
}
// stditerators suggests fixes to replace loops using Len/At-style
// iterator APIs by a range loop over an iterator. The set of
// participating types and methods is defined by [stditeratorsTable].
//
// Pattern:
//
//	for i := 0; i < x.Len(); i++ {
//		use(x.At(i))
//	}
//
// =>
//
//	for elem := range x.All() {
//		use(elem)
//	}
//
// Variant:
//
//	for i := range x.Len() { ... }
//
// Note: Iterators have a dynamic cost. How do we know that
// the user hasn't intentionally chosen not to use an
// iterator for that reason? We don't want to go fix to
// undo optimizations. Do we need a suppression mechanism?
func stditerators(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	var (
		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info  = pass.TypesInfo
	)
	for _, row := range stditeratorsTable {
		// Don't offer fixes within the package
		// that defines the iterator in question.
		if within(pass, row.pkgpath) {
			continue
		}

		var (
			lenMethod = index.Selection(row.pkgpath, row.typename, row.lenmethod)
			atMethod  = index.Selection(row.pkgpath, row.typename, row.atmethod)
		)

		// chooseName returns an appropriate fresh name
		// for the index variable of the iterator loop
		// whose body is specified.
		//
		// If the loop body starts with
		//
		//	for ... { e := x.At(i); use(e) }
		//
		// then chooseName prefers the name e and additionally
		// returns the var's symbol. We'll transform this to:
		//
		//	for e := range x.Len() { e := e; use(e) }
		//
		// which leaves a redundant assignment that a
		// subsequent 'forvar' pass will eliminate.
		chooseName := func(curBody inspector.Cursor, x ast.Expr, i *types.Var) (string, *types.Var) {
			// Is body { elem := x.At(i); ... } ?
			body := curBody.Node().(*ast.BlockStmt)
			if len(body.List) > 0 {
				if assign, ok := body.List[0].(*ast.AssignStmt); ok &&
					assign.Tok == token.DEFINE &&
					len(assign.Lhs) == 1 &&
					len(assign.Rhs) == 1 &&
					is[*ast.Ident](assign.Lhs[0]) {
					// call to x.At(i)?
					if call, ok := assign.Rhs[0].(*ast.CallExpr); ok &&
						typeutil.Callee(info, call) == atMethod &&
						astutil.EqualSyntax(ast.Unparen(call.Fun).(*ast.SelectorExpr).X, x) &&
						is[*ast.Ident](call.Args[0]) &&
						info.Uses[call.Args[0].(*ast.Ident)] == i {
						// Have: { elem := x.At(i); ... }
						id := assign.Lhs[0].(*ast.Ident)
						return id.Name, info.Defs[id].(*types.Var)
					}
				}
			}
			loop := curBody.Parent().Node()
			return refactor.FreshName(info.Scopes[loop], loop.Pos(), row.elemname), nil
		}

		// Process each call of x.Len().
	nextCall:
		for curLenCall := range index.Calls(lenMethod) {
			lenSel, ok := ast.Unparen(curLenCall.Node().(*ast.CallExpr).Fun).(*ast.SelectorExpr)
			if !ok {
				continue
			}
			// lenSel is "x.Len"

			var (
				rng      analysis.Range   // where to report diagnostic
				curBody  inspector.Cursor // loop body
				indexVar *types.Var       // old loop index var
				elemVar  *types.Var       // existing "elem := x.At(i)" var, if present
				elem     string           // name for new loop var
				edits    []analysis.TextEdit
			)

			// Analyze enclosing loop.
			switch ek, _ := curLenCall.ParentEdge(); ek {
			case edge.BinaryExpr_Y:
				// pattern 1: for i := 0; i < x.Len(); i++ { ... }
				var (
					curCmp = curLenCall.Parent()
					cmp    = curCmp.Node().(*ast.BinaryExpr)
				)
				if cmp.Op != token.LSS ||
					!astutil.IsChildOf(curCmp, edge.ForStmt_Cond) {
					continue
				}
				if id, ok := cmp.X.(*ast.Ident); ok {
					// Have: for _; i < x.Len(); _ { ... }
					var (
						v      = info.Uses[id].(*types.Var)
						curFor = curCmp.Parent()
						loop   = curFor.Node().(*ast.ForStmt)
					)
					if v != isIncrementLoop(info, loop) {
						continue
					}
					// Have: for i := 0; i < x.Len(); i++ { ... }.
					//       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
					rng = analysisinternal.Range(loop.For, loop.Post.End())
					indexVar = v
					curBody = curFor.ChildAt(edge.ForStmt_Body, -1)
					elem, elemVar = chooseName(curBody, lenSel.X, indexVar)

					// for i := 0; i < x.Len(); i++ {
					//     ----  ------- ---  -----
					// for elem := range x.All() {
					edits = []analysis.TextEdit{
						{
							Pos:     v.Pos(),
							End:     v.Pos() + token.Pos(len(v.Name())),
							NewText: []byte(elem),
						},
						{
							Pos:     loop.Init.(*ast.AssignStmt).Rhs[0].Pos(),
							End:     cmp.Y.Pos(),
							NewText: []byte("range "),
						},
						{
							Pos:     lenSel.Sel.Pos(),
							End:     lenSel.Sel.End(),
							NewText: []byte(row.itermethod),
						},
						{
							Pos: curLenCall.Node().End(),
							End: loop.Post.End(),
						},
					}
				}

			case edge.RangeStmt_X:
				// pattern 2: for i := range x.Len() { ... }
				var (
					curRange = curLenCall.Parent()
					loop     = curRange.Node().(*ast.RangeStmt)
				)
				if id, ok := loop.Key.(*ast.Ident); ok &&
					loop.Value == nil &&
					loop.Tok == token.DEFINE {
					// Have: for i := range x.Len() { ... }
					//       ~~~~~~~~~~~~~
					rng = analysisinternal.Range(loop.Range, loop.X.End())
					indexVar = info.Defs[id].(*types.Var)
					curBody = curRange.ChildAt(edge.RangeStmt_Body, -1)
					elem, elemVar = chooseName(curBody, lenSel.X, indexVar)

					// for i := range x.Len() {
					//     ----           ---
					// for elem := range x.All() {
					edits = []analysis.TextEdit{
						{
							Pos:     loop.Key.Pos(),
							End:     loop.Key.End(),
							NewText: []byte(elem),
						},
						{
							Pos:     lenSel.Sel.Pos(),
							End:     lenSel.Sel.End(),
							NewText: []byte(row.itermethod),
						},
					}
				}
			}
			if indexVar == nil {
				continue // no loop of the required form
			}

			// TODO(adonovan): what about possible
			// modifications of x within the loop?
			// Aliasing seems to make a conservative
			// treatment impossible.

			// Check that all uses of var i within loop body are x.At(i).
			for curUse := range index.Uses(indexVar) {
				if !curBody.Contains(curUse) {
					continue
				}
				if ek, argidx := curUse.ParentEdge(); ek != edge.CallExpr_Args || argidx != 0 {
					continue nextCall // use is not arg of call
				}
				curAtCall := curUse.Parent()
				atCall := curAtCall.Node().(*ast.CallExpr)
				if typeutil.Callee(info, atCall) != atMethod {
					continue nextCall // use is not arg of call to T.At
				}
				atSel := ast.Unparen(atCall.Fun).(*ast.SelectorExpr)

				// Check receivers of Len, At calls match (syntactically).
				if !astutil.EqualSyntax(lenSel.X, atSel.X) {
					continue nextCall
				}

				// At each point of use, check that
				// the fresh variable is not shadowed
				// by an intervening local declaration
				// (or by the idiomatic elemVar optionally
				// found by chooseName).
				if obj := lookup(info, curAtCall, elem); obj != nil && obj != elemVar && obj.Pos() > indexVar.Pos() {
					// (Ideally, instead of giving up, we would
					// embellish the name and try again.)
					continue nextCall
				}

				// use(x.At(i))
				//     -------
				// use(elem   )
				edits = append(edits, analysis.TextEdit{
					Pos:     atCall.Pos(),
					End:     atCall.End(),
					NewText: []byte(elem),
				})
			}

			// Check file Go version is new enough for the iterator method.
			// (In the long run, version filters are not highly selective,
			// so there's no need to do them first, especially as this check
			// may be somewhat expensive.)
			if v, ok := methodGoVersion(row.pkgpath, row.typename, row.itermethod); !ok {
				panic("no version found")
			} else if file := astutil.EnclosingFile(curLenCall); !fileUses(info, file, v.String()) {
				continue nextCall
			}

			pass.Report(analysis.Diagnostic{
				Pos: rng.Pos(),
				End: rng.End(),
				// (Fixed grammar: was "loop can simplified".)
				Message: fmt.Sprintf("%s/%s loop can be simplified using %s.%s iteration",
					row.lenmethod, row.atmethod, row.typename, row.itermethod),
				SuggestedFixes: []analysis.SuggestedFix{{
					Message: fmt.Sprintf(
						"Replace %s/%s loop with %s.%s iteration",
						row.lenmethod, row.atmethod, row.typename, row.itermethod),
					TextEdits: edits,
				}},
			})
		}
	}
	return nil, nil
}
// -- helpers --
// methodGoVersion returns the Go release at which the method
// (pkgpath.recvtype).method first appeared in the standard library,
// and reports whether it was found.
//
// TODO(adonovan): opt: this might be inefficient for large packages
// like go/types. If so, memoize using a map (and kill two birds with
// one stone by also memoizing the 'within' check above).
func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
	// Linear scan over the package's symbol list, keeping only methods.
	for _, sym := range stdlib.PackageSymbols[pkgpath] {
		if sym.Kind != stdlib.Method {
			continue
		}
		_, recv, name := sym.SplitMethod()
		if recv == recvtype && name == method {
			return sym.Version, true
		}
	}
	return 0, false
}

View file

@ -0,0 +1,328 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// StringsBuilderAnalyzer replaces string += string in a loop by
// strings.Builder; see the stringsbuilder run function for details.
var StringsBuilderAnalyzer = &analysis.Analyzer{
	Name: "stringsbuilder",
	Doc:  analysisinternal.MustExtractDoc(doc, "stringsbuilder"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: stringsbuilder,
	// The doc anchor must match the analyzer Name ("stringsbuilder",
	// as for the other analyzers in this package); it previously read
	// "#stringbuilder", which is not a section of the package docs.
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsbuilder",
}
// stringsbuilder replaces string += string in a loop by strings.Builder.
//
// The transformation is applied only when the variable is declared
// locally, every non-final use is "s += expr" (at least one inside a
// loop), and the final use reads s as an rvalue, which becomes
// s.String().
func stringsbuilder(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)

	// Skip the analyzer in packages where its
	// fixes would create an import cycle.
	if within(pass, "strings", "runtime") {
		return nil, nil
	}

	var (
		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
	)

	// Gather all local string variables that appear on the
	// LHS of some string += string assignment.
	candidates := make(map[*types.Var]bool)
	for curAssign := range inspect.Root().Preorder((*ast.AssignStmt)(nil)) {
		assign := curAssign.Node().(*ast.AssignStmt)
		if assign.Tok == token.ADD_ASSIGN && is[*ast.Ident](assign.Lhs[0]) {
			if v, ok := pass.TypesInfo.Uses[assign.Lhs[0].(*ast.Ident)].(*types.Var); ok &&
				!typesinternal.IsPackageLevel(v) && // TODO(adonovan): in go1.25, use v.Kind() == types.LocalVar &&
				types.Identical(v.Type(), builtinString.Type()) {
				candidates[v] = true
			}
		}
	}

	// Now check each candidate variable's decl and uses.
nextcand:
	for v := range candidates {
		var edits []analysis.TextEdit

		// Check declaration of s:
		//
		//	s := expr
		//	var s [string] [= expr]
		//
		// and transform to:
		//
		//	var s strings.Builder; s.WriteString(expr)
		//
		def, ok := index.Def(v)
		if !ok {
			continue
		}
		ek, _ := def.ParentEdge()
		if ek == edge.AssignStmt_Lhs &&
			len(def.Parent().Node().(*ast.AssignStmt).Lhs) == 1 {
			// Have: s := expr
			// =>    var s strings.Builder; s.WriteString(expr)
			assign := def.Parent().Node().(*ast.AssignStmt)

			// Reject "if s := f(); ..." since in that context
			// we can't replace the assign with two statements.
			switch def.Parent().Parent().Node().(type) {
			case *ast.BlockStmt, *ast.CaseClause, *ast.CommClause:
				// OK: these are the parts of syntax that
				// allow unrestricted statement lists.
			default:
				continue
			}

			// Add strings import.
			prefix, importEdits := refactor.AddImport(
				pass.TypesInfo, astutil.EnclosingFile(def), "strings", "strings", "Builder", v.Pos())
			edits = append(edits, importEdits...)

			if isEmptyString(pass.TypesInfo, assign.Rhs[0]) {
				// s := ""
				// ---------------------
				// var s strings.Builder
				edits = append(edits, analysis.TextEdit{
					Pos:     assign.Pos(),
					End:     assign.End(),
					NewText: fmt.Appendf(nil, "var %[1]s %[2]sBuilder", v.Name(), prefix),
				})
			} else {
				// s := expr
				// ------------------------------------- -
				// var s strings.Builder; s.WriteString(expr)
				edits = append(edits, []analysis.TextEdit{
					{
						Pos:     assign.Pos(),
						End:     assign.Rhs[0].Pos(),
						NewText: fmt.Appendf(nil, "var %[1]s %[2]sBuilder; %[1]s.WriteString(", v.Name(), prefix),
					},
					{
						Pos:     assign.End(),
						End:     assign.End(),
						NewText: []byte(")"),
					},
				}...)
			}

		} else if ek == edge.ValueSpec_Names &&
			len(def.Parent().Node().(*ast.ValueSpec).Names) == 1 {
			// Have: var s [string] [= expr]
			// =>    var s strings.Builder; s.WriteString(expr)

			// Add strings import.
			prefix, importEdits := refactor.AddImport(
				pass.TypesInfo, astutil.EnclosingFile(def), "strings", "strings", "Builder", v.Pos())
			edits = append(edits, importEdits...)

			spec := def.Parent().Node().(*ast.ValueSpec)
			decl := def.Parent().Parent().Node().(*ast.GenDecl)
			init := spec.Names[0].End() // start of " = expr"
			if spec.Type != nil {
				init = spec.Type.End()
			}

			// var s [string]
			//      ----------------
			// var s strings.Builder
			edits = append(edits, analysis.TextEdit{
				Pos:     spec.Names[0].End(),
				End:     init,
				NewText: fmt.Appendf(nil, " %sBuilder", prefix),
			})
			if len(spec.Values) > 0 && !isEmptyString(pass.TypesInfo, spec.Values[0]) {
				// = expr
				// ---------------- -
				// ; s.WriteString(expr)
				edits = append(edits, []analysis.TextEdit{
					{
						Pos:     init,
						End:     spec.Values[0].Pos(),
						NewText: fmt.Appendf(nil, "; %s.WriteString(", v.Name()),
					},
					{
						Pos:     decl.End(),
						End:     decl.End(),
						NewText: []byte(")"),
					},
				}...)
			} else {
				// delete "= expr"
				edits = append(edits, analysis.TextEdit{
					Pos: init,
					End: spec.End(),
				})
			}

		} else {
			continue
		}

		// Check uses of s.
		//
		// - All uses of s except the final one must be of the form
		//
		//	s += expr
		//
		//   Each of these will become s.WriteString(expr).
		//   At least one of them must be in an intervening loop
		//   w.r.t. the declaration of s:
		//
		//	var s string
		//	for ... { s += expr }
		//
		// - The final use of s must be as an rvalue (e.g. use(s), not &s).
		//   This will become s.String().
		//
		// Perhaps surprisingly, it is fine for there to be an
		// intervening loop or lambda w.r.t. the declaration of s:
		//
		//	var s strings.Builder
		//	for range kSmall { s.WriteString(expr) }
		//	for range kLarge { use(s.String()) } // called repeatedly
		//
		// Even though that might cause the s.String() operation to be
		// executed repeatedly, this is not a deoptimization because,
		// by design, (*strings.Builder).String does not allocate.
		var (
			numLoopAssigns int             // number of += assignments within a loop
			loopAssign     *ast.AssignStmt // first += assignment within a loop
			seenRvalueUse  bool            // => we've seen the sole final use of s as an rvalue
		)
		for curUse := range index.Uses(v) {
			// Strip enclosing parens around Ident.
			ek, _ := curUse.ParentEdge()
			for ek == edge.ParenExpr_X {
				curUse = curUse.Parent()
				ek, _ = curUse.ParentEdge()
			}

			// The rvalueUse must be the lexically last use.
			if seenRvalueUse {
				continue nextcand
			}

			// intervening reports whether cur has an ancestor of
			// one of the given types that is within the scope of v.
			intervening := func(types ...ast.Node) bool {
				for cur := range curUse.Enclosing(types...) {
					if v.Pos() <= cur.Node().Pos() { // in scope of v
						return true
					}
				}
				return false
			}

			if ek == edge.AssignStmt_Lhs {
				assign := curUse.Parent().Node().(*ast.AssignStmt)
				if assign.Tok != token.ADD_ASSIGN {
					continue nextcand
				}
				// Have: s += expr

				// At least one of the += operations
				// must appear within a loop.
				// relative to the declaration of s.
				if intervening((*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)) {
					numLoopAssigns++
					if loopAssign == nil {
						loopAssign = assign
					}
				}

				// s += expr
				// -------------  -
				// s.WriteString(expr)
				edits = append(edits, []analysis.TextEdit{
					// replace += with .WriteString()
					{
						Pos:     assign.TokPos,
						End:     assign.Rhs[0].Pos(),
						NewText: []byte(".WriteString("),
					},
					// insert ")"
					{
						Pos:     assign.End(),
						End:     assign.End(),
						NewText: []byte(")"),
					},
				}...)

			} else if ek == edge.UnaryExpr_X &&
				curUse.Parent().Node().(*ast.UnaryExpr).Op == token.AND {
				// Have: use(&s)
				continue nextcand // s is used as an lvalue; reject

			} else {
				// The only possible l-value uses of a string variable
				// are assignments (s=expr, s+=expr, etc) and &s.
				// (For strings, we can ignore method calls s.m().)
				// All other uses are r-values.
				seenRvalueUse = true
				edits = append(edits, analysis.TextEdit{
					// insert ".String()"
					Pos:     curUse.Node().End(),
					End:     curUse.Node().End(),
					NewText: []byte(".String()"),
				})
			}
		}
		if !seenRvalueUse {
			continue nextcand // no rvalue use; reject
		}
		if numLoopAssigns == 0 {
			continue nextcand // no += in a loop; reject
		}

		pass.Report(analysis.Diagnostic{
			Pos:     loopAssign.Pos(),
			End:     loopAssign.End(),
			Message: "using string += string in a loop is inefficient",
			SuggestedFixes: []analysis.SuggestedFix{{
				Message:   "Replace string += string with strings.Builder",
				TextEdits: edits,
			}},
		})
	}
	return nil, nil
}
// isEmptyString reports whether e (a string-typed expression) has constant value "".
func isEmptyString(info *types.Info, e ast.Expr) bool {
tv, ok := info.Types[e]
return ok && tv.Value != nil && constant.StringVal(tv.Value) == ""
}

View file

@ -0,0 +1,261 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// StringsCutPrefixAnalyzer is the "stringscutprefix" analyzer.
// It suggests replacing paired HasPrefix+TrimPrefix calls (and their
// Suffix analogues, in strings or bytes) with a single call to
// CutPrefix/CutSuffix; see the stringscutprefix function for the
// exact patterns recognized.
var StringsCutPrefixAnalyzer = &analysis.Analyzer{
	Name: "stringscutprefix",
	Doc:  analysisinternal.MustExtractDoc(doc, "stringscutprefix"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: stringscutprefix,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringscutprefix",
}
// stringscutprefix offers a fix to replace an if statement which
// calls to the 2 patterns below with strings.CutPrefix or strings.CutSuffix.
//
// Patterns:
//
// 1. if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre)) }
// =>
// if after, ok := strings.CutPrefix(s, pre); ok { use(after) }
//
// 2. if after := strings.TrimPrefix(s, pre); after != s { use(after) }
// =>
// if after, ok := strings.CutPrefix(s, pre); ok { use(after) }
//
// Similar patterns apply for CutSuffix.
//
// The use must occur within the first statement of the block, and the offered fix
// only replaces the first occurrence of strings.TrimPrefix/TrimSuffix.
//
// Variants:
// - bytes.HasPrefix/HasSuffix usage as pattern 1.
// stringscutprefix implements the analyzer; see the doc comment above
// for the patterns recognized and the fixes offered.
func stringscutprefix(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info    = pass.TypesInfo

		stringsTrimPrefix = index.Object("strings", "TrimPrefix")
		bytesTrimPrefix   = index.Object("bytes", "TrimPrefix")
		stringsTrimSuffix = index.Object("strings", "TrimSuffix")
		bytesTrimSuffix   = index.Object("bytes", "TrimSuffix")
	)
	// Fast path: nothing to do unless one of the Trim functions is referenced.
	if !index.Used(stringsTrimPrefix, bytesTrimPrefix, stringsTrimSuffix, bytesTrimSuffix) {
		return nil, nil
	}

	// CutPrefix/CutSuffix were added in go1.20.
	for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") {
		for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) {
			ifStmt := curIfStmt.Node().(*ast.IfStmt)

			// pattern1:
			//   if strings.HasPrefix(s, pre) { ...strings.TrimPrefix(s, pre)... }
			if call, ok := ifStmt.Cond.(*ast.CallExpr); ok && ifStmt.Init == nil && len(ifStmt.Body.List) > 0 {
				obj := typeutil.Callee(info, call)
				if !typesinternal.IsFunctionNamed(obj, "strings", "HasPrefix", "HasSuffix") &&
					!typesinternal.IsFunctionNamed(obj, "bytes", "HasPrefix", "HasSuffix") {
					continue
				}
				isPrefix := strings.HasSuffix(obj.Name(), "Prefix")

				// Replace the first occurrence of strings.TrimPrefix(s, pre) in the first statement only,
				// but not later statements in case s or pre are modified by intervening logic (ditto Suffix).
				firstStmt := curIfStmt.Child(ifStmt.Body).Child(ifStmt.Body.List[0])
				for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) {
					call1 := curCall.Node().(*ast.CallExpr)
					obj1 := typeutil.Callee(info, call1)
					// bytesTrimPrefix or stringsTrimPrefix might be nil if the file doesn't import it,
					// so we need to ensure the obj1 is not nil otherwise the call1 is not TrimPrefix and cause a panic (ditto Suffix).
					if obj1 == nil ||
						obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix &&
							obj1 != stringsTrimSuffix && obj1 != bytesTrimSuffix {
						continue
					}
					isPrefix1 := strings.HasSuffix(obj1.Name(), "Prefix")

					// Only matching pairs (Has+Trim both Prefix, or both Suffix)
					// can be combined into a single Cut call.
					var cutFuncName, varName, message, fixMessage string
					if isPrefix && isPrefix1 {
						cutFuncName = "CutPrefix"
						varName = "after"
						message = "HasPrefix + TrimPrefix can be simplified to CutPrefix"
						fixMessage = "Replace HasPrefix/TrimPrefix with CutPrefix"
					} else if !isPrefix && !isPrefix1 {
						cutFuncName = "CutSuffix"
						varName = "before"
						message = "HasSuffix + TrimSuffix can be simplified to CutSuffix"
						fixMessage = "Replace HasSuffix/TrimSuffix with CutSuffix"
					} else {
						continue
					}

					// Have: if strings.HasPrefix(s0, pre0) { ...strings.TrimPrefix(s, pre)... } (ditto Suffix)
					var (
						s0   = call.Args[0]
						pre0 = call.Args[1]
						s    = call1.Args[0]
						pre  = call1.Args[1]
					)

					// check whether the obj1 uses the exact the same argument with strings.HasPrefix
					// shadow variables won't be valid because we only access the first statement (ditto Suffix).
					if astutil.EqualSyntax(s0, s) && astutil.EqualSyntax(pre0, pre) {
						// Pick fresh names for the two new variables declared by the fix.
						after := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), varName)
						prefix, importEdits := refactor.AddImport(
							info,
							curFile.Node().(*ast.File),
							obj1.Pkg().Name(),
							obj1.Pkg().Path(),
							cutFuncName,
							call.Pos(),
						)
						okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok")
						pass.Report(analysis.Diagnostic{
							// highlight at HasPrefix call (ditto Suffix).
							Pos:     call.Pos(),
							End:     call.End(),
							Message: message,
							SuggestedFixes: []analysis.SuggestedFix{{
								Message: fixMessage,
								// if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre)) }
								// ------------ ----------------- ----- --------------------------
								// if after, ok := strings.CutPrefix(s, pre); ok { use(after) }
								// (ditto Suffix)
								TextEdits: append(importEdits, []analysis.TextEdit{
									{
										Pos:     call.Fun.Pos(),
										End:     call.Fun.Pos(),
										NewText: fmt.Appendf(nil, "%s, %s :=", after, okVarName),
									},
									{
										Pos:     call.Fun.Pos(),
										End:     call.Fun.End(),
										NewText: fmt.Appendf(nil, "%s%s", prefix, cutFuncName),
									},
									{
										Pos:     call.End(),
										End:     call.End(),
										NewText: fmt.Appendf(nil, "; %s ", okVarName),
									},
									{
										Pos:     call1.Pos(),
										End:     call1.End(),
										NewText: []byte(after),
									},
								}...),
							}}},
						)
						break
					}
				}
			}

			// pattern2:
			//   if rest := strings.TrimPrefix(s, pre); rest != s { ... }
			if bin, ok := ifStmt.Cond.(*ast.BinaryExpr); ok &&
				bin.Op == token.NEQ &&
				ifStmt.Init != nil &&
				isSimpleAssign(ifStmt.Init) {
				assign := ifStmt.Init.(*ast.AssignStmt)
				if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && assign.Tok == token.DEFINE {
					lhs := assign.Lhs[0]
					obj := typeutil.Callee(info, call)
					if obj == nil ||
						obj != stringsTrimPrefix && obj != bytesTrimPrefix && obj != stringsTrimSuffix && obj != bytesTrimSuffix {
						continue
					}
					isPrefix1 := strings.HasSuffix(obj.Name(), "Prefix")

					var cutFuncName, message, fixMessage string
					if isPrefix1 {
						cutFuncName = "CutPrefix"
						message = "TrimPrefix can be simplified to CutPrefix"
						fixMessage = "Replace TrimPrefix with CutPrefix"
					} else {
						cutFuncName = "CutSuffix"
						message = "TrimSuffix can be simplified to CutSuffix"
						fixMessage = "Replace TrimSuffix with CutSuffix"
					}

					// The condition must compare the assigned variable
					// against the Trim argument (in either operand order).
					if astutil.EqualSyntax(lhs, bin.X) && astutil.EqualSyntax(call.Args[0], bin.Y) ||
						(astutil.EqualSyntax(lhs, bin.Y) && astutil.EqualSyntax(call.Args[0], bin.X)) {
						okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok")
						// Have one of:
						//   if rest := TrimPrefix(s, prefix); rest != s { (ditto Suffix)
						//   if rest := TrimPrefix(s, prefix); s != rest { (ditto Suffix)

						// We use AddImport not to add an import (since it exists already)
						// but to compute the correct prefix in the dot-import case.
						prefix, importEdits := refactor.AddImport(
							info,
							curFile.Node().(*ast.File),
							obj.Pkg().Name(),
							obj.Pkg().Path(),
							cutFuncName,
							call.Pos(),
						)

						pass.Report(analysis.Diagnostic{
							// highlight from the init and the condition end.
							Pos:     ifStmt.Init.Pos(),
							End:     ifStmt.Cond.End(),
							Message: message,
							SuggestedFixes: []analysis.SuggestedFix{{
								Message: fixMessage,
								// if x := strings.TrimPrefix(s, pre); x != s ...
								//    ----           ----------        ------
								// if x, ok := strings.CutPrefix (s, pre); ok ...
								// (ditto Suffix)
								TextEdits: append(importEdits, []analysis.TextEdit{
									{
										Pos:     assign.Lhs[0].End(),
										End:     assign.Lhs[0].End(),
										NewText: fmt.Appendf(nil, ", %s", okVarName),
									},
									{
										Pos:     call.Fun.Pos(),
										End:     call.Fun.End(),
										NewText: fmt.Appendf(nil, "%s%s", prefix, cutFuncName),
									},
									{
										Pos:     ifStmt.Cond.Pos(),
										End:     ifStmt.Cond.End(),
										NewText: []byte(okVarName),
									},
								}...),
							}},
						})
					}
				}
			}
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,145 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// StringsSeqAnalyzer is the "stringsseq" analyzer.
// It suggests replacing a range over strings.Split/Fields (or their
// bytes analogues) with the SplitSeq/FieldsSeq iterator forms added
// in Go 1.24; see the stringsseq function for details.
var StringsSeqAnalyzer = &analysis.Analyzer{
	Name: "stringsseq",
	Doc:  analysisinternal.MustExtractDoc(doc, "stringsseq"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: stringsseq,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsseq",
}
// stringsseq offers a fix to replace a call to strings.Split with
// SplitSeq or strings.Fields with FieldsSeq
// when it is the operand of a range loop, either directly:
//
// for _, line := range strings.Split() {...}
//
// or indirectly, if the variable's sole use is the range statement:
//
// lines := strings.Split()
// for _, line := range lines {...}
//
// Variants:
// - bytes.SplitSeq
// - bytes.FieldsSeq
// stringsseq implements the analyzer; see the doc comment above for
// the patterns recognized and the fixes offered.
func stringsseq(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
		index   = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info    = pass.TypesInfo

		stringsSplit  = index.Object("strings", "Split")
		stringsFields = index.Object("strings", "Fields")
		bytesSplit    = index.Object("bytes", "Split")
		bytesFields   = index.Object("bytes", "Fields")
	)
	// Fast path: nothing to do unless Split/Fields is referenced.
	if !index.Used(stringsSplit, stringsFields, bytesSplit, bytesFields) {
		return nil, nil
	}

	// SplitSeq/FieldsSeq were added in go1.24.
	for curFile := range filesUsing(inspect, info, "go1.24") {
		for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) {
			rng := curRange.Node().(*ast.RangeStmt)

			// Reject "for i, line := ..." since SplitSeq is not an iter.Seq2.
			// (We require that i is blank.)
			if id, ok := rng.Key.(*ast.Ident); ok && id.Name != "_" {
				continue
			}

			// Find the call operand of the range statement,
			// whether direct or indirect.
			call, ok := rng.X.(*ast.CallExpr)
			if !ok {
				// Indirect case: the operand is a variable whose sole
				// use is this range, defined by the immediately
				// preceding "lines := f(...)" statement.
				if id, ok := rng.X.(*ast.Ident); ok {
					if v, ok := info.Uses[id].(*types.Var); ok {
						if ek, idx := curRange.ParentEdge(); ek == edge.BlockStmt_List && idx > 0 {
							curPrev, _ := curRange.PrevSibling()
							if assign, ok := curPrev.Node().(*ast.AssignStmt); ok &&
								assign.Tok == token.DEFINE &&
								len(assign.Lhs) == 1 &&
								len(assign.Rhs) == 1 &&
								info.Defs[assign.Lhs[0].(*ast.Ident)] == v &&
								soleUseIs(index, v, id) {
								// Have:
								//    lines := ...
								//    for _, line := range lines {...}
								// and no other uses of lines.
								call, _ = assign.Rhs[0].(*ast.CallExpr)
							}
						}
					}
				}
			}

			if call != nil {
				var edits []analysis.TextEdit
				if rng.Key != nil {
					// Delete (blank) RangeStmt.Key:
					//  for _, line := -> for line :=
					//  for _, _    := -> for
					//  for _       := -> for
					end := rng.Range
					if rng.Value != nil {
						end = rng.Value.Pos()
					}
					edits = append(edits, analysis.TextEdit{
						Pos: rng.Key.Pos(),
						End: end,
					})
				}

				sel, ok := call.Fun.(*ast.SelectorExpr)
				if !ok {
					continue
				}

				switch obj := typeutil.Callee(info, call); obj {
				case stringsSplit, stringsFields, bytesSplit, bytesFields:
					oldFnName := obj.Name()
					seqFnName := fmt.Sprintf("%sSeq", oldFnName)
					pass.Report(analysis.Diagnostic{
						Pos:     sel.Pos(),
						End:     sel.End(),
						Message: fmt.Sprintf("Ranging over %s is more efficient", seqFnName),
						SuggestedFixes: []analysis.SuggestedFix{{
							Message: fmt.Sprintf("Replace %s with %s", oldFnName, seqFnName),
							TextEdits: append(edits, analysis.TextEdit{
								Pos:     sel.Sel.Pos(),
								End:     sel.Sel.End(),
								NewText: []byte(seqFnName)}),
						}},
					})
				}
			}
		}
	}
	return nil, nil
}

View file

@ -0,0 +1,253 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"strings"
"unicode"
"unicode/utf8"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/edge"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typesinternal"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// TestingContextAnalyzer is the "testingcontext" analyzer.
// It suggests replacing context.WithCancel(context.Background()) plus
// "defer cancel()" inside test functions with testing.{T,B,F}.Context
// (added in Go 1.24); see the testingContext function for the exact
// conditions.
var TestingContextAnalyzer = &analysis.Analyzer{
	Name: "testingcontext",
	Doc:  analysisinternal.MustExtractDoc(doc, "testingcontext"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: testingContext,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#testingcontext",
}
// The testingContext pass replaces calls to context.WithCancel from within
// tests to a use of testing.{T,B,F}.Context(), added in Go 1.24.
//
// Specifically, the testingContext pass suggests to replace:
//
// ctx, cancel := context.WithCancel(context.Background()) // or context.TODO
// defer cancel()
//
// with:
//
// ctx := t.Context()
//
// provided:
//
// - ctx and cancel are declared by the assignment
// - the deferred call is the only use of cancel
// - the call is within a test or subtest function
// - the relevant testing.{T,B,F} is named and not shadowed at the call
// testingContext implements the analyzer; see the doc comment above
// for the replacement offered and its preconditions.
func testingContext(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info  = pass.TypesInfo

		contextWithCancel = index.Object("context", "WithCancel")
	)

calls:
	for cur := range index.Calls(contextWithCancel) {
		call := cur.Node().(*ast.CallExpr)
		// Have: context.WithCancel(...)

		arg, ok := call.Args[0].(*ast.CallExpr)
		if !ok {
			continue
		}
		if !typesinternal.IsFunctionNamed(typeutil.Callee(info, arg), "context", "Background", "TODO") {
			continue
		}
		// Have: context.WithCancel(context.{Background,TODO}())

		parent := cur.Parent()
		assign, ok := parent.Node().(*ast.AssignStmt)
		if !ok || assign.Tok != token.DEFINE {
			continue
		}
		// Have: a, b := context.WithCancel(context.{Background,TODO}())
		// Check that both a and b are declared, not redeclarations.
		// NOTE(review): in well-typed code assign.Lhs has exactly two
		// elements here (WithCancel returns two values), so lhs[1]
		// below is safe — confirm the analyzer never runs on ill-typed
		// packages.
		var lhs []types.Object
		for _, expr := range assign.Lhs {
			id, ok := expr.(*ast.Ident)
			if !ok {
				continue calls
			}
			obj, ok := info.Defs[id]
			if !ok {
				continue calls
			}
			lhs = append(lhs, obj)
		}

		next, ok := parent.NextSibling()
		if !ok {
			continue
		}
		defr, ok := next.Node().(*ast.DeferStmt)
		if !ok {
			continue
		}
		deferId, ok := defr.Call.Fun.(*ast.Ident)
		if !ok || !soleUseIs(index, lhs[1], deferId) {
			continue // b is used elsewhere
		}
		// Have:
		//   a, b := context.WithCancel(context.{Background,TODO}())
		//   defer b()

		// Check that we are in a test func.
		var testObj types.Object // relevant testing.{T,B,F}, or nil
		if curFunc, ok := enclosingFunc(cur); ok {
			switch n := curFunc.Node().(type) {
			case *ast.FuncLit:
				if ek, idx := curFunc.ParentEdge(); ek == edge.CallExpr_Args && idx == 1 {
					// Have: call(..., func(...) { ...context.WithCancel(...)... })
					obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr))
					if (typesinternal.IsMethodNamed(obj, "testing", "T", "Run") ||
						typesinternal.IsMethodNamed(obj, "testing", "B", "Run")) &&
						len(n.Type.Params.List[0].Names) == 1 {
						// Have tb.Run(..., func(..., tb *testing.[TB]) { ...context.WithCancel(...)... }
						testObj = info.Defs[n.Type.Params.List[0].Names[0]]
					}
				}
			case *ast.FuncDecl:
				testObj = isTestFn(info, n)
			}
		}

		if testObj != nil && fileUses(info, astutil.EnclosingFile(cur), "go1.24") {
			// Have a test function. Check that we can resolve the relevant
			// testing.{T,B,F} at the current position.
			if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj {
				pass.Report(analysis.Diagnostic{
					Pos:     call.Fun.Pos(),
					End:     call.Fun.End(),
					Message: fmt.Sprintf("context.WithCancel can be modernized using %s.Context", testObj.Name()),
					SuggestedFixes: []analysis.SuggestedFix{{
						Message: fmt.Sprintf("Replace context.WithCancel with %s.Context", testObj.Name()),
						TextEdits: []analysis.TextEdit{{
							Pos:     assign.Pos(),
							End:     defr.End(),
							NewText: fmt.Appendf(nil, "%s := %s.Context()", lhs[0].Name(), testObj.Name()),
						}},
					}},
				})
			}
		}
	}
	return nil, nil
}
// soleUseIs reports whether id is the sole Ident that uses obj.
// (It returns false if there were no uses of obj.)
func soleUseIs(index *typeindex.Index, obj types.Object, id *ast.Ident) bool {
	found := false
	for use := range index.Uses(obj) {
		if use.Node() != id {
			return false // some other identifier uses obj
		}
		found = true
	}
	return found
}
// isTestFn checks whether fn is a test function (TestX, BenchmarkX, FuzzX),
// returning the corresponding types.Object of the *testing.{T,B,F} argument.
// It returns nil if fn is a test function but the testing.{T,B,F} argument
// is unnamed (or _).
//
// TODO(rfindley): consider handling the case of an unnamed argument, by adding
// an edit to give the argument a name.
//
// Adapted from go/analysis/passes/tests.
// TODO(rfindley): consider refactoring to share logic.
func isTestFn(info *types.Info, fn *ast.FuncDecl) types.Object {
	// Want functions with 0 results and exactly 1 named parameter.
	results, params := fn.Type.Results, fn.Type.Params
	if results != nil && len(results.List) > 0 {
		return nil
	}
	if params == nil || len(params.List) != 1 || len(params.List[0].Names) != 1 {
		return nil
	}

	prefix := testKind(fn.Name.Name)
	if prefix == "" {
		return nil
	}

	// Test functions must not be generic.
	if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 {
		return nil
	}

	obj := info.Defs[params.List[0].Names[0]]
	if obj == nil {
		return nil // e.g. _ *testing.T
	}

	// Map the name prefix to the corresponding testing type
	// (testKind returns only these three prefixes, or "").
	typeName := map[string]string{
		"Test":      "T",
		"Benchmark": "B",
		"Fuzz":      "F",
	}[prefix]
	if !typesinternal.IsPointerToNamed(obj.Type(), "testing", typeName) {
		return nil
	}
	return obj
}
// testKind returns "Test", "Benchmark", or "Fuzz" if name is a valid
// test, benchmark, or fuzz function name, respectively. Otherwise it
// returns "".
//
// Adapted from go/analysis/passes/tests.isTestName.
func testKind(name string) string {
	for _, prefix := range []string{"Test", "Benchmark", "Fuzz"} {
		rest, ok := strings.CutPrefix(name, prefix)
		if !ok {
			continue
		}
		if rest == "" {
			// "Test" (etc.) alone is a valid name.
			return prefix
		}
		// The rune after the prefix must not be lowercase:
		// e.g. "Testify" is not a test function.
		if r, _ := utf8.DecodeRuneInString(rest); unicode.IsLower(r) {
			return ""
		}
		return prefix
	}
	return ""
}

View file

@ -0,0 +1,175 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package modernize
import (
"bytes"
"fmt"
"go/ast"
"go/printer"
"slices"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/analysisinternal"
"golang.org/x/tools/internal/analysisinternal/generated"
typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/refactor"
"golang.org/x/tools/internal/typesinternal/typeindex"
)
// WaitGroupAnalyzer is the "waitgroup" analyzer.
// It suggests replacing the wg.Add(1); go func() { defer wg.Done(); ... }()
// pattern with sync.WaitGroup.Go, added in Go 1.25; see the waitgroup
// function for the exact patterns recognized.
var WaitGroupAnalyzer = &analysis.Analyzer{
	Name: "waitgroup",
	Doc:  analysisinternal.MustExtractDoc(doc, "waitgroup"),
	Requires: []*analysis.Analyzer{
		generated.Analyzer,
		inspect.Analyzer,
		typeindexanalyzer.Analyzer,
	},
	Run: waitgroup,
	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#waitgroup",
}
// The waitgroup pass replaces old more complex code with
// go1.25 added API WaitGroup.Go.
//
// Patterns:
//
// 1. wg.Add(1); go func() { defer wg.Done(); ... }()
// =>
// wg.Go(func() { ... })
//
// 2. wg.Add(1); go func() { ...; wg.Done() }()
// =>
// wg.Go(func() { ... })
//
// The wg.Done must occur within the first statement of the block in a
// defer format or last statement of the block, and the offered fix
// only removes the first/last wg.Done call. It doesn't fix existing
// wrong usage of sync.WaitGroup.
//
// The use of WaitGroup.Go in pattern 1 implicitly introduces a
// 'defer', which may change the behavior in the case of panic from
// the "..." logic. In this instance, the change is safe: before and
// after the transformation, an unhandled panic inevitably results in
// a fatal crash. The fact that the transformed code calls wg.Done()
// before the crash doesn't materially change anything. (If Done had
// other effects, or blocked, or if WaitGroup.Go propagated panics
// from child to parent goroutine, the argument would be different.)
// waitgroup implements the analyzer; see the doc comment above for
// the patterns recognized and the safety argument for the fix.
func waitgroup(pass *analysis.Pass) (any, error) {
	skipGenerated(pass)
	var (
		index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
		info  = pass.TypesInfo

		syncWaitGroupAdd  = index.Selection("sync", "WaitGroup", "Add")
		syncWaitGroupDone = index.Selection("sync", "WaitGroup", "Done")
	)
	// Fast path: nothing to do unless WaitGroup.Done is referenced.
	if !index.Used(syncWaitGroupDone) {
		return nil, nil
	}

	for curAddCall := range index.Calls(syncWaitGroupAdd) {
		// Extract receiver from wg.Add call.
		addCall := curAddCall.Node().(*ast.CallExpr)
		if !isIntLiteral(info, addCall.Args[0], 1) {
			continue // not a call to wg.Add(1)
		}
		// Inv: the Args[0] check ensures addCall is not of
		// the form sync.WaitGroup.Add(&wg, 1).
		addCallRecv := ast.Unparen(addCall.Fun).(*ast.SelectorExpr).X

		// Following statement must be go func() { ... } ().
		curAddStmt := curAddCall.Parent()
		if !is[*ast.ExprStmt](curAddStmt.Node()) {
			continue // unnecessary parens?
		}
		curNext, ok := curAddCall.Parent().NextSibling()
		if !ok {
			continue // no successor
		}
		goStmt, ok := curNext.Node().(*ast.GoStmt)
		if !ok {
			continue // not a go stmt
		}
		lit, ok := goStmt.Call.Fun.(*ast.FuncLit)
		if !ok || len(goStmt.Call.Args) != 0 {
			continue // go argument is not func(){...}()
		}
		list := lit.Body.List
		if len(list) == 0 {
			continue
		}

		// Body must start with "defer wg.Done()" or end with "wg.Done()",
		// where wg is syntactically the same receiver as in wg.Add(1).
		var doneStmt ast.Stmt
		if deferStmt, ok := list[0].(*ast.DeferStmt); ok &&
			typeutil.Callee(info, deferStmt.Call) == syncWaitGroupDone &&
			astutil.EqualSyntax(ast.Unparen(deferStmt.Call.Fun).(*ast.SelectorExpr).X, addCallRecv) {
			doneStmt = deferStmt // "defer wg.Done()"
		} else if lastStmt, ok := list[len(list)-1].(*ast.ExprStmt); ok {
			if doneCall, ok := lastStmt.X.(*ast.CallExpr); ok &&
				typeutil.Callee(info, doneCall) == syncWaitGroupDone &&
				astutil.EqualSyntax(ast.Unparen(doneCall.Fun).(*ast.SelectorExpr).X, addCallRecv) {
				doneStmt = lastStmt // "wg.Done()"
			}
		}
		if doneStmt == nil {
			continue
		}
		curDoneStmt, ok := curNext.FindNode(doneStmt)
		if !ok {
			panic("can't find Cursor for 'done' statement")
		}

		// WaitGroup.Go was added in go1.25.
		file := astutil.EnclosingFile(curAddCall)
		if !fileUses(info, file, "go1.25") {
			continue
		}
		tokFile := pass.Fset.File(file.Pos())

		// Format the receiver expression for splicing into the fix text.
		var addCallRecvText bytes.Buffer
		err := printer.Fprint(&addCallRecvText, pass.Fset, addCallRecv)
		if err != nil {
			continue // error getting text for the edit
		}

		pass.Report(analysis.Diagnostic{
			Pos:     addCall.Pos(),
			End:     goStmt.End(),
			Message: "Goroutine creation can be simplified using WaitGroup.Go",
			SuggestedFixes: []analysis.SuggestedFix{{
				Message: "Simplify by using WaitGroup.Go",
				TextEdits: slices.Concat(
					// delete "wg.Add(1)"
					refactor.DeleteStmt(tokFile, curAddStmt),
					// delete "wg.Done()" or "defer wg.Done()"
					refactor.DeleteStmt(tokFile, curDoneStmt),
					[]analysis.TextEdit{
						// go func()
						// ------
						// wg.Go(func()
						{
							Pos:     goStmt.Pos(),
							End:     goStmt.Call.Pos(),
							NewText: fmt.Appendf(nil, "%s.Go(", addCallRecvText.String()),
						},
						// ... }()
						//      -
						// ... } )
						{
							Pos: goStmt.Call.Lparen,
							End: goStmt.Call.Rparen,
						},
					},
				),
			}},
		})
	}
	return nil, nil
}

View file

@ -0,0 +1,663 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
// This file defines utilities for working with source positions.
import (
"fmt"
"go/ast"
"go/token"
"sort"
)
// PathEnclosingInterval returns the node that encloses the source
// interval [start, end), and all its ancestors up to the AST root.
//
// The definition of "enclosing" used by this function considers
// additional whitespace abutting a node to be enclosed by it.
// In this example:
//
// z := x + y // add them
// <-A->
// <----B----->
//
// the ast.BinaryExpr(+) node is considered to enclose interval B
// even though its [Pos()..End()) is actually only interval A.
// This behaviour makes user interfaces more tolerant of imperfect
// input.
//
// This function treats tokens as nodes, though they are not included
// in the result. e.g. PathEnclosingInterval("+") returns the
// enclosing ast.BinaryExpr("x + y").
//
// If start==end, the 1-char interval following start is used instead.
//
// The 'exact' result is true if the interval contains only path[0]
// and perhaps some adjacent whitespace. It is false if the interval
// overlaps multiple children of path[0], or if it contains only
// interior whitespace of path[0].
// In this example:
//
// z := x + y // add them
// <--C--> <---E-->
// ^
// D
//
// intervals C, D and E are inexact. C is contained by the
// z-assignment statement, because it spans three of its children (:=,
// x, +). So too is the 1-char interval D, because it contains only
// interior whitespace of the assignment. E is considered interior
// whitespace of the BlockStmt containing the assignment.
//
// The resulting path is never empty; it always contains at least the
// 'root' *ast.File. Ideally PathEnclosingInterval would reject
// intervals that lie wholly or partially outside the range of the
// file, but unfortunately ast.File records only the token.Pos of
// the 'package' keyword, but not of the start of the file itself.
// PathEnclosingInterval returns the node enclosing [start, end) and
// all its ancestors; see the doc comment above for the precise
// definition of "enclosing" and of the 'exact' result.
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
	// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging

	// visit recursively descends into the sole child (if any) whose
	// whitespace-augmented interval contains [start, end), appending
	// each visited node to path (root first; reversed below).
	// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
	var visit func(node ast.Node) bool
	visit = func(node ast.Node) bool {
		path = append(path, node)

		nodePos := node.Pos()
		nodeEnd := node.End()

		// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging

		// Intersect [start, end) with interval of node.
		if start < nodePos {
			start = nodePos
		}
		if end > nodeEnd {
			end = nodeEnd
		}

		// Find sole child that contains [start, end).
		children := childrenOf(node)
		l := len(children)
		for i, child := range children {
			// [childPos, childEnd) is unaugmented interval of child.
			childPos := child.Pos()
			childEnd := child.End()

			// [augPos, augEnd) is whitespace-augmented interval of child.
			augPos := childPos
			augEnd := childEnd
			if i > 0 {
				augPos = children[i-1].End() // start of preceding whitespace
			}
			if i < l-1 {
				nextChildPos := children[i+1].Pos()
				// Does [start, end) lie between child and next child?
				if start >= augEnd && end <= nextChildPos {
					return false // inexact match
				}
				augEnd = nextChildPos // end of following whitespace
			}

			// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
			//	i, augPos, augEnd, start, end) // debugging

			// Does augmented child strictly contain [start, end)?
			if augPos <= start && end <= augEnd {
				if is[tokenNode](child) {
					return true
				}

				// childrenOf elides the FuncType node beneath FuncDecl.
				// Add it back here for TypeParams, Params, Results,
				// all FieldLists). But we don't add it back for the "func" token
				// even though it is the tree at FuncDecl.Type.Func.
				if decl, ok := node.(*ast.FuncDecl); ok {
					if fields, ok := child.(*ast.FieldList); ok && fields != decl.Recv {
						path = append(path, decl.Type)
					}
				}

				return visit(child)
			}

			// Does [start, end) overlap multiple children?
			// i.e. left-augmented child contains start
			// but LR-augmented child does not contain end.
			if start < childEnd && end > augEnd {
				break
			}
		}

		// No single child contained [start, end),
		// so node is the result. Is it exact?
		//
		// (It's tempting to put this condition before the
		// child loop, but it gives the wrong result in the
		// case where a node (e.g. ExprStmt) and its sole
		// child have equal intervals.)
		if start == nodePos && end == nodeEnd {
			return true // exact match
		}

		return false // inexact: overlaps multiple children
	}

	// Ensure [start,end) is nondecreasing.
	if start > end {
		start, end = end, start
	}

	if start < root.End() && end > root.Pos() {
		if start == end {
			end = start + 1 // empty interval => interval of size 1
		}
		exact = visit(root)

		// Reverse the path: visit appended nodes root-first,
		// but the result is innermost-first.
		for i, l := 0, len(path); i < l/2; i++ {
			path[i], path[l-1-i] = path[l-1-i], path[i]
		}
	} else {
		// Selection lies within whitespace preceding the
		// first (or following the last) declaration in the file.
		// The result nonetheless always includes the ast.File.
		path = append(path, root)
	}
	return
}
// tokenNode is a dummy implementation of ast.Node for a single token.
// They are used transiently by PathEnclosingInterval but never escape
// this package.
type tokenNode struct {
pos token.Pos
end token.Pos
}
func (n tokenNode) Pos() token.Pos {
return n.pos
}
func (n tokenNode) End() token.Pos {
return n.end
}
// tok returns a fake ast.Node standing for the token of the given
// length starting at pos.
func tok(pos token.Pos, len int) ast.Node {
	end := pos + token.Pos(len)
	return tokenNode{pos: pos, end: end}
}
// childrenOf returns the direct non-nil children of ast.Node n.
// It may include fake ast.Node implementations for bare tokens.
// it is not safe to call (e.g.) ast.Walk on such nodes.
func childrenOf(n ast.Node) []ast.Node {
var children []ast.Node
// First add nodes for all true subtrees.
ast.Inspect(n, func(node ast.Node) bool {
if node == n { // push n
return true // recur
}
if node != nil { // push child
children = append(children, node)
}
return false // no recursion
})
// TODO(adonovan): be more careful about missing (!Pos.Valid)
// tokens in trees produced from invalid input.
// Then add fake Nodes for bare tokens.
switch n := n.(type) {
case *ast.ArrayType:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Elt.End(), len("]")))
case *ast.AssignStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.BasicLit:
children = append(children,
tok(n.ValuePos, len(n.Value)))
case *ast.BinaryExpr:
children = append(children, tok(n.OpPos, len(n.Op.String())))
case *ast.BlockStmt:
if n.Lbrace.IsValid() {
children = append(children, tok(n.Lbrace, len("{")))
}
if n.Rbrace.IsValid() {
children = append(children, tok(n.Rbrace, len("}")))
}
case *ast.BranchStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.CallExpr:
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
if n.Ellipsis != 0 {
children = append(children, tok(n.Ellipsis, len("...")))
}
case *ast.CaseClause:
if n.List == nil {
children = append(children,
tok(n.Case, len("default")))
} else {
children = append(children,
tok(n.Case, len("case")))
}
children = append(children, tok(n.Colon, len(":")))
case *ast.ChanType:
switch n.Dir {
case ast.RECV:
children = append(children, tok(n.Begin, len("<-chan")))
case ast.SEND:
children = append(children, tok(n.Begin, len("chan<-")))
case ast.RECV | ast.SEND:
children = append(children, tok(n.Begin, len("chan")))
}
case *ast.CommClause:
if n.Comm == nil {
children = append(children,
tok(n.Case, len("default")))
} else {
children = append(children,
tok(n.Case, len("case")))
}
children = append(children, tok(n.Colon, len(":")))
case *ast.Comment:
// nop
case *ast.CommentGroup:
// nop
case *ast.CompositeLit:
children = append(children,
tok(n.Lbrace, len("{")),
tok(n.Rbrace, len("{")))
case *ast.DeclStmt:
// nop
case *ast.DeferStmt:
children = append(children,
tok(n.Defer, len("defer")))
case *ast.Ellipsis:
children = append(children,
tok(n.Ellipsis, len("...")))
case *ast.EmptyStmt:
// nop
case *ast.ExprStmt:
// nop
case *ast.Field:
// TODO(adonovan): Field.{Doc,Comment,Tag}?
case *ast.FieldList:
if n.Opening.IsValid() {
children = append(children, tok(n.Opening, len("(")))
}
if n.Closing.IsValid() {
children = append(children, tok(n.Closing, len(")")))
}
case *ast.File:
// TODO test: Doc
children = append(children,
tok(n.Package, len("package")))
case *ast.ForStmt:
children = append(children,
tok(n.For, len("for")))
case *ast.FuncDecl:
// TODO(adonovan): FuncDecl.Comment?
// Uniquely, FuncDecl breaks the invariant that
// preorder traversal yields tokens in lexical order:
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
//
// As a workaround, we inline the case for FuncType
// here and order things correctly.
// We also need to insert the elided FuncType just
// before the 'visit' recursion.
//
children = nil // discard ast.Walk(FuncDecl) info subtrees
children = append(children, tok(n.Type.Func, len("func")))
if n.Recv != nil {
children = append(children, n.Recv)
}
children = append(children, n.Name)
if tparams := n.Type.TypeParams; tparams != nil {
children = append(children, tparams)
}
if n.Type.Params != nil {
children = append(children, n.Type.Params)
}
if n.Type.Results != nil {
children = append(children, n.Type.Results)
}
if n.Body != nil {
children = append(children, n.Body)
}
case *ast.FuncLit:
// nop
case *ast.FuncType:
if n.Func != 0 {
children = append(children,
tok(n.Func, len("func")))
}
case *ast.GenDecl:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
if n.Lparen != 0 {
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
}
case *ast.GoStmt:
children = append(children,
tok(n.Go, len("go")))
case *ast.Ident:
children = append(children,
tok(n.NamePos, len(n.Name)))
case *ast.IfStmt:
children = append(children,
tok(n.If, len("if")))
case *ast.ImportSpec:
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
case *ast.IncDecStmt:
children = append(children,
tok(n.TokPos, len(n.Tok.String())))
case *ast.IndexExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.IndexListExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.InterfaceType:
children = append(children,
tok(n.Interface, len("interface")))
case *ast.KeyValueExpr:
children = append(children,
tok(n.Colon, len(":")))
case *ast.LabeledStmt:
children = append(children,
tok(n.Colon, len(":")))
case *ast.MapType:
children = append(children,
tok(n.Map, len("map")))
case *ast.ParenExpr:
children = append(children,
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
case *ast.RangeStmt:
children = append(children,
tok(n.For, len("for")),
tok(n.TokPos, len(n.Tok.String())))
case *ast.ReturnStmt:
children = append(children,
tok(n.Return, len("return")))
case *ast.SelectStmt:
children = append(children,
tok(n.Select, len("select")))
case *ast.SelectorExpr:
// nop
case *ast.SendStmt:
children = append(children,
tok(n.Arrow, len("<-")))
case *ast.SliceExpr:
children = append(children,
tok(n.Lbrack, len("[")),
tok(n.Rbrack, len("]")))
case *ast.StarExpr:
children = append(children, tok(n.Star, len("*")))
case *ast.StructType:
children = append(children, tok(n.Struct, len("struct")))
case *ast.SwitchStmt:
children = append(children, tok(n.Switch, len("switch")))
case *ast.TypeAssertExpr:
children = append(children,
tok(n.Lparen-1, len(".")),
tok(n.Lparen, len("(")),
tok(n.Rparen, len(")")))
case *ast.TypeSpec:
// TODO(adonovan): TypeSpec.{Doc,Comment}?
case *ast.TypeSwitchStmt:
children = append(children, tok(n.Switch, len("switch")))
case *ast.UnaryExpr:
children = append(children, tok(n.OpPos, len(n.Op.String())))
case *ast.ValueSpec:
// TODO(adonovan): ValueSpec.{Doc,Comment}?
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
// nop
}
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
// the switch above so we can make interleaved callbacks for
// both Nodes and Tokens in the right order and avoid the need
// to sort.
sort.Sort(byPos(children))
return children
}
type byPos []ast.Node
func (sl byPos) Len() int {
return len(sl)
}
func (sl byPos) Less(i, j int) bool {
return sl[i].Pos() < sl[j].Pos()
}
func (sl byPos) Swap(i, j int) {
sl[i], sl[j] = sl[j], sl[i]
}
// NodeDescription returns a description of the concrete type of n suitable
// for a user interface.
//
// The description is a short English noun phrase; for a few node types
// (BinaryExpr, UnaryExpr, DeclStmt, ParenExpr) it is derived from the
// operator or operand of n.
//
// NodeDescription panics if n's type is not one of the known ast node
// types (see the panic at the end of the function).
//
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
// StarExpr) we could be much more specific given the path to the AST
// root. Perhaps we should do that.
func NodeDescription(n ast.Node) string {
	switch n := n.(type) {
	case *ast.ArrayType:
		return "array type"
	case *ast.AssignStmt:
		return "assignment"
	case *ast.BadDecl:
		return "bad declaration"
	case *ast.BadExpr:
		return "bad expression"
	case *ast.BadStmt:
		return "bad statement"
	case *ast.BasicLit:
		return "basic literal"
	case *ast.BinaryExpr:
		return fmt.Sprintf("binary %s operation", n.Op)
	case *ast.BlockStmt:
		return "block"
	case *ast.BranchStmt:
		// An invalid Tok falls through to the panic below.
		switch n.Tok {
		case token.BREAK:
			return "break statement"
		case token.CONTINUE:
			return "continue statement"
		case token.GOTO:
			return "goto statement"
		case token.FALLTHROUGH:
			return "fall-through statement"
		}
	case *ast.CallExpr:
		// A single-argument call may in fact be a type conversion;
		// without type information we cannot tell, so hedge.
		if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
			return "function call (or conversion)"
		}
		return "function call"
	case *ast.CaseClause:
		return "case clause"
	case *ast.ChanType:
		return "channel type"
	case *ast.CommClause:
		return "communication clause"
	case *ast.Comment:
		return "comment"
	case *ast.CommentGroup:
		return "comment group"
	case *ast.CompositeLit:
		return "composite literal"
	case *ast.DeclStmt:
		// e.g. "variable declaration statement".
		return NodeDescription(n.Decl) + " statement"
	case *ast.DeferStmt:
		return "defer statement"
	case *ast.Ellipsis:
		return "ellipsis"
	case *ast.EmptyStmt:
		return "empty statement"
	case *ast.ExprStmt:
		return "expression statement"
	case *ast.Field:
		// Can be any of these:
		// struct {x, y int}  -- struct field(s)
		// struct {T}         -- anon struct field
		// interface {I}      -- interface embedding
		// interface {f()}    -- interface method
		// func (A) func(B) C -- receiver, param(s), result(s)
		return "field/method/parameter"
	case *ast.FieldList:
		return "field/method/parameter list"
	case *ast.File:
		return "source file"
	case *ast.ForStmt:
		return "for loop"
	case *ast.FuncDecl:
		return "function declaration"
	case *ast.FuncLit:
		return "function literal"
	case *ast.FuncType:
		return "function type"
	case *ast.GenDecl:
		// An invalid Tok falls through to the panic below.
		switch n.Tok {
		case token.IMPORT:
			return "import declaration"
		case token.CONST:
			return "constant declaration"
		case token.TYPE:
			return "type declaration"
		case token.VAR:
			return "variable declaration"
		}
	case *ast.GoStmt:
		return "go statement"
	case *ast.Ident:
		return "identifier"
	case *ast.IfStmt:
		return "if statement"
	case *ast.ImportSpec:
		return "import specification"
	case *ast.IncDecStmt:
		if n.Tok == token.INC {
			return "increment statement"
		}
		return "decrement statement"
	case *ast.IndexExpr:
		return "index expression"
	case *ast.IndexListExpr:
		return "index list expression"
	case *ast.InterfaceType:
		return "interface type"
	case *ast.KeyValueExpr:
		return "key/value association"
	case *ast.LabeledStmt:
		return "statement label"
	case *ast.MapType:
		return "map type"
	case *ast.Package:
		return "package"
	case *ast.ParenExpr:
		// e.g. "parenthesized identifier".
		return "parenthesized " + NodeDescription(n.X)
	case *ast.RangeStmt:
		return "range loop"
	case *ast.ReturnStmt:
		return "return statement"
	case *ast.SelectStmt:
		return "select statement"
	case *ast.SelectorExpr:
		return "selector"
	case *ast.SendStmt:
		return "channel send"
	case *ast.SliceExpr:
		return "slice expression"
	case *ast.StarExpr:
		return "*-operation" // load/store expr or pointer type
	case *ast.StructType:
		return "struct type"
	case *ast.SwitchStmt:
		return "switch statement"
	case *ast.TypeAssertExpr:
		return "type assertion"
	case *ast.TypeSpec:
		return "type specification"
	case *ast.TypeSwitchStmt:
		return "type switch"
	case *ast.UnaryExpr:
		return fmt.Sprintf("unary %s operation", n.Op)
	case *ast.ValueSpec:
		return "value specification"

	}
	// Reached for unknown node types, and for BranchStmt/GenDecl
	// whose Tok is not one of the expected token values (both
	// sub-switches above fall through to here).
	panic(fmt.Sprintf("unexpected node type: %T", n))
}
// is reports whether the dynamic type of x satisfies a type
// assertion to T.
func is[T any](x any) bool {
	if _, ok := x.(T); ok {
		return true
	}
	return false
}

View file

@ -0,0 +1,472 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package astutil contains common utilities for working with the Go AST.
package astutil // import "golang.org/x/tools/go/ast/astutil"
import (
"fmt"
"go/ast"
"go/token"
"slices"
"strconv"
"strings"
)
// AddImport adds the import path to the file f, if absent,
// reporting whether the import was added. It is shorthand for
// AddNamedImport with an empty local name.
func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
	added = AddNamedImport(fset, f, "", path)
	return
}
// AddNamedImport adds the import with the given name and path to the file f, if absent.
// If name is not empty, it is used to rename the import.
//
// For example, calling
//
//	AddNamedImport(fset, f, "pathpkg", "path")
//
// adds
//
//	import pathpkg "path"
//
// The new spec is appended to f.Imports, and all import declarations
// (except import "C") are merged into the first one; token positions
// are patched so that later formatting/sorting keeps the new import in
// the intended group.
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
	if imports(f, name, path) {
		return false
	}

	newImport := &ast.ImportSpec{
		Path: &ast.BasicLit{
			Kind:  token.STRING,
			Value: strconv.Quote(path),
		},
	}
	if name != "" {
		newImport.Name = &ast.Ident{Name: name}
	}

	// Find an import decl to add to.
	// The goal is to find an existing import
	// whose import path has the longest shared
	// prefix with path.
	var (
		bestMatch  = -1           // length of longest shared prefix
		lastImport = -1           // index in f.Decls of the file's final import decl
		impDecl    *ast.GenDecl   // import decl containing the best match
		impIndex   = -1           // spec index in impDecl containing the best match

		isThirdPartyPath = isThirdParty(path)
	)
	for i, decl := range f.Decls {
		gen, ok := decl.(*ast.GenDecl)
		if ok && gen.Tok == token.IMPORT {
			lastImport = i
			// Do not add to import "C", to avoid disrupting the
			// association with its doc comment, breaking cgo.
			if declImports(gen, "C") {
				continue
			}

			// Match an empty import decl if that's all that is available.
			if len(gen.Specs) == 0 && bestMatch == -1 {
				impDecl = gen
			}

			// Compute longest shared prefix with imports in this group and find best
			// matched import spec.
			// 1. Always prefer import spec with longest shared prefix.
			// 2. While match length is 0,
			// - for stdlib package: prefer first import spec.
			// - for third party package: prefer first third party import spec.
			// We cannot use last import spec as best match for third party package
			// because grouped imports are usually placed last by goimports -local
			// flag.
			// See issue #19190.
			seenAnyThirdParty := false
			for j, spec := range gen.Specs {
				impspec := spec.(*ast.ImportSpec)
				p := importPath(impspec)
				n := matchLen(p, path)
				if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
					bestMatch = n
					impDecl = gen
					impIndex = j
				}
				seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
			}
		}
	}

	// If no import decl found, add one after the last import.
	if impDecl == nil {
		impDecl = &ast.GenDecl{
			Tok: token.IMPORT,
		}
		if lastImport >= 0 {
			impDecl.TokPos = f.Decls[lastImport].End()
		} else {
			// There are no existing imports.
			// Our new import, preceded by a blank line, goes after the package declaration
			// and after the comment, if any, that starts on the same line as the
			// package declaration.
			impDecl.TokPos = f.Package

			file := fset.File(f.Package)
			pkgLine := file.Line(f.Package)
			for _, c := range f.Comments {
				if file.Line(c.Pos()) > pkgLine {
					break
				}
				// +2 for a blank line
				impDecl.TokPos = c.End() + 2
			}
		}
		// Shift decls right by one to make room at lastImport+1.
		f.Decls = append(f.Decls, nil)
		copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
		f.Decls[lastImport+1] = impDecl
	}

	// Insert new import at insertAt.
	insertAt := 0
	if impIndex >= 0 {
		// insert after the found import
		insertAt = impIndex + 1
	}
	// Shift specs right by one to open a hole at insertAt.
	impDecl.Specs = append(impDecl.Specs, nil)
	copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
	impDecl.Specs[insertAt] = newImport
	pos := impDecl.Pos()
	if insertAt > 0 {
		// If there is a comment after an existing import, preserve the comment
		// position by adding the new import after the comment.
		if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
			pos = spec.Comment.End()
		} else {
			// Assign same position as the previous import,
			// so that the sorter sees it as being in the same block.
			pos = impDecl.Specs[insertAt-1].Pos()
		}
	}
	if newImport.Name != nil {
		newImport.Name.NamePos = pos
	}
	newImport.Path.ValuePos = pos
	newImport.EndPos = pos

	// Clean up parens. impDecl contains at least one spec.
	if len(impDecl.Specs) == 1 {
		// Remove unneeded parens.
		impDecl.Lparen = token.NoPos
	} else if !impDecl.Lparen.IsValid() {
		// impDecl needs parens added.
		impDecl.Lparen = impDecl.Specs[0].Pos()
	}

	f.Imports = append(f.Imports, newImport)

	if len(f.Decls) <= 1 {
		return true
	}

	// Merge all the import declarations into the first one.
	var first *ast.GenDecl
	for i := 0; i < len(f.Decls); i++ {
		decl := f.Decls[i]
		gen, ok := decl.(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
			continue
		}
		if first == nil {
			first = gen
			continue // Don't touch the first one.
		}
		// We now know there is more than one package in this import
		// declaration. Ensure that it ends up parenthesized.
		first.Lparen = first.Pos()
		// Move the imports of the other import declaration to the first one.
		for _, spec := range gen.Specs {
			spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
			first.Specs = append(first.Specs, spec)
		}
		f.Decls = slices.Delete(f.Decls, i, i+1)
		i--
	}

	return true
}
// isThirdParty reports whether importPath appears to name a package
// outside the standard library. Third-party import paths usually
// start with a domain (".com", ".org", ...), so the presence of a dot
// is used as the heuristic.
// This logic is taken from golang.org/x/tools/imports package.
func isThirdParty(importPath string) bool {
	return strings.ContainsRune(importPath, '.')
}
// DeleteImport deletes the import path from the file f, if present,
// reporting whether a deletion occurred. It is shorthand for
// DeleteNamedImport with an empty local name.
// If there are duplicate import declarations, all matching ones are deleted.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
	deleted = DeleteNamedImport(fset, f, "", path)
	return
}
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
// If there are duplicate import declarations, all matching ones are deleted.
//
// Besides removing the specs from f.Decls, the function also removes
// the corresponding entries of f.Imports and f.Comments, and merges
// source lines so that deletion does not leave blank holes or stray
// comment placement in the formatted output.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
	var (
		delspecs    = make(map[*ast.ImportSpec]bool)  // specs to drop from f.Imports at the end
		delcomments = make(map[*ast.CommentGroup]bool) // comment groups to drop from f.Comments
	)

	// Find the import nodes that import path, if any.
	for i := 0; i < len(f.Decls); i++ {
		gen, ok := f.Decls[i].(*ast.GenDecl)
		if !ok || gen.Tok != token.IMPORT {
			continue
		}
		for j := 0; j < len(gen.Specs); j++ {
			impspec := gen.Specs[j].(*ast.ImportSpec)
			if importName(impspec) != name || importPath(impspec) != path {
				continue
			}

			// We found an import spec that imports path.
			// Delete it.
			delspecs[impspec] = true
			deleted = true
			gen.Specs = slices.Delete(gen.Specs, j, j+1)

			// If this was the last import spec in this decl,
			// delete the decl, too.
			if len(gen.Specs) == 0 {
				f.Decls = slices.Delete(f.Decls, i, i+1)
				i--
				break
			} else if len(gen.Specs) == 1 {
				// Only one spec remains: its doc and trailing comments
				// would otherwise be mis-attributed after reformatting.
				if impspec.Doc != nil {
					delcomments[impspec.Doc] = true
				}
				if impspec.Comment != nil {
					delcomments[impspec.Comment] = true
				}
				for _, cg := range f.Comments {
					// Found comment on the same line as the import spec.
					if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
						delcomments[cg] = true
						break
					}
				}

				spec := gen.Specs[0].(*ast.ImportSpec)

				// Move the documentation right after the import decl.
				if spec.Doc != nil {
					for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
						fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
					}
				}
				for _, cg := range f.Comments {
					if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
						for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
							fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
						}
						break
					}
				}
			}
			if j > 0 {
				lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
				lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
				line := fset.PositionFor(impspec.Path.ValuePos, false).Line

				// We deleted an entry but now there may be
				// a blank line-sized hole where the import was.
				if line-lastLine > 1 || !gen.Rparen.IsValid() {
					// There was a blank line immediately preceding the deleted import,
					// so there's no need to close the hole. The right parenthesis is
					// invalid after AddImport to an import statement without parenthesis.
					// Do nothing.
				} else if line != fset.File(gen.Rparen).LineCount() {
					// There was no blank line. Close the hole.
					fset.File(gen.Rparen).MergeLine(line)
				}
			}
			// Re-examine index j, which now holds the next spec.
			j--
		}
	}

	// Delete imports from f.Imports.
	before := len(f.Imports)
	f.Imports = slices.DeleteFunc(f.Imports, func(imp *ast.ImportSpec) bool {
		_, ok := delspecs[imp]
		return ok
	})
	if len(f.Imports)+len(delspecs) != before {
		// This can happen when the AST is invalid (i.e. imports differ between f.Decls and f.Imports).
		panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
	}

	// Delete comments from f.Comments.
	f.Comments = slices.DeleteFunc(f.Comments, func(cg *ast.CommentGroup) bool {
		_, ok := delcomments[cg]
		return ok
	})

	return
}
// RewriteImport rewrites any import of path oldPath to path newPath,
// reporting whether at least one import was rewritten.
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
	for _, imp := range f.Imports {
		if importPath(imp) != oldPath {
			continue
		}
		rewrote = true
		// Pin the original End position: by default End is computed
		// from the length of imp.Path.Value, which is about to change.
		imp.EndPos = imp.End()
		imp.Path.Value = strconv.Quote(newPath)
	}
	return rewrote
}
// UsesImport reports whether a given import is used.
// The provided File must have been parsed with syntactic object resolution
// (not using go/parser.SkipObjectResolution).
func UsesImport(f *ast.File, path string) (used bool) {
	if f.Scope == nil {
		panic("file f was not parsed with syntactic object resolution")
	}
	spec := importSpec(f, path)
	if spec == nil {
		return false
	}

	// Determine the local name under which the package is referenced.
	name := spec.Name.String()
	switch name {
	case "<nil>":
		// No explicit local name: guess the package name from the
		// final path segment. Not guaranteed to be correct.
		if slash := strings.LastIndex(path, "/"); slash >= 0 {
			name = path[slash+1:]
		} else {
			name = path
		}
	case "_", ".":
		// Blank and dot imports: not sure whether the import is
		// used, so err on the side of caution.
		return true
	}

	// The import is used if some selector expression x.Sel has a
	// top-level (unresolved) identifier x matching name.
	ast.Walk(visitFn(func(n ast.Node) {
		if sel, ok := n.(*ast.SelectorExpr); ok && isTopName(sel.X, name) {
			used = true
		}
	}), f)

	return used
}
type visitFn func(node ast.Node)
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
fn(node)
return fn
}
// imports reports whether f has an import with the specified name and path.
func imports(f *ast.File, name, path string) bool {
	return slices.ContainsFunc(f.Imports, func(s *ast.ImportSpec) bool {
		return importName(s) == name && importPath(s) == path
	})
}
// importSpec returns the import spec if f imports path,
// or nil otherwise.
func importSpec(f *ast.File, path string) *ast.ImportSpec {
	i := slices.IndexFunc(f.Imports, func(s *ast.ImportSpec) bool {
		return importPath(s) == path
	})
	if i < 0 {
		return nil
	}
	return f.Imports[i]
}
// importName returns the name of s,
// or "" if the import is not named.
func importName(s *ast.ImportSpec) string {
if s.Name == nil {
return ""
}
return s.Name.Name
}
// importPath returns the unquoted import path of s,
// or "" if the path is not properly quoted.
func importPath(s *ast.ImportSpec) string {
t, err := strconv.Unquote(s.Path.Value)
if err != nil {
return ""
}
return t
}
// declImports reports whether gen contains an import of path.
// It returns false if gen is not an import declaration at all.
func declImports(gen *ast.GenDecl, path string) bool {
	if gen.Tok != token.IMPORT {
		return false
	}
	return slices.ContainsFunc(gen.Specs, func(spec ast.Spec) bool {
		return importPath(spec.(*ast.ImportSpec)) == path
	})
}
// matchLen returns the number of complete, slash-terminated path
// segments in the longest prefix shared by x and y (i.e. the count of
// '/' bytes inside the common byte prefix).
func matchLen(x, y string) int {
	segs := 0
	limit := min(len(x), len(y))
	for i := 0; i < limit; i++ {
		if x[i] != y[i] {
			break
		}
		if x[i] == '/' {
			segs++
		}
	}
	return segs
}
// isTopName returns true if n is a top-level unresolved identifier with the given name.
func isTopName(n ast.Expr, name string) bool {
id, ok := n.(*ast.Ident)
return ok && id.Name == name && id.Obj == nil
}
// Imports returns the file imports grouped by paragraph.
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
var groups [][]*ast.ImportSpec
for _, decl := range f.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok || genDecl.Tok != token.IMPORT {
break
}
group := []*ast.ImportSpec{}
var lastLine int
for _, spec := range genDecl.Specs {
importSpec := spec.(*ast.ImportSpec)
pos := importSpec.Path.ValuePos
line := fset.Position(pos).Line
if lastLine > 0 && pos > 0 && line-lastLine > 1 {
groups = append(groups, group)
group = []*ast.ImportSpec{}
}
group = append(group, importSpec)
lastLine = line
}
groups = append(groups, group)
}
return groups
}

View file

@ -0,0 +1,490 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
import (
"fmt"
"go/ast"
"reflect"
"sort"
)
// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
// before and/or after the node's children, using a Cursor describing
// the current node and providing operations on it.
//
// The return value of ApplyFunc controls the syntax tree traversal:
// a pre function returning false skips the node's children; a post
// function returning false terminates the traversal.
// See Apply for details.
type ApplyFunc func(*Cursor) bool
// Apply traverses a syntax tree recursively, starting with root,
// and calling pre and post for each node as described below.
// Apply returns the syntax tree, possibly modified.
//
// If pre is not nil, it is called for each node before the node's
// children are traversed (pre-order). If pre returns false, no
// children are traversed, and post is not called for that node.
//
// If post is not nil, and a prior call of pre didn't return false,
// post is called for each node after its children are traversed
// (post-order). If post returns false, traversal is terminated and
// Apply returns immediately.
//
// Only fields that refer to AST nodes are considered children;
// i.e., token.Pos, Scopes, Objects, and fields of basic types
// (strings, etc.) are ignored.
//
// Children are traversed in the order in which they appear in the
// respective node's struct definition. A package's files are
// traversed in the filenames' alphabetical order.
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
	// Wrap root in a synthetic parent so that even the root node can
	// be replaced through its cursor; the (possibly updated) tree is
	// read back from parent.Node in the deferred function.
	parent := &struct{ ast.Node }{root}
	defer func() {
		// abort is panicked by apply when post returns false; swallow
		// it here to terminate the traversal, but re-raise any other
		// panic unchanged.
		if r := recover(); r != nil && r != abort {
			panic(r)
		}
		result = parent.Node
	}()
	a := &application{pre: pre, post: post}
	a.apply(parent, "Node", nil, root)
	return
}
// abort is panicked by apply when a post callback returns false, and
// is swallowed by the deferred recover in Apply; its identity (a
// unique pointer) distinguishes it from genuine panics.
var abort = new(int) // singleton, to signal termination of Apply
// A Cursor describes a node encountered during Apply.
// Information about the node and its parent is available
// from the Node, Parent, Name, and Index methods.
//
// If p is a variable of type and value of the current parent node
// c.Parent(), and f is the field identifier with name c.Name(),
// the following invariants hold:
//
//	p.f == c.Node() if c.Index() < 0
//	p.f[c.Index()] == c.Node() if c.Index() >= 0
//
// The methods Replace, Delete, InsertBefore, and InsertAfter
// can be used to change the AST without disrupting Apply.
//
// This type is not to be confused with [inspector.Cursor] from
// package [golang.org/x/tools/go/ast/inspector], which provides
// stateless navigation of immutable syntax trees.
type Cursor struct {
	parent ast.Node // parent of node
	name string // name of the parent field that holds node
	iter *iterator // valid if non-nil; iteration state when node is a slice element
	node ast.Node // the current node
}
// Node returns the current Node.
func (c *Cursor) Node() ast.Node {
	return c.node
}
// Parent returns the parent of the current Node.
func (c *Cursor) Parent() ast.Node {
	return c.parent
}
// Name returns the name of the parent Node field that contains the current Node.
// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
// the filename for the current Node.
func (c *Cursor) Name() string {
	return c.name
}
// Index reports the index >= 0 of the current Node in the slice of Nodes that
// contains it, or a value < 0 if the current Node is not part of a slice.
// The index of the current node changes if InsertBefore is called while
// processing the current node.
func (c *Cursor) Index() int {
	if c.iter == nil {
		return -1
	}
	return c.iter.index
}
// field returns the parent struct field (named c.name) that holds the
// current node.
func (c *Cursor) field() reflect.Value {
	parent := reflect.Indirect(reflect.ValueOf(c.parent))
	return parent.FieldByName(c.name)
}
// Replace replaces the current Node with n.
// The replacement node is not walked by Apply.
func (c *Cursor) Replace(n ast.Node) {
	if _, ok := c.node.(*ast.File); ok {
		// Special case: package files live in the Package.Files map,
		// not in a struct field, so replace via the map.
		file, ok := n.(*ast.File)
		if !ok {
			panic("attempt to replace *ast.File with non-*ast.File")
		}
		c.parent.(*ast.Package).Files[c.name] = file
		return
	}

	v := c.field()
	if i := c.Index(); i >= 0 {
		// Current node is an element of a slice field; address it.
		v = v.Index(i)
	}
	v.Set(reflect.ValueOf(n))
}
// Delete deletes the current Node from its containing slice.
// If the current Node is not part of a slice, Delete panics.
// As a special case, if the current node is a package file,
// Delete removes it from the package's Files map.
func (c *Cursor) Delete() {
	if _, ok := c.node.(*ast.File); ok {
		delete(c.parent.(*ast.Package).Files, c.name)
		return
	}

	i := c.Index()
	if i < 0 {
		panic("Delete node not contained in slice")
	}
	v := c.field()
	l := v.Len()
	// Shift the tail left over the deleted element, zero the freed
	// last slot, and shrink the slice by one.
	reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
	v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
	v.SetLen(l - 1)

	// Compensate the iterator so the element that slid into position
	// i is not skipped.
	c.iter.step--
}
// InsertAfter inserts n after the current Node in its containing slice.
// If the current Node is not part of a slice, InsertAfter panics.
// Apply does not walk n.
func (c *Cursor) InsertAfter(n ast.Node) {
	i := c.Index()
	if i < 0 {
		panic("InsertAfter node not contained in slice")
	}
	v := c.field()
	// Grow the slice by one zero element, then shift the tail right
	// to open a hole at index i+1 for n.
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
	v.Index(i + 1).Set(reflect.ValueOf(n))

	// Advance the iterator an extra step so Apply does not walk the
	// inserted node.
	c.iter.step++
}
// InsertBefore inserts n before the current Node in its containing slice.
// If the current Node is not part of a slice, InsertBefore panics.
// Apply will not walk n.
func (c *Cursor) InsertBefore(n ast.Node) {
	i := c.Index()
	if i < 0 {
		panic("InsertBefore node not contained in slice")
	}
	v := c.field()
	// Grow the slice by one zero element, then shift the current node
	// and its successors right to open a hole at index i for n.
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
	v.Index(i).Set(reflect.ValueOf(n))

	// The current node moved up by one index; bump the iterator so
	// traversal continues at the same node (and skips n).
	c.iter.index++
}
// application carries all the shared data so we can pass it around cheaply.
type application struct {
	pre, post ApplyFunc
	cursor Cursor // reused by apply to avoid a per-node heap allocation
	iter iterator // reused by applyList to avoid a per-slice heap allocation
}
// apply visits node n — held by field parent.name, indexed by iter if
// the field is a slice — invoking the pre callback, walking n's
// children, then invoking the post callback.
func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
	// convert typed nil into untyped nil
	if v := reflect.ValueOf(n); v.Kind() == reflect.Pointer && v.IsNil() {
		n = nil
	}

	// avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
	saved := a.cursor
	a.cursor.parent = parent
	a.cursor.name = name
	a.cursor.iter = iter
	a.cursor.node = n

	if a.pre != nil && !a.pre(&a.cursor) {
		// pre returned false: skip children and post for this node.
		a.cursor = saved
		return
	}

	// walk children
	// (the order of the cases matches the order of the corresponding node types in go/ast)
	switch n := n.(type) {
	case nil:
		// nothing to do

	// Comments and fields
	case *ast.Comment:
		// nothing to do

	case *ast.CommentGroup:
		if n != nil {
			a.applyList(n, "List")
		}

	case *ast.Field:
		a.apply(n, "Doc", nil, n.Doc)
		a.applyList(n, "Names")
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Tag", nil, n.Tag)
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.FieldList:
		a.applyList(n, "List")

	// Expressions
	case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
		// nothing to do

	case *ast.Ellipsis:
		a.apply(n, "Elt", nil, n.Elt)

	case *ast.FuncLit:
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Body", nil, n.Body)

	case *ast.CompositeLit:
		a.apply(n, "Type", nil, n.Type)
		a.applyList(n, "Elts")

	case *ast.ParenExpr:
		a.apply(n, "X", nil, n.X)

	case *ast.SelectorExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Sel", nil, n.Sel)

	case *ast.IndexExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Index", nil, n.Index)

	case *ast.IndexListExpr:
		a.apply(n, "X", nil, n.X)
		a.applyList(n, "Indices")

	case *ast.SliceExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Low", nil, n.Low)
		a.apply(n, "High", nil, n.High)
		a.apply(n, "Max", nil, n.Max)

	case *ast.TypeAssertExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Type", nil, n.Type)

	case *ast.CallExpr:
		a.apply(n, "Fun", nil, n.Fun)
		a.applyList(n, "Args")

	case *ast.StarExpr:
		a.apply(n, "X", nil, n.X)

	case *ast.UnaryExpr:
		a.apply(n, "X", nil, n.X)

	case *ast.BinaryExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Y", nil, n.Y)

	case *ast.KeyValueExpr:
		a.apply(n, "Key", nil, n.Key)
		a.apply(n, "Value", nil, n.Value)

	// Types
	case *ast.ArrayType:
		a.apply(n, "Len", nil, n.Len)
		a.apply(n, "Elt", nil, n.Elt)

	case *ast.StructType:
		a.apply(n, "Fields", nil, n.Fields)

	case *ast.FuncType:
		if tparams := n.TypeParams; tparams != nil {
			a.apply(n, "TypeParams", nil, tparams)
		}
		a.apply(n, "Params", nil, n.Params)
		a.apply(n, "Results", nil, n.Results)

	case *ast.InterfaceType:
		a.apply(n, "Methods", nil, n.Methods)

	case *ast.MapType:
		a.apply(n, "Key", nil, n.Key)
		a.apply(n, "Value", nil, n.Value)

	case *ast.ChanType:
		a.apply(n, "Value", nil, n.Value)

	// Statements
	case *ast.BadStmt:
		// nothing to do

	case *ast.DeclStmt:
		a.apply(n, "Decl", nil, n.Decl)

	case *ast.EmptyStmt:
		// nothing to do

	case *ast.LabeledStmt:
		a.apply(n, "Label", nil, n.Label)
		a.apply(n, "Stmt", nil, n.Stmt)

	case *ast.ExprStmt:
		a.apply(n, "X", nil, n.X)

	case *ast.SendStmt:
		a.apply(n, "Chan", nil, n.Chan)
		a.apply(n, "Value", nil, n.Value)

	case *ast.IncDecStmt:
		a.apply(n, "X", nil, n.X)

	case *ast.AssignStmt:
		a.applyList(n, "Lhs")
		a.applyList(n, "Rhs")

	case *ast.GoStmt:
		a.apply(n, "Call", nil, n.Call)

	case *ast.DeferStmt:
		a.apply(n, "Call", nil, n.Call)

	case *ast.ReturnStmt:
		a.applyList(n, "Results")

	case *ast.BranchStmt:
		a.apply(n, "Label", nil, n.Label)

	case *ast.BlockStmt:
		a.applyList(n, "List")

	case *ast.IfStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Cond", nil, n.Cond)
		a.apply(n, "Body", nil, n.Body)
		a.apply(n, "Else", nil, n.Else)

	case *ast.CaseClause:
		a.applyList(n, "List")
		a.applyList(n, "Body")

	case *ast.SwitchStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Tag", nil, n.Tag)
		a.apply(n, "Body", nil, n.Body)

	case *ast.TypeSwitchStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Assign", nil, n.Assign)
		a.apply(n, "Body", nil, n.Body)

	case *ast.CommClause:
		a.apply(n, "Comm", nil, n.Comm)
		a.applyList(n, "Body")

	case *ast.SelectStmt:
		a.apply(n, "Body", nil, n.Body)

	case *ast.ForStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Cond", nil, n.Cond)
		a.apply(n, "Post", nil, n.Post)
		a.apply(n, "Body", nil, n.Body)

	case *ast.RangeStmt:
		a.apply(n, "Key", nil, n.Key)
		a.apply(n, "Value", nil, n.Value)
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Body", nil, n.Body)

	// Declarations
	case *ast.ImportSpec:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Name", nil, n.Name)
		a.apply(n, "Path", nil, n.Path)
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.ValueSpec:
		a.apply(n, "Doc", nil, n.Doc)
		a.applyList(n, "Names")
		a.apply(n, "Type", nil, n.Type)
		a.applyList(n, "Values")
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.TypeSpec:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Name", nil, n.Name)
		if tparams := n.TypeParams; tparams != nil {
			a.apply(n, "TypeParams", nil, tparams)
		}
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.BadDecl:
		// nothing to do

	case *ast.GenDecl:
		a.apply(n, "Doc", nil, n.Doc)
		a.applyList(n, "Specs")

	case *ast.FuncDecl:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Recv", nil, n.Recv)
		a.apply(n, "Name", nil, n.Name)
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Body", nil, n.Body)

	// Files and packages
	case *ast.File:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Name", nil, n.Name)
		a.applyList(n, "Decls")
		// Don't walk n.Comments; they have either been walked already if
		// they are Doc comments, or they can be easily walked explicitly.

	case *ast.Package:
		// collect and sort names for reproducible behavior
		var names []string
		for name := range n.Files {
			names = append(names, name)
		}
		sort.Strings(names)
		for _, name := range names {
			a.apply(n, name, nil, n.Files[name])
		}

	default:
		panic(fmt.Sprintf("Apply: unexpected node type %T", n))
	}

	if a.post != nil && !a.post(&a.cursor) {
		// Terminate the entire traversal; recovered in Apply.
		panic(abort)
	}

	a.cursor = saved
}
// An iterator controls iteration over a slice of nodes.
type iterator struct {
	index, step int // current position, and how far to advance after each apply call
}
// applyList walks each element of the slice field parent.name,
// re-reading the field on every iteration so that cursor mutations
// (Delete, InsertBefore, InsertAfter) performed by callbacks are
// observed.
func (a *application) applyList(parent ast.Node, name string) {
	// avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
	saved := a.iter
	a.iter.index = 0
	for {
		// must reload parent.name each time, since cursor modifications might change it
		v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
		if a.iter.index >= v.Len() {
			break
		}

		// element x may be nil in a bad AST - be cautious
		var x ast.Node
		if e := v.Index(a.iter.index); e.IsValid() {
			x = e.Interface().(ast.Node)
		}

		// step defaults to 1 but may be adjusted by the cursor's
		// Delete/InsertAfter methods during the apply call below.
		a.iter.step = 1
		a.apply(parent, name, &a.iter, x)
		a.iter.index += a.iter.step
	}
	a.iter = saved
}

View file

@ -0,0 +1,13 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package astutil
import "go/ast"
// Unparen returns e with any enclosing parentheses stripped.
//
// Deprecated: use [ast.Unparen].
//
// The body must remain exactly this single call so that the
// //go:fix inline directive can rewrite callers to ast.Unparen.
//
//go:fix inline
func Unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) }

View file

@ -0,0 +1,41 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package generated defines an analyzer whose result makes it
// convenient to skip diagnostics within generated files.
package generated
import (
"go/ast"
"go/token"
"reflect"
"golang.org/x/tools/go/analysis"
)
// Analyzer records, for each package, which of its files are
// code-generated (per the convention recognized by ast.IsGenerated),
// exposing the answer to other analyzers as a *Result.
var Analyzer = &analysis.Analyzer{
	Name:       "generated",
	Doc:        "detect which Go files are generated",
	URL:        "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/generated",
	ResultType: reflect.TypeFor[*Result](),
	Run: func(pass *analysis.Pass) (any, error) {
		generated := map[*token.File]bool{}
		for _, f := range pass.Files {
			if ast.IsGenerated(f) {
				generated[pass.Fset.File(f.FileStart)] = true
			}
		}
		return &Result{fset: pass.Fset, generatedFiles: generated}, nil
	},
}
type Result struct {
fset *token.FileSet
generatedFiles map[*token.File]bool
}
// IsGenerated reports whether the position is within a generated file.
func (r *Result) IsGenerated(pos token.Pos) bool {
return r.generatedFiles[r.fset.File(pos)]
}

View file

@ -0,0 +1,15 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package goplsexport provides various backdoors to not-yet-published
// parts of x/tools that are needed by gopls.
package goplsexport
import "golang.org/x/tools/go/analysis"
var (
	// These hooks expose unexported modernize analyzers to gopls.
	// NOTE(review): presumably assigned by the modernize package at
	// init time (see the commented-out expressions); the assignments
	// are not visible in this file — confirm against the modernize
	// package before relying on non-nil values.
	ErrorsAsTypeModernizer *analysis.Analyzer // = modernize.errorsastypeAnalyzer
	StdIteratorsModernizer *analysis.Analyzer // = modernize.stditeratorsAnalyzer
	PlusBuildModernizer    *analysis.Analyzer // = modernize.plusbuildAnalyzer
)

View file

@ -0,0 +1,867 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inline
// This file defines the analysis of the callee function.
import (
"bytes"
"encoding/gob"
"fmt"
"go/ast"
"go/parser"
"go/token"
"go/types"
"slices"
"strings"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/astutil"
"golang.org/x/tools/internal/typeparams"
"golang.org/x/tools/internal/typesinternal"
)
// A Callee holds information about an inlinable function. Gob-serializable.
//
// Its only field is unexported, so serialization goes through the
// custom GobEncode/GobDecode methods on *Callee.
type Callee struct {
	impl gobCallee
}

// String returns the callee's user-friendly name,
// e.g. "pkg.Func" or "(pkg.T).Method" (see AnalyzeCallee).
func (callee *Callee) String() string { return callee.impl.Name }
// gobCallee is the serializable payload of a Callee.
// All fields are exported so encoding/gob can marshal them.
type gobCallee struct {
	Content []byte // file content, compacted to a single func decl

	// results of type analysis (does not reach go/types data structures)
	PkgPath          string                 // package path of declaring package
	Name             string                 // user-friendly name for error messages
	Unexported       []string               // names of free objects that are unexported
	FreeRefs         []freeRef              // locations of references to free objects
	FreeObjs         []object               // descriptions of free objects
	ValidForCallStmt bool                   // function body is "return expr" where expr is f() or <-ch
	NumResults       int                    // number of results (according to type, not ast.FieldList)
	Params           []*paramInfo           // information about parameters (incl. receiver)
	TypeParams       []*paramInfo           // information about type parameters
	Results          []*paramInfo           // information about result variables
	Effects          []int                  // order in which parameters are evaluated (see calleefx)
	HasDefer         bool                   // uses defer
	HasBareReturn    bool                   // uses bare return in non-void function
	Returns          [][]returnOperandFlags // metadata about result expressions for each return
	Labels           []string               // names of all control labels
	Falcon           falconResult           // falcon constraint system
}
// returnOperandFlags records metadata about a single result expression in a return
// statement.
type returnOperandFlags int

// Bit flags describing one return operand.
const (
	// nonTrivialResult marks an operand whose implicit conversion to the
	// corresponding result type is non-trivial.
	nonTrivialResult returnOperandFlags = 1
	// untypedNilResult marks an operand that is the untyped nil literal.
	untypedNilResult returnOperandFlags = 2
)
// A freeRef records a reference to a free object. Gob-serializable.
// (This means free relative to the FuncDecl as a whole, i.e. excluding parameters.)
//
// Offsets are relative to the FuncDecl so that they remain valid after
// the content is compacted to "package _\n<decl>" (see AnalyzeCallee).
type freeRef struct {
	Offset int // byte offset of the reference relative to the FuncDecl
	Object int // index into Callee.freeObjs
}
// An object abstracts a free types.Object referenced by the callee. Gob-serializable.
type object struct {
	Name    string // Object.Name()
	Kind    string // one of {var,func,const,type,pkgname,nil,builtin}
	PkgPath string // path of object's package (or imported package if kind="pkgname")
	PkgName string // name of object's package (or imported package if kind="pkgname")
	// TODO(rfindley): should we also track LocalPkgName here? Do we want to
	// preserve the local package name?
	ValidPos bool      // Object.Pos().IsValid()
	Shadow   shadowMap // shadowing info for the object's refs
}
// AnalyzeCallee analyzes a function that is a candidate for inlining
// and returns a Callee that describes it. The Callee object, which is
// serializable, can be passed to one or more subsequent calls to
// Inline, each with a different Caller.
//
// This design allows separate analysis of callers and callees in the
// golang.org/x/tools/go/analysis framework: the inlining information
// about a callee can be recorded as a "fact".
//
// The content should be the actual input to the compiler, not the
// apparent source file according to any //line directives that
// may be present within it.
func AnalyzeCallee(logf func(string, ...any), fset *token.FileSet, pkg *types.Package, info *types.Info, decl *ast.FuncDecl, content []byte) (*Callee, error) {
	checkInfoFields(info)

	// The client is expected to have determined that the callee
	// is a function with a declaration (not a built-in or var).
	fn := info.Defs[decl.Name].(*types.Func)
	sig := fn.Type().(*types.Signature)

	logf("analyzeCallee %v @ %v", fn, fset.PositionFor(decl.Pos(), false))

	// Create user-friendly name ("pkg.Func" or "(pkg.T).Method")
	var name string
	if sig.Recv() == nil {
		name = fmt.Sprintf("%s.%s", fn.Pkg().Name(), fn.Name())
	} else {
		name = fmt.Sprintf("(%s).%s", types.TypeString(sig.Recv().Type(), (*types.Package).Name), fn.Name())
	}

	if decl.Body == nil {
		return nil, fmt.Errorf("cannot inline function %s as it has no body", name)
	}

	// Record the location of all free references in the FuncDecl.
	// (Parameters are not free by this definition.)
	var (
		fieldObjs    = fieldObjs(sig)
		freeObjIndex = make(map[types.Object]int)
		freeObjs     []object
		freeRefs     []freeRef // free refs that may need renaming
		unexported   []string  // free refs to unexported objects, for later error checks
	)
	var f func(n ast.Node, stack []ast.Node) bool
	var stack []ast.Node
	stack = append(stack, decl.Type) // for scope of function itself
	visit := func(n ast.Node, stack []ast.Node) { astutil.PreorderStack(n, stack, f) }
	f = func(n ast.Node, stack []ast.Node) bool {
		switch n := n.(type) {
		case *ast.SelectorExpr:
			// Check selections of free fields/methods.
			if sel, ok := info.Selections[n]; ok &&
				!within(sel.Obj().Pos(), decl) &&
				!n.Sel.IsExported() {
				sym := fmt.Sprintf("(%s).%s", info.TypeOf(n.X), n.Sel.Name)
				unexported = append(unexported, sym)
			}

			// Don't recur into SelectorExpr.Sel.
			visit(n.X, stack)
			return false

		case *ast.CompositeLit:
			// Check for struct literals that refer to unexported fields,
			// whether keyed or unkeyed. (Logic assumes well-typedness.)
			litType := typeparams.Deref(info.TypeOf(n))
			if s, ok := typeparams.CoreType(litType).(*types.Struct); ok {
				if n.Type != nil {
					visit(n.Type, stack)
				}
				for i, elt := range n.Elts {
					var field *types.Var
					var value ast.Expr
					if kv, ok := elt.(*ast.KeyValueExpr); ok {
						field = info.Uses[kv.Key.(*ast.Ident)].(*types.Var)
						value = kv.Value
					} else {
						// Unkeyed literal: field i corresponds to element i.
						field = s.Field(i)
						value = elt
					}
					if !within(field.Pos(), decl) && !field.Exported() {
						sym := fmt.Sprintf("(%s).%s", litType, field.Name())
						unexported = append(unexported, sym)
					}

					// Don't recur into KeyValueExpr.Key.
					visit(value, stack)
				}
				return false
			}

		case *ast.Ident:
			if obj, ok := info.Uses[n]; ok {
				// Methods and fields are handled by SelectorExpr and CompositeLit.
				if isField(obj) || isMethod(obj) {
					panic(obj)
				}

				// Inv: id is a lexical reference.

				// A reference to an unexported package-level declaration
				// cannot be inlined into another package.
				if !n.IsExported() &&
					obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() {
					unexported = append(unexported, n.Name)
				}

				// Record free reference (incl. self-reference).
				if obj == fn || !within(obj.Pos(), decl) {
					objidx, ok := freeObjIndex[obj]
					if !ok {
						// First reference to this object: describe it.
						objidx = len(freeObjIndex)
						var pkgPath, pkgName string
						if pn, ok := obj.(*types.PkgName); ok {
							pkgPath = pn.Imported().Path()
							pkgName = pn.Imported().Name()
						} else if obj.Pkg() != nil {
							pkgPath = obj.Pkg().Path()
							pkgName = obj.Pkg().Name()
						}
						freeObjs = append(freeObjs, object{
							Name:     obj.Name(),
							Kind:     objectKind(obj),
							PkgName:  pkgName,
							PkgPath:  pkgPath,
							ValidPos: obj.Pos().IsValid(),
						})
						freeObjIndex[obj] = objidx
					}

					freeObjs[objidx].Shadow = freeObjs[objidx].Shadow.add(info, fieldObjs, obj.Name(), stack)

					freeRefs = append(freeRefs, freeRef{
						Offset: int(n.Pos() - decl.Pos()),
						Object: objidx,
					})
				}
			}
		}
		return true
	}
	visit(decl, stack)

	// Analyze callee body for "return expr" form,
	// where expr is f() or <-ch. These forms are
	// safe to inline as a standalone statement.
	validForCallStmt := false
	if len(decl.Body.List) != 1 {
		// not just a return statement
	} else if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 {
		validForCallStmt = func() bool {
			switch expr := ast.Unparen(ret.Results[0]).(type) {
			case *ast.CallExpr: // f(x)
				callee := typeutil.Callee(info, expr)
				if callee == nil {
					return false // conversion T(x)
				}

				// The only non-void built-in functions that may be
				// called as a statement are copy and recover
				// (though arguably a call to recover should never
				// be inlined as that changes its behavior).
				if builtin, ok := callee.(*types.Builtin); ok {
					return builtin.Name() == "copy" ||
						builtin.Name() == "recover"
				}

				return true // ordinary call f()

			case *ast.UnaryExpr: // <-x
				return expr.Op == token.ARROW // channel receive <-ch
			}

			// No other expressions are valid statements.
			return false
		}()
	}

	// Record information about control flow in the callee
	// (but not any nested functions).
	var (
		hasDefer      = false
		hasBareReturn = false
		returnInfo    [][]returnOperandFlags
		labels        []string
	)
	ast.Inspect(decl.Body, func(n ast.Node) bool {
		switch n := n.(type) {
		case *ast.FuncLit:
			return false // prune traversal
		case *ast.DeferStmt:
			hasDefer = true
		case *ast.LabeledStmt:
			labels = append(labels, n.Label.Name)
		case *ast.ReturnStmt:

			// Are implicit assignment conversions
			// to result variables all trivial?
			var resultInfo []returnOperandFlags
			if len(n.Results) > 0 {
				argInfo := func(i int) (ast.Expr, types.Type) {
					expr := n.Results[i]
					return expr, info.TypeOf(expr)
				}
				if len(n.Results) == 1 && sig.Results().Len() > 1 {
					// Spread return: return f() where f.Results > 1.
					// There is no operand expression per result.
					tuple := info.TypeOf(n.Results[0]).(*types.Tuple)
					argInfo = func(i int) (ast.Expr, types.Type) {
						return nil, tuple.At(i).Type()
					}
				}
				for i := range sig.Results().Len() {
					expr, typ := argInfo(i)
					var flags returnOperandFlags
					if typ == types.Typ[types.UntypedNil] { // untyped nil is preserved by go/types
						flags |= untypedNilResult
					}
					if !trivialConversion(info.Types[expr].Value, typ, sig.Results().At(i).Type()) {
						flags |= nonTrivialResult
					}
					resultInfo = append(resultInfo, flags)
				}
			} else if sig.Results().Len() > 0 {
				hasBareReturn = true
			}
			returnInfo = append(returnInfo, resultInfo)
		}
		return true
	})

	// Reject attempts to inline cgo-generated functions.
	for _, obj := range freeObjs {
		// There are others (iconst fconst sconst fpvar macro)
		// but this is probably sufficient.
		if strings.HasPrefix(obj.Name, "_Cfunc_") ||
			strings.HasPrefix(obj.Name, "_Ctype_") ||
			strings.HasPrefix(obj.Name, "_Cvar_") {
			return nil, fmt.Errorf("cannot inline cgo-generated functions")
		}
	}

	// Compact content to just the FuncDecl.
	//
	// As a space optimization, we don't retain the complete
	// callee file content; all we need is "package _; func f() { ... }".
	// This reduces the size of analysis facts.
	//
	// Offsets in the callee information are "relocatable"
	// since they are all relative to the FuncDecl.

	content = append([]byte("package _\n"),
		content[offsetOf(fset, decl.Pos()):offsetOf(fset, decl.End())]...)

	// Sanity check: re-parse the compacted content.
	if _, _, err := parseCompact(content); err != nil {
		return nil, err
	}

	params, results, effects, falcon := analyzeParams(logf, fset, info, decl)
	tparams := analyzeTypeParams(logf, fset, info, decl)
	return &Callee{gobCallee{
		Content:          content,
		PkgPath:          pkg.Path(),
		Name:             name,
		Unexported:       unexported,
		FreeObjs:         freeObjs,
		FreeRefs:         freeRefs,
		ValidForCallStmt: validForCallStmt,
		NumResults:       sig.Results().Len(),
		Params:           params,
		TypeParams:       tparams,
		Results:          results,
		Effects:          effects,
		HasDefer:         hasDefer,
		HasBareReturn:    hasBareReturn,
		Returns:          returnInfo,
		Labels:           labels,
		Falcon:           falcon,
	}}, nil
}
// parseCompact parses a Go source file of the form "package _\n func f() { ... }"
// and returns the sole function declaration.
func parseCompact(content []byte) (*token.FileSet, *ast.FuncDecl, error) {
	const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "callee.go", content, mode)
	if err != nil {
		return nil, nil, fmt.Errorf("internal error: cannot compact file: %v", err)
	}
	// By construction the first (and only) declaration is the func.
	decl := file.Decls[0].(*ast.FuncDecl)
	return fset, decl, nil
}
// A paramInfo records information about a callee receiver, parameter, or result variable.
type paramInfo struct {
	Name        string    // parameter name (may be blank, or even "")
	Index       int       // index within signature
	IsResult    bool      // false for receiver or parameter, true for result variable
	IsInterface bool      // parameter has a (non-type parameter) interface type
	Assigned    bool      // parameter appears on left side of an assignment statement
	Escapes     bool      // parameter has its address taken
	Refs        []refInfo // information about references to parameter within body
	Shadow      shadowMap // shadowing info for the above refs; see [shadowMap]
	FalconType  string    // name of this parameter's type (if basic) in the falcon system
}
// A refInfo records information about one reference to a parameter
// (or type parameter) within the callee body. Offsets are relative
// to the FuncDecl, like those in freeRef.
type refInfo struct {
	Offset           int  // FuncDecl-relative byte offset of parameter ref within body
	Assignable       bool // ref appears in context of assignment to known type
	IfaceAssignment  bool // ref is being assigned to an interface
	AffectsInference bool // ref type may affect type inference

	// IsSelectionOperand indicates whether the parameter reference is the
	// operand of a selection (param.f). If so, and param's argument is itself
	// a receiver parameter (a common case), we don't need to desugar (&v or *ptr)
	// the selection: if param.Method is a valid selection, then so is param.fieldOrMethod.
	IsSelectionOperand bool
}
// analyzeParams computes information about parameters of the function declared by decl,
// including a simple "address taken" escape analysis.
//
// It returns two new arrays, one of the receiver and parameters, and
// the other of the result variables of the function.
//
// The input must be well-typed.
func analyzeParams(logf func(string, ...any), fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) (params, results []*paramInfo, effects []int, _ falconResult) {
	sig := signature(fset, info, decl)

	paramInfos := make(map[*types.Var]*paramInfo)
	{
		newParamInfo := func(param *types.Var, isResult bool) *paramInfo {
			info := &paramInfo{
				Name:        param.Name(),
				IsResult:    isResult,
				Index:       len(paramInfos), // receiver (if any) is index 0
				IsInterface: isNonTypeParamInterface(param.Type()),
			}
			paramInfos[param] = info
			return info
		}
		if sig.Recv() != nil {
			params = append(params, newParamInfo(sig.Recv(), false))
		}
		for i := 0; i < sig.Params().Len(); i++ {
			params = append(params, newParamInfo(sig.Params().At(i), false))
		}
		for i := 0; i < sig.Results().Len(); i++ {
			results = append(results, newParamInfo(sig.Results().At(i), true))
		}
	}

	// Search function body for operations &x, x.f(), and x = y
	// where x is a parameter, and record it.
	escape(info, decl, func(v *types.Var, escapes bool) {
		if info := paramInfos[v]; info != nil {
			if escapes {
				info.Escapes = true
			} else {
				info.Assigned = true
			}
		}
	})

	// Record locations of all references to parameters.
	// And record the set of intervening definitions for each parameter.
	//
	// TODO(adonovan): combine this traversal with the one that computes
	// FreeRefs. The tricky part is that calleefx needs this one first.
	fieldObjs := fieldObjs(sig)
	var stack []ast.Node
	stack = append(stack, decl.Type) // for scope of function itself
	astutil.PreorderStack(decl.Body, stack, func(n ast.Node, stack []ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok {
			if v, ok := info.Uses[id].(*types.Var); ok {
				if pinfo, ok := paramInfos[v]; ok {
					// Record ref information, and any intervening (shadowing) names.
					//
					// If the parameter v has an interface type, and the reference id
					// appears in a context where assignability rules apply, there may be
					// an implicit interface-to-interface widening. In that case it is
					// not necessary to insert an explicit conversion from the argument
					// to the parameter's type.
					//
					// Contrapositively, if param is not an interface type, then the
					// assignment may lose type information, for example in the case that
					// the substituted expression is an untyped constant or unnamed type.
					stack = append(stack, n) // (the two calls below want n)
					assignable, ifaceAssign, affectsInference := analyzeAssignment(info, stack)
					ref := refInfo{
						Offset:             int(n.Pos() - decl.Pos()),
						Assignable:         assignable,
						IfaceAssignment:    ifaceAssign,
						AffectsInference:   affectsInference,
						IsSelectionOperand: isSelectionOperand(stack),
					}
					pinfo.Refs = append(pinfo.Refs, ref)
					pinfo.Shadow = pinfo.Shadow.add(info, fieldObjs, pinfo.Name, stack)
				}
			}
		}
		return true
	})

	// Compute subset and order of parameters that are strictly evaluated.
	// (Depends on Refs computed above.)
	effects = calleefx(info, decl.Body, paramInfos)
	logf("effects list = %v", effects)

	falcon := falcon(logf, fset, paramInfos, info, decl)

	return params, results, effects, falcon
}
// analyzeTypeParams computes information about the type parameters of the function declared by decl.
//
// It returns one *paramInfo per type parameter (receiver type
// parameters first, then the function's own), recording only the
// name, reference offsets, and shadowing — the other paramInfo
// fields do not apply to type parameters.
func analyzeTypeParams(_ logger, fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) []*paramInfo {
	sig := signature(fset, info, decl)
	paramInfos := make(map[*types.TypeName]*paramInfo)
	var params []*paramInfo
	collect := func(tpl *types.TypeParamList) {
		for i := range tpl.Len() {
			typeName := tpl.At(i).Obj()
			info := &paramInfo{Name: typeName.Name()}
			params = append(params, info)
			paramInfos[typeName] = info
		}
	}
	collect(sig.RecvTypeParams())
	collect(sig.TypeParams())

	// Find references.
	// We don't care about most of the properties that matter for parameter references:
	// a type is immutable, cannot have its address taken, and does not undergo conversions.
	// TODO(jba): can we nevertheless combine this with the traversal in analyzeParams?
	var stack []ast.Node
	stack = append(stack, decl.Type) // for scope of function itself
	astutil.PreorderStack(decl.Body, stack, func(n ast.Node, stack []ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok {
			if v, ok := info.Uses[id].(*types.TypeName); ok {
				if pinfo, ok := paramInfos[v]; ok {
					ref := refInfo{Offset: int(n.Pos() - decl.Pos())}
					pinfo.Refs = append(pinfo.Refs, ref)
					pinfo.Shadow = pinfo.Shadow.add(info, nil, pinfo.Name, stack)
				}
			}
		}
		return true
	})
	return params
}
func signature(fset *token.FileSet, info *types.Info, decl *ast.FuncDecl) *types.Signature {
fnobj, ok := info.Defs[decl.Name]
if !ok {
panic(fmt.Sprintf("%s: no func object for %q",
fset.PositionFor(decl.Name.Pos(), false), decl.Name)) // ill-typed?
}
return fnobj.Type().(*types.Signature)
}
// -- callee helpers --

// analyzeAssignment looks at the given stack, and analyzes certain
// attributes of the innermost expression.
//
// In all cases we 'fail closed' when we cannot detect (or for simplicity
// choose not to detect) the condition in question, meaning we err on the side
// of the more restrictive rule. This is noted for each result below.
//
//   - assignable reports whether the expression is used in a position where
//     assignability rules apply, such as in an actual assignment, as call
//     argument, or in a send to a channel. Defaults to 'false'. If assignable
//     is false, the other two results are irrelevant.
//   - ifaceAssign reports whether that assignment is to an interface type.
//     This is important as we want to preserve the concrete type in that
//     assignment. Defaults to 'true'. Notably, if the assigned type is a type
//     parameter, we assume that it could have interface type.
//   - affectsInference is (somewhat vaguely) defined as whether or not the
//     type of the operand may affect the type of the surrounding syntax,
//     through type inference. It is infeasible to completely reverse engineer
//     type inference, so we over approximate: if the expression is an argument
//     to a call to a generic function (but not method!) that uses type
//     parameters, assume that unification of that argument may affect the
//     inferred types.
func analyzeAssignment(info *types.Info, stack []ast.Node) (assignable, ifaceAssign, affectsInference bool) {
	remaining, parent, expr := exprContext(stack)
	if parent == nil {
		return false, false, false
	}

	// TODO(golang/go#70638): simplify when types.Info records implicit conversions.

	// Types do not need to match for assignment to a variable.
	if assign, ok := parent.(*ast.AssignStmt); ok {
		for i, v := range assign.Rhs {
			if v == expr {
				if i >= len(assign.Lhs) {
					return false, false, false // ill typed
				}
				// Check to see if the assignment is to an interface type.
				if i < len(assign.Lhs) {
					// TODO: We could handle spread calls here, but in current usage expr
					// is an ident.
					if id, _ := assign.Lhs[i].(*ast.Ident); id != nil && info.Defs[id] != nil {
						// Types must match for a defining identifier in a short variable
						// declaration.
						return false, false, false
					}
					// In all other cases, types should be known.
					typ := info.TypeOf(assign.Lhs[i])
					return true, typ == nil || types.IsInterface(typ), false
				}
				// Default:
				return assign.Tok == token.ASSIGN, true, false
			}
		}
	}

	// Types do not need to match for an initializer with known type.
	if spec, ok := parent.(*ast.ValueSpec); ok && spec.Type != nil {
		if slices.Contains(spec.Values, expr) {
			typ := info.TypeOf(spec.Type)
			return true, typ == nil || types.IsInterface(typ), false
		}
	}

	// Types do not need to match for index expressions.
	if ix, ok := parent.(*ast.IndexExpr); ok {
		if ix.Index == expr {
			typ := info.TypeOf(ix.X)
			if typ == nil {
				return true, true, false
			}
			m, _ := typeparams.CoreType(typ).(*types.Map)
			return true, m == nil || types.IsInterface(m.Key()), false
		}
	}

	// Types do not need to match for composite literal keys, values, or
	// fields.
	if kv, ok := parent.(*ast.KeyValueExpr); ok {
		var under types.Type
		if len(remaining) > 0 {
			if complit, ok := remaining[len(remaining)-1].(*ast.CompositeLit); ok {
				if typ := info.TypeOf(complit); typ != nil {
					// Unpointer to allow for pointers to slices or arrays, which are
					// permitted as the types of nested composite literals without a type
					// name.
					under = typesinternal.Unpointer(typeparams.CoreType(typ))
				}
			}
		}
		if kv.Key == expr { // M{expr: ...}: assign to map key
			m, _ := under.(*types.Map)
			return true, m == nil || types.IsInterface(m.Key()), false
		}
		if kv.Value == expr {
			switch under := under.(type) {
			case interface{ Elem() types.Type }: // T{...: expr}: assign to map/array/slice element
				return true, types.IsInterface(under.Elem()), false
			case *types.Struct: // Struct{k: expr}
				if id, _ := kv.Key.(*ast.Ident); id != nil {
					for fi := range under.NumFields() {
						field := under.Field(fi)
						if info.Uses[id] == field {
							return true, types.IsInterface(field.Type()), false
						}
					}
				}
			default:
				// Unknown composite type: fail closed (assume interface).
				return true, true, false
			}
		}
	}
	if lit, ok := parent.(*ast.CompositeLit); ok {
		for i, v := range lit.Elts {
			if v == expr {
				typ := info.TypeOf(lit)
				if typ == nil {
					return true, true, false
				}
				// As in the KeyValueExpr case above, unpointer to handle pointers to
				// array/slice literals.
				under := typesinternal.Unpointer(typeparams.CoreType(typ))
				switch under := under.(type) {
				case interface{ Elem() types.Type }: // T{expr}: assign to map/array/slice element
					return true, types.IsInterface(under.Elem()), false
				case *types.Struct: // Struct{expr}: assign to unkeyed struct field
					if i < under.NumFields() {
						return true, types.IsInterface(under.Field(i).Type()), false
					}
				}
				return true, true, false
			}
		}
	}

	// Types do not need to match for values sent to a channel.
	if send, ok := parent.(*ast.SendStmt); ok {
		if send.Value == expr {
			typ := info.TypeOf(send.Chan)
			if typ == nil {
				return true, true, false
			}
			ch, _ := typeparams.CoreType(typ).(*types.Chan)
			return true, ch == nil || types.IsInterface(ch.Elem()), false
		}
	}

	// Types do not need to match for an argument to a call, unless the
	// corresponding parameter has type parameters, as in that case the
	// argument type may affect inference.
	if call, ok := parent.(*ast.CallExpr); ok {
		if _, ok := isConversion(info, call); ok {
			return false, false, false // redundant conversions are handled at the call site
		}
		// Ordinary call. Could be a call of a func, builtin, or function value.
		for i, arg := range call.Args {
			if arg == expr {
				typ := info.TypeOf(call.Fun)
				if typ == nil {
					return true, true, false
				}
				sig, _ := typeparams.CoreType(typ).(*types.Signature)
				if sig != nil {
					// Find the relevant parameter type, accounting for variadics.
					paramType := paramTypeAtIndex(sig, call, i)
					ifaceAssign := paramType == nil || types.IsInterface(paramType)
					affectsInference := false
					if fn := typeutil.StaticCallee(info, call); fn != nil {
						if sig2 := fn.Type().(*types.Signature); sig2.Recv() == nil {
							originParamType := paramTypeAtIndex(sig2, call, i)
							affectsInference = originParamType == nil || new(typeparams.Free).Has(originParamType)
						}
					}
					return true, ifaceAssign, affectsInference
				}
			}
		}
	}

	return false, false, false
}
// paramTypeAtIndex returns the effective parameter type at the given argument
// index in call, if valid.
func paramTypeAtIndex(sig *types.Signature, call *ast.CallExpr, index int) types.Type {
if plen := sig.Params().Len(); sig.Variadic() && index >= plen-1 && !call.Ellipsis.IsValid() {
if s, ok := sig.Params().At(plen - 1).Type().(*types.Slice); ok {
return s.Elem()
}
} else if index < plen {
return sig.Params().At(index).Type()
}
return nil // ill typed
}
// exprContext returns the innermost parent->child expression nodes for the
// given outer-to-inner stack, after stripping parentheses, along with the
// remaining stack up to the parent node.
//
// If no such context exists, returns (nil, nil, nil).
func exprContext(stack []ast.Node) (remaining []ast.Node, parent ast.Node, expr ast.Expr) {
expr, _ = stack[len(stack)-1].(ast.Expr)
if expr == nil {
return nil, nil, nil
}
i := len(stack) - 2
for ; i >= 0; i-- {
if pexpr, ok := stack[i].(*ast.ParenExpr); ok {
expr = pexpr
} else {
parent = stack[i]
break
}
}
if parent == nil {
return nil, nil, nil
}
// inv: i is the index of parent in the stack.
return stack[:i], parent, expr
}
// isSelectionOperand reports whether the innermost node of stack is operand
// (x) of a selection x.f.
func isSelectionOperand(stack []ast.Node) bool {
	_, parent, expr := exprContext(stack)
	// A failed type assertion on a nil parent yields ok=false,
	// so no separate nil check is needed.
	sel, ok := parent.(*ast.SelectorExpr)
	return ok && sel.X == expr
}
// A shadowMap records information about shadowing at any of the parameter's
// references within the callee decl.
//
// For each name shadowed at a reference to the parameter within the callee
// body, shadow map records the 1-based index of the callee decl parameter
// causing the shadowing, or -1, if the shadowing is not due to a callee decl.
// A value of zero (or missing) indicates no shadowing. By convention,
// self-shadowing is excluded from the map.
//
// For example, in the following callee
//
//	func f(a, b int) int {
//		c := 2 + b
//		return a + c
//	}
//
// the shadow map of a is {b: 2, c: -1}, because b is shadowed by the 2nd
// parameter. The shadow map of b is {a: 1}, because c is not shadowed at the
// use of b.
type shadowMap map[string]int
// add returns the [shadowMap] augmented by the set of names
// locally shadowed at the location of the reference in the callee
// (identified by the stack). The name of the reference itself is
// excluded.
//
// These shadowed names may not be used in a replacement expression
// for the reference.
func (s shadowMap) add(info *types.Info, paramIndexes map[types.Object]int, exclude string, stack []ast.Node) shadowMap {
	for _, node := range stack {
		scope := scopeFor(info, node)
		if scope == nil {
			continue
		}
		for _, name := range scope.Names() {
			if name == exclude {
				continue // skip self-shadowing
			}
			if s == nil {
				s = make(shadowMap) // allocate lazily
			}
			if idx, ok := paramIndexes[scope.Lookup(name)]; ok {
				s[name] = idx + 1 // shadowed by a callee parameter (1-based)
			} else {
				s[name] = -1 // shadowed by a non-parameter declaration
			}
		}
	}
	return s
}
// fieldObjs returns a map of each types.Object defined by the given signature
// to its index in the parameter list. Parameters with missing or blank name
// are skipped.
func fieldObjs(sig *types.Signature) map[types.Object]int {
m := make(map[types.Object]int)
for i := range sig.Params().Len() {
if p := sig.Params().At(i); p.Name() != "" && p.Name() != "_" {
m[p] = i
}
}
return m
}
func isField(obj types.Object) bool {
if v, ok := obj.(*types.Var); ok && v.IsField() {
return true
}
return false
}
func isMethod(obj types.Object) bool {
if f, ok := obj.(*types.Func); ok && f.Type().(*types.Signature).Recv() != nil {
return true
}
return false
}
// -- serialization --

// Compile-time assertions that *Callee provides custom gob
// (de)serialization of its unexported payload.
var (
	_ gob.GobEncoder = (*Callee)(nil)
	_ gob.GobDecoder = (*Callee)(nil)
)
// GobEncode implements [gob.GobEncoder] by encoding the
// callee's unexported gobCallee payload.
func (callee *Callee) GobEncode() ([]byte, error) {
	var buf bytes.Buffer
	enc := gob.NewEncoder(&buf)
	if err := enc.Encode(callee.impl); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// GobDecode implements [gob.GobDecoder], restoring the
// callee's unexported gobCallee payload.
func (callee *Callee) GobDecode(data []byte) error {
	dec := gob.NewDecoder(bytes.NewReader(data))
	return dec.Decode(&callee.impl)
}

View file

@ -0,0 +1,349 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inline
// This file defines the analysis of callee effects.
import (
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/internal/typesinternal"
)
// Sentinel values used in calleefx effects lists alongside
// (non-negative) parameter indices.
const (
	rinf = -1 // R∞: arbitrary read from memory
	winf = -2 // W∞: arbitrary write to memory (or unknown control)
)
// calleefx returns a list of parameter indices indicating the order
// in which parameters are first referenced during evaluation of the
// callee, relative both to each other and to other effects of the
// callee (if any), such as arbitrary reads (rinf) and arbitrary
// effects (winf), including unknown control flow. Each parameter
// that is referenced appears once in the list.
//
// For example, the effects list of this function:
//
// func f(x, y, z int) int {
// return y + x + g() + z
// }
//
// is [1 0 -2 2], indicating reads of y and x, followed by the unknown
// effects of the g() call, and finally the read of parameter z. This
// information is used during inlining to ascertain when it is safe
// for parameter references to be replaced by their corresponding
// argument expressions. Such substitutions are permitted only when
// they do not cause "write" operations (those with effects) to
// commute with "read" operations (those that have no effect but are
// not pure). Impure operations may be reordered with other impure
// operations, and pure operations may be reordered arbitrarily.
//
// The analysis ignores the effects of runtime panics, on the
// assumption that well-behaved programs shouldn't encounter them.
func calleefx(info *types.Info, body *ast.BlockStmt, paramInfos map[*types.Var]*paramInfo) []int {
	// This traversal analyzes the callee's statements (in syntax
	// form, though one could do better with SSA) to compute the
	// sequence of events of the following kinds:
	//
	// 1. read of a parameter variable.
	// 2. reads from other memory.
	// 3. writes to memory
	var effects []int          // indices of parameters, or rinf/winf (-ve)
	seen := make(map[int]bool) // set of values already present in effects
	// effect records event i, once; later occurrences are no-ops.
	effect := func(i int) {
		if !seen[i] {
			seen[i] = true
			effects = append(effects, i)
		}
	}
	// unknown is called for statements of unknown effects (or control).
	unknown := func() {
		effect(winf)

		// Ensure that all remaining parameters are "seen"
		// after we go into the unknown (unless they are
		// unreferenced by the function body). This lets us
		// not bother implementing the complete traversal into
		// control structures.
		//
		// TODO(adonovan): add them in a deterministic order.
		// (This is not a bug but determinism is good.)
		for _, pinfo := range paramInfos {
			if !pinfo.IsResult && len(pinfo.Refs) > 0 {
				effect(pinfo.Index)
			}
		}
	}

	var visitExpr func(n ast.Expr)
	var visitStmt func(n ast.Stmt) bool

	// visitExpr records the effects of evaluating n, left to right.
	visitExpr = func(n ast.Expr) {
		switch n := n.(type) {
		case *ast.Ident:
			if v, ok := info.Uses[n].(*types.Var); ok && !v.IsField() {
				// Use of global?
				if v.Parent() == v.Pkg().Scope() {
					effect(rinf) // read global var
				}

				// Use of parameter?
				if pinfo, ok := paramInfos[v]; ok && !pinfo.IsResult {
					effect(pinfo.Index) // read parameter var
				}

				// Use of local variables is ok.
			}

		case *ast.BasicLit:
			// no effect

		case *ast.FuncLit:
			// A func literal has no read or write effect
			// until called, and (most) function calls are
			// considered to have arbitrary effects.
			// So, no effect.

		case *ast.CompositeLit:
			for _, elt := range n.Elts {
				visitExpr(elt) // note: visits KeyValueExpr
			}

		case *ast.ParenExpr:
			visitExpr(n.X)

		case *ast.SelectorExpr:
			if seln, ok := info.Selections[n]; ok {
				visitExpr(n.X)

				// See types.SelectionKind for background.
				switch seln.Kind() {
				case types.MethodExpr:
					// A method expression T.f acts like a
					// reference to a func decl,
					// so it doesn't read x until called.

				case types.MethodVal, types.FieldVal:
					// A field or method value selection x.f
					// reads x if the selection indirects a pointer.
					if indirectSelection(seln) {
						effect(rinf)
					}
				}
			} else {
				// qualified identifier: treat like unqualified
				visitExpr(n.Sel)
			}

		case *ast.IndexExpr:
			if tv := info.Types[n.Index]; tv.IsType() {
				// no effect (G[T] instantiation)
			} else {
				visitExpr(n.X)
				visitExpr(n.Index)
				// NOTE(review): tv is the TypeAndValue of the *index*
				// operand, so tv.Type is e.g. int for a[i]; the
				// Slice/Pointer cases below read as if they were meant
				// for the type of n.X (the indexed operand) — confirm
				// against upstream x/tools before relying on this.
				switch tv.Type.Underlying().(type) {
				case *types.Slice, *types.Pointer: // []T, *[n]T (not string, [n]T)
					effect(rinf) // indirect read of slice/array element
				}
			}

		case *ast.IndexListExpr:
			// no effect (M[K,V] instantiation)

		case *ast.SliceExpr:
			visitExpr(n.X)
			visitExpr(n.Low)
			visitExpr(n.High)
			visitExpr(n.Max)

		case *ast.TypeAssertExpr:
			visitExpr(n.X)

		case *ast.CallExpr:
			if info.Types[n.Fun].IsType() {
				// conversion T(x)
				visitExpr(n.Args[0])
			} else {
				// call f(args)
				visitExpr(n.Fun)
				for i, arg := range n.Args {
					if i == 0 && info.Types[arg].IsType() {
						continue // new(T), make(T, n)
					}
					visitExpr(arg)
				}

				// The pure built-ins have no effects beyond
				// those of their operands (not even memory reads).
				// All other calls have unknown effects.
				if !typesinternal.CallsPureBuiltin(info, n) {
					unknown() // arbitrary effects
				}
			}

		case *ast.StarExpr:
			visitExpr(n.X)
			effect(rinf) // *ptr load or store depends on state of heap

		case *ast.UnaryExpr: // + - ! ^ & ~ <-
			visitExpr(n.X)
			if n.Op == token.ARROW {
				unknown() // effect: channel receive
			}

		case *ast.BinaryExpr:
			visitExpr(n.X)
			visitExpr(n.Y)

		case *ast.KeyValueExpr:
			visitExpr(n.Key) // may be a struct field
			visitExpr(n.Value)

		case *ast.BadExpr:
			// no effect

		case nil:
			// optional subtree

		default:
			// type syntax: unreachable given traversal
			panic(n)
		}
	}

	// visitStmt's result indicates the continuation:
	// false for return, true for the next statement.
	//
	// We could treat return as an unknown, but this way
	// yields definite effects for simple sequences like
	// {S1; S2; return}, so unreferenced parameters are
	// not spuriously added to the effects list, and thus
	// not spuriously disqualified from elimination.
	visitStmt = func(n ast.Stmt) bool {
		switch n := n.(type) {
		case *ast.DeclStmt:
			decl := n.Decl.(*ast.GenDecl)
			for _, spec := range decl.Specs {
				switch spec := spec.(type) {
				case *ast.ValueSpec:
					for _, v := range spec.Values {
						visitExpr(v)
					}

				case *ast.TypeSpec:
					// no effect
				}
			}

		case *ast.LabeledStmt:
			return visitStmt(n.Stmt)

		case *ast.ExprStmt:
			visitExpr(n.X)

		case *ast.SendStmt:
			visitExpr(n.Chan)
			visitExpr(n.Value)
			unknown() // effect: channel send

		case *ast.IncDecStmt:
			visitExpr(n.X)
			unknown() // effect: variable increment

		case *ast.AssignStmt:
			// Operands are evaluated left-to-right (all LHS before RHS),
			// then the assignment itself counts as an unknown effect
			// unless every LHS is blank or a := declaration.
			for _, lhs := range n.Lhs {
				visitExpr(lhs)
			}
			for _, rhs := range n.Rhs {
				visitExpr(rhs)
			}
			for _, lhs := range n.Lhs {
				id, _ := lhs.(*ast.Ident)
				if id != nil && id.Name == "_" {
					continue // blank assign has no effect
				}
				if n.Tok == token.DEFINE && id != nil && info.Defs[id] != nil {
					continue // new var declared by := has no effect
				}
				unknown() // assignment to existing var
				break
			}

		case *ast.GoStmt:
			visitExpr(n.Call.Fun)
			for _, arg := range n.Call.Args {
				visitExpr(arg)
			}
			unknown() // effect: create goroutine

		case *ast.DeferStmt:
			visitExpr(n.Call.Fun)
			for _, arg := range n.Call.Args {
				visitExpr(arg)
			}
			unknown() // effect: push defer

		case *ast.ReturnStmt:
			for _, res := range n.Results {
				visitExpr(res)
			}
			return false

		case *ast.BlockStmt:
			for _, stmt := range n.List {
				if !visitStmt(stmt) {
					return false
				}
			}

		case *ast.BranchStmt:
			unknown() // control flow

		case *ast.IfStmt:
			visitStmt(n.Init)
			visitExpr(n.Cond)
			unknown() // control flow

		case *ast.SwitchStmt:
			visitStmt(n.Init)
			visitExpr(n.Tag)
			unknown() // control flow

		case *ast.TypeSwitchStmt:
			visitStmt(n.Init)
			visitStmt(n.Assign)
			unknown() // control flow

		case *ast.SelectStmt:
			unknown() // control flow

		case *ast.ForStmt:
			visitStmt(n.Init)
			visitExpr(n.Cond)
			unknown() // control flow

		case *ast.RangeStmt:
			visitExpr(n.X)
			unknown() // control flow

		case *ast.EmptyStmt, *ast.BadStmt:
			// no effect

		case nil:
			// optional subtree

		default:
			panic(n)
		}
		return true
	}
	visitStmt(body)
	return effects
}

View file

@ -0,0 +1,288 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package inline implements inlining of Go function calls.
The client provides information about the caller and callee,
including the source text, syntax tree, and type information, and
the inliner returns the modified source file for the caller, or an
error if the inlining operation is invalid (for example because the
function body refers to names that are inaccessible to the caller).
Although this interface demands more information from the client
than might seem necessary, it enables smoother integration with
existing batch and interactive tools that have their own ways of
managing the processes of reading, parsing, and type-checking
packages. In particular, this package does not assume that the
caller and callee belong to the same token.FileSet or
types.Importer realms.
There are many aspects to a function call. It is the only construct
that can simultaneously bind multiple variables of different
explicit types, with implicit assignment conversions. (Neither var
nor := declarations can do that.) It defines the scope of control
labels, of return statements, and of defer statements. Arguments
and results of function calls may be tuples even though tuples are
not first-class values in Go, and a tuple-valued call expression
may be "spread" across the argument list of a call or the operands
of a return statement. All these unique features mean that in the
general case, not everything that can be expressed by a function
call can be expressed without one.
So, in general, inlining consists of modifying a function or method
call expression f(a1, ..., an) so that the name of the function f
is replaced ("literalized") by a literal copy of the function
declaration, with free identifiers suitably modified to use the
locally appropriate identifiers or perhaps constant argument
values.
Inlining must not change the semantics of the call. Semantics
preservation is crucial for clients such as codebase maintenance
tools that automatically inline all calls to designated functions
on a large scale. Such tools must not introduce subtle behavior
changes. (Fully inlining a call is dynamically observable using
reflection over the call stack, but this exception to the rule is
explicitly allowed.)
In many cases it is possible to entirely replace ("reduce") the
call by a copy of the function's body in which parameters have been
replaced by arguments. The inliner supports a number of reduction
strategies, and we expect this set to grow. Nonetheless, sound
reduction is surprisingly tricky.
The inliner is in some ways like an optimizing compiler. A compiler
is considered correct if it doesn't change the meaning of the
program in translation from source language to target language. An
optimizing compiler exploits the particulars of the input to
generate better code, where "better" usually means more efficient.
When a case is found in which it emits suboptimal code, the
compiler is improved to recognize more cases, or more rules, and
more exceptions to rules; this process has no end. Inlining is
similar except that "better" code means tidier code. The baseline
translation (literalization) is correct, but there are endless
rules--and exceptions to rules--by which the output can be
improved.
The following section lists some of the challenges, and ways in
which they can be addressed.
- All effects of the call argument expressions must be preserved,
both in their number (they must not be eliminated or repeated),
and in their order (both with respect to other arguments, and any
effects in the callee function).
This must be the case even if the corresponding parameters are
never referenced, are referenced multiple times, referenced in
a different order from the arguments, or referenced within a
nested function that may be executed an arbitrary number of
times.
Currently, parameter replacement is not applied to arguments
with effects, but with further analysis of the sequence of
strict effects within the callee we could relax this constraint.
- When not all parameters can be substituted by their arguments
(e.g. due to possible effects), if the call appears in a
statement context, the inliner may introduce a var declaration
that declares the parameter variables (with the correct types)
and assigns them to their corresponding argument values.
The rest of the function body may then follow.
For example, the call
f(1, 2)
to the function
func f(x, y int32) { stmts }
may be reduced to
{ var x, y int32 = 1, 2; stmts }.
There are many reasons why this is not always possible. For
example, true parameters are statically resolved in the same
scope, and are dynamically assigned their arguments in
parallel; but each spec in a var declaration is statically
resolved in sequence and dynamically executed in sequence, so
earlier parameters may shadow references in later ones.
- Even an argument expression as simple as ptr.x may not be
referentially transparent, because another argument may have the
effect of changing the value of ptr.
This constraint could be relaxed by some kind of alias or
escape analysis that proves that ptr cannot be mutated during
the call.
- Although constants are referentially transparent, as a matter of
style we do not wish to duplicate literals that are referenced
multiple times in the body because this undoes proper factoring.
Also, string literals may be arbitrarily large.
- If the function body consists of statements other than just
"return expr", in some contexts it may be syntactically
impossible to reduce the call. Consider:
if x := f(); cond { ... }
Go has no equivalent to Lisp's progn or Rust's blocks,
nor ML's let expressions (let param = arg in body);
its closest equivalent is func(param){body}(arg).
Reduction strategies must therefore consider the syntactic
context of the call.
In such situations we could work harder to extract a statement
context for the call, by transforming it to:
{ x := f(); if cond { ... } }
- Similarly, without the equivalent of Rust-style blocks and
first-class tuples, there is no general way to reduce a call
to a function such as
func(params) (results) { stmts; return expr }(args)
to an expression such as
{ var params = args; stmts; expr }
or even a statement such as
results = { var params = args; stmts; expr }
Consequently the declaration and scope of the result variables,
and the assignment and control-flow implications of the return
statement, must be dealt with by cases.
- A standalone call statement that calls a function whose body is
"return expr" cannot be simply replaced by the body expression
if it is not itself a call or channel receive expression; it is
necessary to explicitly discard the result using "_ = expr".
Similarly, if the body is a call expression, only calls to some
built-in functions with no result (such as copy or panic) are
permitted as statements, whereas others (such as append) return
a result that must be used, even if just by discarding.
- If a parameter or result variable is updated by an assignment
within the function body, it cannot always be safely replaced
by a variable in the caller. For example, given
func f(a int) int { a++; return a }
The call y = f(x) cannot be replaced by { x++; y = x } because
this would change the value of the caller's variable x.
Only if the caller is finished with x is this safe.
A similar argument applies to parameter or result variables
that escape: by eliminating a variable, inlining would change
the identity of the variable that escapes.
- If the function body uses 'defer' and the inlined call is not a
tail-call, inlining may delay the deferred effects.
- Because the scope of a control label is the entire function, a
call cannot be reduced if the caller and callee have intersecting
sets of control labels. (It is possible to α-rename any
conflicting ones, but our colleagues building C++ refactoring
tools report that, when tools must choose new identifiers, they
generally do a poor job.)
- Given
func f() uint8 { return 0 }
var x any = f()
reducing the call to var x any = 0 is unsound because it
discards the implicit conversion to uint8. We may need to make
each argument-to-parameter conversion explicit if the types
differ. Assignments to variadic parameters may need to
explicitly construct a slice.
An analogous problem applies to the implicit assignments in
return statements:
func g() any { return f() }
Replacing the call f() with 0 would silently lose a
conversion to uint8 and change the behavior of the program.
- When inlining a call f(1, x, g()) where those parameters are
unreferenced, we should be able to avoid evaluating 1 and x
since they are pure and thus have no effect. But x may be the
last reference to a local variable in the caller, so removing
it would cause a compilation error. Parameter substitution must
avoid making the caller's local variables unreferenced (or must
be prepared to eliminate the declaration too---this is where an
iterative framework for simplification would really help).
- An expression such as s[i] may be valid if s and i are
variables but invalid if either or both of them are constants.
For example, a negative constant index s[-1] is always out of
bounds, and even a non-negative constant index may be out of
bounds depending on the particular string constant (e.g.
"abc"[4]).
So, if a parameter participates in any expression that is
subject to additional compile-time checks when its operands are
constant, it may be unsafe to substitute that parameter by a
constant argument value (#62664).
More complex callee functions are inlinable with more elaborate and
invasive changes to the statements surrounding the call expression.
TODO(adonovan): future work:
- Handle more of the above special cases by careful analysis,
thoughtful factoring of the large design space, and thorough
test coverage.
- Compute precisely (not conservatively) when parameter
substitution would remove the last reference to a caller local
variable, and blank out the local instead of retreating from
the substitution.
- Afford the client more control such as a limit on the total
increase in line count, or a refusal to inline using the
general approach (replacing name by function literal). This
could be achieved by returning metadata alongside the result
and having the client conditionally discard the change.
- Support inlining of generic functions, replacing type parameters
by their instantiations.
- Support inlining of calls to function literals ("closures").
But note that the existing algorithm makes widespread assumptions
that the callee is a package-level function or method.
- Eliminate explicit conversions of "untyped" literals inserted
conservatively when they are redundant. For example, the
conversion int32(1) is redundant when this value is used only as a
slice index; but it may be crucial if it is used in x := int32(1)
as it changes the type of x, which may have further implications.
The conversions may also be important to the falcon analysis.
- Allow non-'go' build systems such as Bazel/Blaze a chance to
decide whether an import is accessible using logic other than
"/internal/" path segments. This could be achieved by returning
the list of added import paths instead of a text diff.
- Inlining a function from another module may change the
effective version of the Go language spec that governs it. We
should probably make the client responsible for rejecting
attempts to inline from newer callees to older callers, since
there's no way for this package to access module versions.
- Use an alternative implementation of the import-organizing
operation that doesn't require operating on a complete file
(and reformatting). Then return the results in a higher-level
form as a set of import additions and deletions plus a single
diff that encloses the call expression. This interface could
perhaps be implemented atop imports.Process by post-processing
its result to obtain the abstract import changes and discarding
its formatted output.
*/
package inline

View file

@ -0,0 +1,102 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inline
import (
"fmt"
"go/ast"
"go/token"
"go/types"
)
// escape implements a simple "address-taken" escape analysis. It
// calls f for each local variable that appears on the left side of an
// assignment (escapes=false) or has its address taken (escapes=true).
// The initialization of a variable by its declaration does not count
// as an assignment.
func escape(info *types.Info, root ast.Node, f func(v *types.Var, escapes bool)) {

	// lvalue is called for each address-taken expression or LHS of assignment.
	// Supported forms are: x, (x), x[i], x.f, *x, T{}.
	// It walks down composite lvalues (a[i], s.f) to the base identifier
	// and reports that variable via f, with escapes=true only for
	// address-taken expressions.
	var lvalue func(e ast.Expr, escapes bool)
	lvalue = func(e ast.Expr, escapes bool) {
		switch e := e.(type) {
		case *ast.Ident:
			if v, ok := info.Uses[e].(*types.Var); ok {
				if !isPkgLevel(v) { // only local variables are of interest
					f(v, escapes)
				}
			}
		case *ast.ParenExpr:
			lvalue(e.X, escapes)
		case *ast.IndexExpr:
			// TODO(adonovan): support generics without assuming e.X has a core type.
			// Consider:
			//
			//	func Index[T interface{ [3]int | []int }](t T, i int) *int {
			//		return &t[i]
			//	}
			//
			// We must traverse the normal terms and check
			// whether any of them is an array.
			//
			// We assume TypeOf returns non-nil.
			if _, ok := info.TypeOf(e.X).Underlying().(*types.Array); ok {
				lvalue(e.X, escapes) // &a[i] on array
			}
			// (&s[i] on a slice does not cause s itself to escape.)
		case *ast.SelectorExpr:
			// We assume TypeOf returns non-nil.
			if _, ok := info.TypeOf(e.X).Underlying().(*types.Struct); ok {
				lvalue(e.X, escapes) // &s.f on struct
			}
			// (&p.f via a pointer p indirects p; p itself does not escape.)
		case *ast.StarExpr:
			// *ptr indirects an existing pointer
		case *ast.CompositeLit:
			// &T{...} creates a new variable
		default:
			panic(fmt.Sprintf("&x on %T", e)) // unreachable in well-typed code
		}
	}

	// Search function body for operations &x, x.f(), x++, and x = y
	// where x is a parameter. Each of these treats x as an address.
	ast.Inspect(root, func(n ast.Node) bool {
		switch n := n.(type) {
		case *ast.UnaryExpr:
			if n.Op == token.AND {
				lvalue(n.X, true) // &x
			}
		case *ast.CallExpr:
			// implicit &x in method call x.f(),
			// where x has type T and method is (*T).f
			if sel, ok := n.Fun.(*ast.SelectorExpr); ok {
				if seln, ok := info.Selections[sel]; ok &&
					seln.Kind() == types.MethodVal && // ordinary method call
					isPointer(seln.Obj().Type().Underlying().(*types.Signature).Recv().Type()) {
					tArg, indirect := effectiveReceiver(seln)
					if !indirect && !isPointer(tArg) {
						lvalue(sel.X, true) // &x.f
					}
				}
			}
		case *ast.AssignStmt:
			for _, lhs := range n.Lhs {
				if id, ok := lhs.(*ast.Ident); ok &&
					info.Defs[id] != nil &&
					n.Tok == token.DEFINE {
					// declaration: doesn't count
				} else {
					lvalue(lhs, false) // assignment to existing var
				}
			}
		case *ast.IncDecStmt:
			lvalue(n.X, false) // x++ mutates x
		}
		return true
	})
}

View file

@ -0,0 +1,879 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inline
// This file defines the callee side of the "fallible constant" analysis.
import (
"fmt"
"go/ast"
"go/constant"
"go/format"
"go/token"
"go/types"
"strconv"
"strings"
"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/typeparams"
)
// falconResult is the result of the analysis of the callee:
// a self-contained description of its fallible-constant constraints,
// expressed in a neutral form that does not depend on the callee's
// type information (see the doc comment of falcon).
type falconResult struct {
	Types       []falconType // types for falcon constraint environment
	Constraints []string     // constraints (Go expressions) on values of fallible constants
}
// A falconType specifies the name and underlying type of a synthetic
// defined type for use in falcon constraints.
//
// Unique types from callee code are bijectively mapped onto falcon
// types so that constraints are independent of callee type
// information but preserve type equivalence classes.
//
// Fresh names are deliberately obscure to avoid shadowing even if a
// callee parameter has a name like "int" or "any".
type falconType struct {
	Name string          // fresh synthetic name (e.g. "int۰0"; see falconState.typename)
	Kind types.BasicKind // string/number/bool
}
// falcon identifies "fallible constant" expressions, which are
// expressions that may fail to compile if one or more of their
// operands is changed from non-constant to constant.
//
// Consider:
//
// func sub(s string, i, j int) string { return s[i:j] }
//
// If parameters are replaced by constants, the compiler is
// required to perform these additional checks:
//
// - if i is constant, 0 <= i.
// - if s and i are constant, i <= len(s).
// - ditto for j.
// - if i and j are constant, i <= j.
//
// s[i:j] is thus a "fallible constant" expression dependent on {s, i,
// j}. Each falcon creates a set of conditional constraints across one
// or more parameter variables.
//
// - When inlining a call such as sub("abc", -1, 2), the parameter i
// cannot be eliminated by substitution as its argument value is
// negative.
//
// - When inlining sub("", 2, 1), all three parameters cannot be
// simultaneously eliminated by substitution without violating i
// <= len(s) and j <= len(s), but the parameters i and j could be
// safely eliminated without s.
//
// Parameters that cannot be eliminated must remain non-constant,
// either in the form of a binding declaration:
//
// { var i int = -1; return "abc"[i:2] }
//
// or a parameter of a literalization:
//
// func (i int) string { return "abc"[i:2] }(-1)
//
// These example expressions are obviously doomed to fail at run
// time, but in realistic cases such expressions are dominated by
// appropriate conditions that make them reachable only when safe:
//
// if 0 <= i && i <= j && j <= len(s) { _ = s[i:j] }
//
// (In principle a more sophisticated inliner could entirely eliminate
// such unreachable blocks based on the condition being always-false
// for the given parameter substitution, but this is tricky to do safely
// because the type-checker considers only a single configuration.
// Consider: if runtime.GOOS == "linux" { ... }.)
//
// We believe this is an exhaustive list of "fallible constant" operations:
//
// - switch z { case x: case y } // duplicate case values
// - s[i], s[i:j], s[i:j:k] // index out of bounds (0 <= i <= j <= k <= len(s))
// - T{x: 0} // index out of bounds, duplicate index
// - x/y, x%y, x/=y, x%=y // integer division by zero; minint/-1 overflow
// - x+y, x-y, x*y // arithmetic overflow
// - x<<y // shift out of range
// - -x // negation of minint
// - T(x) // value out of range
//
// The fundamental reason for this elaborate algorithm is that the
// "separate analysis" of callee and caller, as required when running
// in an environment such as unitchecker, means that there is no way
// for us to simply invoke the type checker on the combination of
// caller and callee code, as by the time we analyze the caller, we no
// longer have access to type information for the callee (and, in
// particular, any of its direct dependencies that are not direct
// dependencies of the caller). So, in effect, we are forced to map
// the problem in a neutral (callee-type-independent) constraint
// system that can be verified later.
func falcon(logf func(string, ...any), fset *token.FileSet, params map[*types.Var]*paramInfo, info *types.Info, decl *ast.FuncDecl) falconResult {
	state := &falconState{
		logf:   logf,
		fset:   fset,
		params: params,
		info:   info,
		decl:   decl,
	}

	// Establish the type environment: int is always needed for
	// index constraints, and the empty interface is spelled out
	// longhand because "any" may be shadowed in callee code.
	state.int = state.typename(types.Typ[types.Int])
	state.any = "interface{}"
	for v, pinfo := range state.params {
		if isBasic(v.Type(), types.IsConstType) {
			pinfo.FalconType = state.typename(v.Type())
		}
	}

	// Traverse the callee body, emitting constraints as a side effect.
	state.stmt(state.decl.Body)

	return state.result
}
// falconState bundles the inputs of a single falcon analysis with the
// working state accumulated while traversing the callee declaration.
type falconState struct {
	// inputs
	logf   func(string, ...any)      // debug logging sink
	fset   *token.FileSet            // positions, for formatting constraint expressions
	params map[*types.Var]*paramInfo // parameter info; falcon sets the FalconType fields
	info   *types.Info               // type information for the callee
	decl   *ast.FuncDecl             // the callee declaration under analysis

	// working state
	int       string       // falcon-system name for the type int (set by falcon)
	any       string       // name used for the empty interface ("interface{}")
	typenames typeutil.Map // types.Type → string: canonical falcon name per type
	result    falconResult // accumulated types and constraints
}
// typename returns the name in the falcon constraint system
// of a given string/number/bool type t. Falcon types are
// specified directly in go/types data structures rather than
// by name, avoiding potential shadowing conflicts with
// confusing parameter names such as "int".
//
// Also, each distinct type (as determined by types.Identical)
// is mapped to a fresh type in the falcon system so that we
// can map the types in the callee code into a neutral form
// that does not depend on imports, allowing us to detect
// potential conflicts such as
//
// map[any]{T1(1): 0, T2(1): 0}
//
// where T1=T2.
func (st *falconState) typename(t types.Type) string {
	// Reuse the canonical name if this type was seen before.
	if cached, ok := st.typenames.At(t).(string); ok {
		return cached
	}

	basic := t.Underlying().(*types.Basic)

	// The ۰ rune is an Arabic zero numeral (U+06F0), chosen because
	// it is very unlikely to appear in a real program.
	// TODO(adonovan): use a non-heuristic solution.
	fresh := fmt.Sprintf("%s۰%d", basic, st.typenames.Len())
	st.typenames.Set(t, fresh)
	st.logf("falcon: emit type %s %s // %q", fresh, basic, t)
	st.result.Types = append(st.result.Types, falconType{
		Name: fresh,
		Kind: basic.Kind(),
	})
	return fresh
}
// -- constraint emission --
// emit emits a Go expression that must have a legal type.
// In effect, we let the go/types constant folding algorithm
// do most of the heavy lifting (though it may be hard to
// believe from the complexity of this algorithm!).
func (st *falconState) emit(constraint ast.Expr) {
	// Render the constraint expression as Go source text.
	var buf strings.Builder
	err := format.Node(&buf, st.fset, constraint)
	if err != nil {
		panic(err) // can't happen
	}
	text := buf.String()
	st.logf("falcon: emit constraint %s", text)
	st.result.Constraints = append(st.result.Constraints, text)
}
// emitNonNegative emits an []T{}[index] constraint,
// which ensures index is non-negative if constant.
func (st *falconState) emitNonNegative(index ast.Expr) {
	// Index into an empty []int literal: a constant negative
	// index will fail to type-check.
	elems := &ast.CompositeLit{
		Type: &ast.ArrayType{Elt: makeIdent(st.int)},
	}
	st.emit(&ast.IndexExpr{X: elems, Index: index})
}
// emitMonotonic emits an []T{}[i:j] constraint,
// which ensures i <= j if both are constant.
func (st *falconState) emitMonotonic(i, j ast.Expr) {
	// Slice an empty []int literal: constant bounds with i > j
	// will fail to type-check.
	slice := &ast.SliceExpr{
		X: &ast.CompositeLit{
			Type: &ast.ArrayType{Elt: makeIdent(st.int)},
		},
		Low:  i,
		High: j,
	}
	st.emit(slice)
}
// emitUnique emits a T{elem1: 0, ... elemN: 0} constraint,
// which ensures that all constant elems are unique.
// T may be a map, slice, or array depending
// on the desired check semantics.
func (st *falconState) emitUnique(typ ast.Expr, elems []ast.Expr) {
	if len(elems) < 2 {
		return // a single element is trivially unique
	}

	// Build typ{e1: 0, ..., eN: 0}; duplicate constant keys/indices
	// will fail to type-check.
	elts := make([]ast.Expr, 0, len(elems))
	for _, elem := range elems {
		elts = append(elts, &ast.KeyValueExpr{
			Key:   elem,
			Value: makeIntLit(0),
		})
	}
	st.emit(&ast.CompositeLit{Type: typ, Elts: elts})
}
// -- traversal --
// The traversal functions scan the callee body for expressions that
// are not constant but would become constant if the parameter vars
// were redeclared as constants, and emits for each one a constraint
// (a Go expression) with the property that it will not type-check
// (using types.CheckExpr) if the particular argument values are
// unsuitable.
//
// These constraints are checked by Inline with the actual
// constant argument values. Violations cause it to reject
// parameters as candidates for substitution.
func (st *falconState) stmt(s ast.Stmt) {
	ast.Inspect(s, func(n ast.Node) bool {
		switch n := n.(type) {
		case ast.Expr:
			_ = st.expr(n)
			return false // skip usual traversal

		case *ast.AssignStmt:
			switch n.Tok {
			case token.QUO_ASSIGN, token.REM_ASSIGN:
				// x /= y
				// Possible "integer division by zero"
				// Emit constraint: 1/y.
				_ = st.expr(n.Lhs[0])
				kY := st.expr(n.Rhs[0])
				if kY, ok := kY.(ast.Expr); ok { // fallible constant divisor?
					op := token.QUO
					if n.Tok == token.REM_ASSIGN {
						op = token.REM
					}
					st.emit(&ast.BinaryExpr{
						Op: op,
						X:  makeIntLit(1),
						Y:  kY,
					})
				}
				return false // skip usual traversal
			}

		case *ast.SwitchStmt:
			if n.Init != nil {
				st.stmt(n.Init)
			}
			tBool := types.Type(types.Typ[types.Bool])
			tagType := tBool // default: true
			if n.Tag != nil {
				st.expr(n.Tag)
				tagType = st.info.TypeOf(n.Tag)
			}

			// Possible "duplicate case value".
			// Emit constraint map[T]int{v1: 0, ..., vN: 0}
			// to ensure all maybe-constant case values are unique
			// (unless switch tag is boolean, which is relaxed).
			var unique []ast.Expr
			for _, clause := range n.Body.List {
				clause := clause.(*ast.CaseClause)
				for _, caseval := range clause.List {
					if k := st.expr(caseval); k != nil {
						unique = append(unique, st.toExpr(k))
					}
				}
				for _, stmt := range clause.Body {
					st.stmt(stmt)
				}
			}
			if unique != nil && !types.Identical(tagType.Underlying(), tBool) {
				tname := st.any // interface-typed tags use the neutral any type
				if !types.IsInterface(tagType) {
					tname = st.typename(tagType)
				}
				t := &ast.MapType{
					Key:   makeIdent(tname),
					Value: makeIdent(st.int),
				}
				st.emitUnique(t, unique)
			}
			// NOTE(review): unlike the cases above, this case falls
			// through to "return true", so Inspect re-visits the
			// switch's Init/Tag/clauses and may emit duplicate
			// constraints. Redundant constraints appear harmless
			// (see the doc of expr), but confirm this is intended.
		}
		return true
	})
}
// fieldTypes visits the .Type of each field in the list.
func (st *falconState) fieldTypes(fields *ast.FieldList) {
	if fields == nil {
		return
	}
	for _, field := range fields.List {
		_ = st.expr(field.Type)
	}
}
// expr visits the expression (or type) and returns a
// non-nil result if the expression is constant or would
// become constant if all suitable function parameters were
// redeclared as constants.
//
// If the expression is constant, st.expr returns its type
// and value (types.TypeAndValue). If the expression would
// become constant, st.expr returns an ast.Expr tree whose
// leaves are literals and parameter references, and whose
// interior nodes are operations that may become constant,
// such as -x, x+y, f(x), and T(x). We call these would-be
// constant expressions "fallible constants", since they may
// fail to type-check for some values of x, i, and j. (We
// refer to the non-nil cases collectively as "maybe
// constant", and the nil case as "definitely non-constant".)
//
// As a side effect, st.expr emits constraints for each
// fallible constant expression; this is its main purpose.
//
// Consequently, st.expr must visit the entire subtree so
// that all necessary constraints are emitted. It may not
// short-circuit the traversal when it encounters a constant
// subexpression as constants may contain arbitrary other
// syntax that may impose constraints. Consider (as always)
// this contrived but legal example of a type parameter (!)
// that contains statement syntax:
//
//	func f[T [unsafe.Sizeof(func() { stmts })]int]()
//
// There is no need to emit constraints for (e.g.) s[i] when s
// and i are already constants, because we know the expression
// is sound, but it is sometimes easier to emit these
// redundant constraints than to avoid them.
func (st *falconState) expr(e ast.Expr) (res any) { // = types.TypeAndValue | ast.Expr
	tv := st.info.Types[e]
	if tv.Value != nil {
		// A constant value overrides any other result.
		defer func() { res = tv }()
	}
	switch e := e.(type) {
	case *ast.Ident:
		if v, ok := st.info.Uses[e].(*types.Var); ok {
			if _, ok := st.params[v]; ok && isBasic(v.Type(), types.IsConstType) {
				return e // reference to constable parameter
			}
		}
		// (References to *types.Const are handled by the defer.)
	case *ast.BasicLit:
		// constant
	case *ast.ParenExpr:
		return st.expr(e.X)
	case *ast.FuncLit:
		_ = st.expr(e.Type)
		st.stmt(e.Body)
		// definitely non-constant
	case *ast.CompositeLit:
		// T{k: v, ...}, where T ∈ {array,*array,slice,map},
		// imposes a constraint that all constant k are
		// distinct and, for arrays [n]T, within range 0-n.
		//
		// Types matter, not just values. For example,
		// an interface-keyed map may contain keys
		// that are numerically equal so long as they
		// are of distinct types. For example:
		//
		//	type myint int
		//	map[any]bool{1: true, 1: true}        // error: duplicate key
		//	map[any]bool{1: true, int16(1): true} // ok
		//	map[any]bool{1: true, myint(1): true} // ok
		//
		// This can be asserted by emitting a
		// constraint of the form T{k1: 0, ..., kN: 0}.
		if e.Type != nil {
			_ = st.expr(e.Type)
		}
		t := types.Unalias(typeparams.Deref(tv.Type))
		ct := typeparams.CoreType(t)
		var mapKeys []ast.Expr // map key expressions; must be distinct if constant
		for _, elt := range e.Elts {
			if kv, ok := elt.(*ast.KeyValueExpr); ok {
				if is[*types.Map](ct) {
					if k := st.expr(kv.Key); k != nil {
						mapKeys = append(mapKeys, st.toExpr(k))
					}
				}
				_ = st.expr(kv.Value)
			} else {
				_ = st.expr(elt)
			}
		}
		if len(mapKeys) > 0 {
			// Inlining a map literal may replace variable key expressions by constants.
			// All such constants must have distinct values.
			// (Array and slice literals do not permit non-constant keys.)
			t := ct.(*types.Map)
			var typ ast.Expr
			if types.IsInterface(t.Key()) {
				typ = &ast.MapType{
					Key:   makeIdent(st.any),
					Value: makeIdent(st.int),
				}
			} else {
				typ = &ast.MapType{
					Key:   makeIdent(st.typename(t.Key())),
					Value: makeIdent(st.int),
				}
			}
			st.emitUnique(typ, mapKeys)
		}
		// definitely non-constant
	case *ast.SelectorExpr:
		_ = st.expr(e.X)
		_ = st.expr(e.Sel)
		// The defer is sufficient to handle
		// qualified identifiers (pkg.Const).
		// All other cases are definitely non-constant.
	case *ast.IndexExpr:
		if tv.IsType() {
			// type C[T]
			_ = st.expr(e.X)
			_ = st.expr(e.Index)
		} else {
			// term x[i]
			//
			// Constraints (if x is slice/string/array/*array, not map):
			// - i >= 0
			//   if i is a fallible constant
			// - i < len(x)
			//   if x is array/*array and
			//   i is a fallible constant;
			//   or if s is a string and both i,
			//   s are maybe-constants,
			//   but not both are constants.
			kX := st.expr(e.X)
			kI := st.expr(e.Index)
			if kI != nil && !is[*types.Map](st.info.TypeOf(e.X).Underlying()) {
				if kI, ok := kI.(ast.Expr); ok {
					st.emitNonNegative(kI)
				}
				// Emit constraint to check indices against known length.
				// TODO(adonovan): factor with SliceExpr logic.
				var x ast.Expr
				if kX != nil {
					// string
					x = st.toExpr(kX)
				} else if arr, ok := typeparams.CoreType(typeparams.Deref(st.info.TypeOf(e.X))).(*types.Array); ok {
					// array, *array
					x = &ast.CompositeLit{
						Type: &ast.ArrayType{
							Len: makeIntLit(arr.Len()),
							Elt: makeIdent(st.int),
						},
					}
				}
				if x != nil {
					st.emit(&ast.IndexExpr{
						X:     x,
						Index: st.toExpr(kI),
					})
				}
			}
		}
		// definitely non-constant
	case *ast.SliceExpr:
		// x[low:high:max]
		//
		// Emit non-negative constraints for each index,
		// plus low <= high <= max <= len(x)
		// for each pair that are maybe-constant
		// but not definitely constant.
		kX := st.expr(e.X)
		var kLow, kHigh, kMax any
		if e.Low != nil {
			kLow = st.expr(e.Low)
			if kLow != nil {
				if kLow, ok := kLow.(ast.Expr); ok {
					st.emitNonNegative(kLow)
				}
			}
		}
		if e.High != nil {
			kHigh = st.expr(e.High)
			if kHigh != nil {
				if kHigh, ok := kHigh.(ast.Expr); ok {
					st.emitNonNegative(kHigh)
				}
				if kLow != nil {
					st.emitMonotonic(st.toExpr(kLow), st.toExpr(kHigh))
				}
			}
		}
		if e.Max != nil {
			kMax = st.expr(e.Max)
			if kMax != nil {
				if kMax, ok := kMax.(ast.Expr); ok {
					st.emitNonNegative(kMax)
				}
				if kHigh != nil {
					st.emitMonotonic(st.toExpr(kHigh), st.toExpr(kMax))
				}
			}
		}
		// Emit constraint to check indices against known length.
		var x ast.Expr
		if kX != nil {
			// string
			x = st.toExpr(kX)
		} else if arr, ok := typeparams.CoreType(typeparams.Deref(st.info.TypeOf(e.X))).(*types.Array); ok {
			// array, *array
			x = &ast.CompositeLit{
				Type: &ast.ArrayType{
					Len: makeIntLit(arr.Len()),
					Elt: makeIdent(st.int),
				},
			}
		}
		if x != nil {
			// Avoid slice[::max] if kHigh is nonconstant (nil).
			high, max := st.toExpr(kHigh), st.toExpr(kMax)
			if high == nil {
				high = max // => slice[:max:max]
			}
			st.emit(&ast.SliceExpr{
				X:    x,
				Low:  st.toExpr(kLow),
				High: high,
				Max:  max,
			})
		}
		// definitely non-constant
	case *ast.TypeAssertExpr:
		_ = st.expr(e.X)
		if e.Type != nil {
			_ = st.expr(e.Type)
		}
	case *ast.CallExpr:
		_ = st.expr(e.Fun)
		if tv, ok := st.info.Types[e.Fun]; ok && tv.IsType() {
			// conversion T(x)
			//
			// Possible "value out of range".
			kX := st.expr(e.Args[0])
			if kX != nil && isBasic(tv.Type, types.IsConstType) {
				conv := convert(makeIdent(st.typename(tv.Type)), st.toExpr(kX))
				if is[ast.Expr](kX) {
					st.emit(conv)
				}
				return conv
			}
			return nil // definitely non-constant
		}
		// call f(x)
		// Visit each argument, recording the maybe-constant ones.
		all := true // all args are possibly-constant
		kArgs := make([]ast.Expr, len(e.Args))
		for i, arg := range e.Args {
			if kArg := st.expr(arg); kArg != nil {
				kArgs[i] = st.toExpr(kArg)
			} else {
				all = false
			}
		}
		// Calls to built-ins with fallibly constant arguments
		// may become constant. All other calls are either
		// constant or non-constant
		if id, ok := e.Fun.(*ast.Ident); ok && all && tv.Value == nil {
			if builtin, ok := st.info.Uses[id].(*types.Builtin); ok {
				switch builtin.Name() {
				case "len", "imag", "real", "complex", "min", "max":
					return &ast.CallExpr{
						Fun:      id,
						Args:     kArgs,
						Ellipsis: e.Ellipsis,
					}
				}
			}
		}
	case *ast.StarExpr: // *T, *ptr
		_ = st.expr(e.X)
	case *ast.UnaryExpr:
		// + - ! ^ & <- ~
		//
		// Possible "negation of minint".
		// Emit constraint: -x
		kX := st.expr(e.X)
		if kX != nil && !is[types.TypeAndValue](kX) {
			if e.Op == token.SUB {
				st.emit(&ast.UnaryExpr{
					Op: e.Op,
					X:  st.toExpr(kX),
				})
			}
			return &ast.UnaryExpr{
				Op: e.Op,
				X:  st.toExpr(kX),
			}
		}
	case *ast.BinaryExpr:
		kX := st.expr(e.X)
		kY := st.expr(e.Y)
		switch e.Op {
		case token.QUO, token.REM:
			// x/y, x%y
			//
			// Possible "integer division by zero" or
			// "minint / -1" overflow.
			// Emit constraint: x/y or 1/y
			if kY != nil {
				if kX == nil {
					kX = makeIntLit(1)
				}
				st.emit(&ast.BinaryExpr{
					Op: e.Op,
					X:  st.toExpr(kX),
					Y:  st.toExpr(kY),
				})
			}
		case token.ADD, token.SUB, token.MUL:
			// x+y, x-y, x*y
			//
			// Possible "arithmetic overflow".
			// Emit constraint: x+y
			if kX != nil && kY != nil {
				st.emit(&ast.BinaryExpr{
					Op: e.Op,
					X:  st.toExpr(kX),
					Y:  st.toExpr(kY),
				})
			}
		case token.SHL, token.SHR:
			// x << y, x >> y
			//
			// Possible "constant shift too large".
			// Either operand may be too large individually,
			// and they may be too large together.
			// Emit constraint:
			//	x << y (if both maybe-constant)
			//	x << 0 (if y is non-constant)
			//	1 << y (if x is non-constant)
			if kX != nil || kY != nil {
				x := st.toExpr(kX)
				if x == nil {
					x = makeIntLit(1)
				}
				y := st.toExpr(kY)
				if y == nil {
					y = makeIntLit(0)
				}
				st.emit(&ast.BinaryExpr{
					Op: e.Op,
					X:  x,
					Y:  y,
				})
			}
		case token.LSS, token.GTR, token.EQL, token.NEQ, token.LEQ, token.GEQ:
			// < > == != <= <=
			//
			// A "x cmp y" expression with constant operands x, y is
			// itself constant, but I can't see how a constant bool
			// could be fallible: the compiler doesn't reject duplicate
			// boolean cases in a switch, presumably because boolean
			// switches are less like n-way branches and more like
			// sequential if-else chains with possibly overlapping
			// conditions; and there is (sadly) no way to convert a
			// boolean constant to an int constant.
		}
		if kX != nil && kY != nil {
			return &ast.BinaryExpr{
				Op: e.Op,
				X:  st.toExpr(kX),
				Y:  st.toExpr(kY),
			}
		}
	// types
	//
	// We need to visit types (and even type parameters)
	// in order to reach all the places where things could go wrong:
	//
	//	const (
	//		s = ""
	//		i = 0
	//	)
	//	type C[T [unsafe.Sizeof(func() { _ = s[i] })]int] bool
	case *ast.IndexListExpr:
		_ = st.expr(e.X)
		for _, expr := range e.Indices {
			_ = st.expr(expr)
		}
	case *ast.Ellipsis:
		if e.Elt != nil {
			_ = st.expr(e.Elt)
		}
	case *ast.ArrayType:
		if e.Len != nil {
			_ = st.expr(e.Len)
		}
		_ = st.expr(e.Elt)
	case *ast.StructType:
		st.fieldTypes(e.Fields)
	case *ast.FuncType:
		st.fieldTypes(e.TypeParams)
		st.fieldTypes(e.Params)
		st.fieldTypes(e.Results)
	case *ast.InterfaceType:
		st.fieldTypes(e.Methods)
	case *ast.MapType:
		_ = st.expr(e.Key)
		_ = st.expr(e.Value)
	case *ast.ChanType:
		_ = st.expr(e.Value)
	}
	return // res is nil (non-constant) unless set by the deferred constant override
}
// toExpr converts the result of visitExpr to a falcon expression.
// (We don't do this in visitExpr as we first need to discriminate
// constants from maybe-constants.)
func (st *falconState) toExpr(x any) ast.Expr {
	switch v := x.(type) {
	case nil:
		return nil
	case ast.Expr:
		// Already a fallible-constant expression tree.
		return v
	case types.TypeAndValue:
		e := makeLiteral(v.Value)
		if isBasic(v.Type, types.IsUntyped) {
			return e
		}
		// Wrap the literal in a conversion to its "typed" type.
		return &ast.CallExpr{
			Fun:  makeIdent(st.typename(v.Type)),
			Args: []ast.Expr{e},
		}
	default:
		panic(x)
	}
}
// makeLiteral returns syntax denoting the constant v.
func makeLiteral(v constant.Value) ast.Expr {
	switch v.Kind() {
	case constant.Bool:
		// Rather than refer to the true or false built-ins,
		// which could be shadowed by poorly chosen parameter
		// names, we use 0 == 0 for true and 0 != 0 for false.
		op := token.NEQ
		if constant.BoolVal(v) {
			op = token.EQL
		}
		return &ast.BinaryExpr{Op: op, X: makeIntLit(0), Y: makeIntLit(0)}
	case constant.String:
		return &ast.BasicLit{Kind: token.STRING, Value: v.ExactString()}
	case constant.Int:
		return &ast.BasicLit{Kind: token.INT, Value: v.ExactString()}
	case constant.Float:
		return &ast.BasicLit{Kind: token.FLOAT, Value: v.ExactString()}
	case constant.Complex:
		// The components could be float or int.
		im := makeLiteral(constant.Imag(v))
		im.(*ast.BasicLit).Value += "i" // ugh
		re := constant.Real(v)
		if consteq(re, kZeroInt) {
			return im // pure imaginary
		}
		// complex: x + yi
		return &ast.BinaryExpr{
			Op: token.ADD,
			X:  makeLiteral(re),
			Y:  im,
		}
	default:
		panic(v.Kind())
	}
}
func makeIntLit(x int64) *ast.BasicLit {
return &ast.BasicLit{
Kind: token.INT,
Value: strconv.FormatInt(x, 10),
}
}
func isBasic(t types.Type, info types.BasicInfo) bool {
basic, ok := t.Underlying().(*types.Basic)
return ok && basic.Info()&info != 0
}

View file

@ -0,0 +1,382 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Copied, with considerable changes, from go/parser/resolver.go
// at af53bd2c03.
package inline
import (
"go/ast"
"go/token"
)
// freeishNames computes an approximation to the free names of the AST
// at node n based solely on syntax, inserting values into the map.
//
// In the absence of composite literals, the set of free names is exact.
// A composite literal `T{F: ...}` is ambiguous without type information:
// F may be a struct field name or a value.
// With includeComplitIdents set, T is conservatively assumed not to be a
// struct type, so the result overapproximates: it may include entries
// that are really struct field names, not free lexical references.
// With includeComplitIdents unset, T is assumed to be a struct type, so
// the result underapproximates: it may omit genuinely free names.
//
// The code is based on go/parser.resolveFile, but heavily simplified. Crucial
// differences are:
//   - Instead of resolving names to their objects, this function merely records
//     whether they are free.
//   - Labels are ignored: they do not refer to values.
//   - This is never called on FuncDecls or ImportSpecs, so the function
//     panics if it sees one.
func freeishNames(free map[string]bool, n ast.Node, includeComplitIdents bool) {
	fv := &freeVisitor{free: free, includeComplitIdents: includeComplitIdents}
	// Wrap the traversal in a scope even though n might not be a form
	// that establishes one: given n of the form "x := ...", that x
	// must be declared somewhere.
	fv.openScope()
	ast.Walk(fv, n)
	fv.closeScope()
	assert(fv.scope == nil, "unbalanced scopes")
}
// A freeVisitor holds state for a free-name analysis.
// It implements ast.Visitor; see [freeVisitor.Visit].
type freeVisitor struct {
	scope                *scope          // the current innermost scope
	free                 map[string]bool // free names seen so far
	includeComplitIdents bool            // include identifier key in composite literals
}
// scope contains all the names defined in a lexical scope.
// It is like ast.Scope, but without deprecation warnings.
type scope struct {
	names map[string]bool
	outer *scope
}

// defined reports whether name is bound in s or in any enclosing scope.
func (s *scope) defined(name string) bool {
	for cur := s; cur != nil; cur = cur.outer {
		if cur.names[name] {
			return true
		}
	}
	return false
}
// Visit implements ast.Visitor. It records free identifiers in v.free
// and maintains the scope stack as it goes. For every case it handles,
// it traverses the node's children itself (via walk and friends) and
// returns nil so that ast.Walk does not descend again; only the default
// case returns v to let ast.Walk recurse.
func (v *freeVisitor) Visit(n ast.Node) ast.Visitor {
	switch n := n.(type) {

	// Expressions.
	case *ast.Ident:
		v.resolve(n)

	case *ast.FuncLit:
		v.openScope()
		defer v.closeScope()
		v.walkFuncType(n.Type)
		v.walkBody(n.Body)

	case *ast.SelectorExpr:
		v.walk(n.X)
		// Skip n.Sel: it cannot be free.

	case *ast.StructType:
		v.openScope()
		defer v.closeScope()
		v.walkFieldList(n.Fields)

	case *ast.FuncType:
		v.openScope()
		defer v.closeScope()
		v.walkFuncType(n)

	case *ast.CompositeLit:
		v.walk(n.Type)
		for _, e := range n.Elts {
			if kv, _ := e.(*ast.KeyValueExpr); kv != nil {
				if ident, _ := kv.Key.(*ast.Ident); ident != nil {
					// It is not possible from syntax alone to know whether
					// an identifier used as a composite literal key is
					// a struct field (if n.Type is a struct) or a value
					// (if n.Type is a map, slice or array).
					if v.includeComplitIdents {
						// Over-approximate by treating both cases as potentially
						// free names.
						v.resolve(ident)
					} else {
						// Under-approximate by ignoring potentially free names.
					}
				} else {
					v.walk(kv.Key)
				}
				v.walk(kv.Value)
			} else {
				v.walk(e)
			}
		}

	case *ast.InterfaceType:
		v.openScope()
		defer v.closeScope()
		v.walkFieldList(n.Methods)

	// Statements
	case *ast.AssignStmt:
		// RHS first: its names are resolved in the enclosing
		// scope, before any := bindings take effect.
		walkSlice(v, n.Rhs)
		if n.Tok == token.DEFINE {
			v.shortVarDecl(n.Lhs)
		} else {
			walkSlice(v, n.Lhs)
		}

	case *ast.LabeledStmt:
		// ignore labels
		// TODO(jba): consider labels?
		v.walk(n.Stmt)

	case *ast.BranchStmt:
		// Ignore labels.
		// TODO(jba): consider labels?

	case *ast.BlockStmt:
		v.openScope()
		defer v.closeScope()
		walkSlice(v, n.List)

	case *ast.IfStmt:
		v.openScope()
		defer v.closeScope()
		v.walk(n.Init)
		v.walk(n.Cond)
		v.walk(n.Body)
		v.walk(n.Else)

	case *ast.CaseClause:
		// The case values are resolved outside the clause's scope.
		walkSlice(v, n.List)
		v.openScope()
		defer v.closeScope()
		walkSlice(v, n.Body)

	case *ast.SwitchStmt:
		v.openScope()
		defer v.closeScope()
		v.walk(n.Init)
		v.walk(n.Tag)
		v.walkBody(n.Body)

	case *ast.TypeSwitchStmt:
		if n.Init != nil {
			v.openScope()
			defer v.closeScope()
			v.walk(n.Init)
		}
		v.openScope()
		defer v.closeScope()
		v.walk(n.Assign)
		// We can use walkBody here because we don't track label scopes.
		v.walkBody(n.Body)

	case *ast.CommClause:
		v.openScope()
		defer v.closeScope()
		v.walk(n.Comm)
		walkSlice(v, n.Body)

	case *ast.SelectStmt:
		v.walkBody(n.Body)

	case *ast.ForStmt:
		v.openScope()
		defer v.closeScope()
		v.walk(n.Init)
		v.walk(n.Cond)
		v.walk(n.Post)
		v.walk(n.Body)

	case *ast.RangeStmt:
		v.openScope()
		defer v.closeScope()
		v.walk(n.X)
		var lhs []ast.Expr
		if n.Key != nil {
			lhs = append(lhs, n.Key)
		}
		if n.Value != nil {
			lhs = append(lhs, n.Value)
		}
		if len(lhs) > 0 {
			if n.Tok == token.DEFINE {
				v.shortVarDecl(lhs)
			} else {
				walkSlice(v, lhs)
			}
		}
		v.walk(n.Body)

	// Declarations
	case *ast.GenDecl:
		switch n.Tok {
		case token.CONST, token.VAR:
			for _, spec := range n.Specs {
				spec := spec.(*ast.ValueSpec)
				walkSlice(v, spec.Values)
				if spec.Type != nil {
					v.walk(spec.Type)
				}
				v.declare(spec.Names...)
			}
		case token.TYPE:
			for _, spec := range n.Specs {
				spec := spec.(*ast.TypeSpec)
				// Go spec: The scope of a type identifier declared inside a
				// function begins at the identifier in the TypeSpec and ends
				// at the end of the innermost containing block.
				v.declare(spec.Name)
				if spec.TypeParams != nil {
					v.openScope()
					defer v.closeScope()
					v.walkTypeParams(spec.TypeParams)
				}
				v.walk(spec.Type)
			}
		case token.IMPORT:
			panic("encountered import declaration in free analysis")
		}

	case *ast.FuncDecl:
		panic("encountered top-level function declaration in free analysis")

	default:
		return v
	}
	return nil
}
// openScope pushes a new, empty innermost scope.
func (r *freeVisitor) openScope() {
	r.scope = &scope{names: make(map[string]bool), outer: r.scope}
}
// closeScope pops the innermost scope.
func (r *freeVisitor) closeScope() {
	enclosing := r.scope.outer
	r.scope = enclosing
}
// walk traverses n with r, tolerating a nil node.
func (r *freeVisitor) walk(n ast.Node) {
	if n == nil {
		return
	}
	ast.Walk(r, n)
}
// walkFuncType walks a function type. It is used for explicit
// function types, like this:
//
//	type RunFunc func(context.Context) error
//
// and function literals, like this:
//
//	func(a, b int) int { return a + b}
//
// neither of which have type parameters.
// Function declarations do involve type parameters, but we don't
// handle them.
func (r *freeVisitor) walkFuncType(typ *ast.FuncType) {
	// Resolve all referenced types, then declare all the names.
	// The relative order doesn't really matter, because names in
	// a field list cannot appear in types.
	// (The situation is different for type parameters, for which
	// see [freeVisitor.walkTypeParams].)
	lists := []*ast.FieldList{typ.Params, typ.Results}
	for _, list := range lists {
		r.resolveFieldList(list)
	}
	for _, list := range lists {
		r.declareFieldList(list)
	}
}
// walkTypeParams is like walkFieldList, but declares type parameters eagerly so
// that they may be resolved in the constraint expressions held in the field
// Type.
func (r *freeVisitor) walkTypeParams(list *ast.FieldList) {
	// Order matters here: a type parameter must already be in scope
	// when the constraints (field types) are resolved, since a
	// constraint may refer to the parameter itself.
	r.declareFieldList(list)
	r.resolveFieldList(list)
}
// walkBody walks the statements of a block without opening a new scope,
// tolerating a nil body.
func (r *freeVisitor) walkBody(body *ast.BlockStmt) {
	if body != nil {
		walkSlice(r, body.List)
	}
}
// walkFieldList resolves the field types, then declares the field names.
func (r *freeVisitor) walkFieldList(list *ast.FieldList) {
	if list != nil {
		r.resolveFieldList(list) // .Type may contain references
		r.declareFieldList(list) // .Names declares names
	}
}
// shortVarDecl declares the identifiers on the left side of a ":=".
//
// Go spec: A short variable declaration may redeclare variables provided
// they were originally declared in the same block with the same type, and
// at least one of the non-blank variables is new.
//
// However, it doesn't matter to free analysis whether a variable is declared
// fresh or redeclared.
func (r *freeVisitor) shortVarDecl(lhs []ast.Expr) {
	for _, expr := range lhs {
		// In a well-formed program each expr must be an identifier,
		// but be forgiving.
		id, ok := expr.(*ast.Ident)
		if !ok {
			continue
		}
		r.declare(id)
	}
}
// walkSlice walks each node of list in order.
func walkSlice[S ~[]E, E ast.Node](r *freeVisitor, list S) {
	for i := range list {
		r.walk(list[i])
	}
}
// resolveFieldList resolves the types of the fields in list.
// The companion method declareFieldList declares the names of the fields.
func (r *freeVisitor) resolveFieldList(list *ast.FieldList) {
	if list == nil {
		return
	}
	for _, field := range list.List {
		r.walk(field.Type)
	}
}
// declareFieldList declares the names of the fields in list.
// (Names in a FieldList always establish new bindings.)
// The companion method resolveFieldList resolves the types of the fields.
func (r *freeVisitor) declareFieldList(list *ast.FieldList) {
	if list == nil {
		return
	}
	for _, field := range list.List {
		r.declare(field.Names...)
	}
}
// resolve marks ident as free if it is not in scope.
// TODO(jba): rename: no resolution is happening.
func (r *freeVisitor) resolve(ident *ast.Ident) {
	name := ident.Name
	if name == "_" || r.scope.defined(name) {
		return
	}
	r.free[name] = true
}
// declare adds each non-blank ident to the current scope.
func (r *freeVisitor) declare(idents ...*ast.Ident) {
	for _, ident := range idents {
		if name := ident.Name; name != "_" {
			r.scope.names[name] = true
		}
	}
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,168 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package inline
// This file defines various common helpers.
import (
"go/ast"
"go/constant"
"go/token"
"go/types"
"reflect"
"strings"
"golang.org/x/tools/internal/typeparams"
)
// is reports whether x's dynamic type is T.
func is[T any](x any) bool {
	switch x.(type) {
	case T:
		return true
	default:
		return false
	}
}
// btoi returns 1 for true and 0 for false.
func btoi(b bool) int {
	if b {
		return 1
	}
	return 0
}
func offsetOf(fset *token.FileSet, pos token.Pos) int {
return fset.PositionFor(pos, false).Offset
}
// objectKind returns an object's kind (e.g. var, func, const, typename).
func objectKind(obj types.Object) string {
return strings.TrimPrefix(strings.ToLower(reflect.TypeOf(obj).String()), "*types.")
}
// within reports whether pos is within the half-open interval [n.Pos, n.End).
func within(pos token.Pos, n ast.Node) bool {
return n.Pos() <= pos && pos < n.End()
}
// trivialConversion reports whether it is safe to omit the implicit
// value-to-variable conversion that occurs in argument passing or
// result return. The only case currently allowed is converting from
// untyped constant to its default type (e.g. 0 to int).
//
// The reason for this check is that converting from A to B to C may
// yield a different result than converting A directly to C: consider
// 0 to int32 to any.
//
// trivialConversion under-approximates trivial conversions, as unfortunately
// go/types does not record the type of an expression *before* it is implicitly
// converted, and therefore it cannot distinguish typed constant
// expressions from untyped constant expressions. For example, in the
// expression `c + 2`, where c is a uint32 constant, trivialConversion does not
// detect that the default type of this expression is actually uint32, not untyped
// int.
//
// We could, of course, do better here by reverse engineering some of go/types'
// constant handling. That may or may not be worthwhile.
//
// Example: in func f() int32 { return 0 },
// the type recorded for 0 is int32, not untyped int;
// although it is Identical to the result var,
// the conversion is non-trivial.
func trivialConversion(fromValue constant.Value, from, to types.Type) bool {
if fromValue != nil {
var defaultType types.Type
switch fromValue.Kind() {
case constant.Bool:
defaultType = types.Typ[types.Bool]
case constant.String:
defaultType = types.Typ[types.String]
case constant.Int:
defaultType = types.Typ[types.Int]
case constant.Float:
defaultType = types.Typ[types.Float64]
case constant.Complex:
defaultType = types.Typ[types.Complex128]
default:
return false
}
return types.Identical(defaultType, to)
}
return types.Identical(from, to)
}
// checkInfoFields asserts that info has all the maps the inliner relies
// on (Defs, Implicits, Scopes, Selections, Types, Uses), so that a
// caller that forgot to request one fails fast with a clear message.
func checkInfoFields(info *types.Info) {
	assert(info.Defs != nil, "types.Info.Defs is nil")
	assert(info.Implicits != nil, "types.Info.Implicits is nil")
	assert(info.Scopes != nil, "types.Info.Scopes is nil")
	assert(info.Selections != nil, "types.Info.Selections is nil")
	assert(info.Types != nil, "types.Info.Types is nil")
	assert(info.Uses != nil, "types.Info.Uses is nil")
}
// intersects reports whether the maps' key sets intersect.
func intersects[K comparable, T1, T2 any](x map[K]T1, y map[K]T2) bool {
	// Iterate over the smaller map; recurse with the arguments
	// swapped if x is the larger one.
	if len(x) > len(y) {
		return intersects(y, x)
	}
	for key := range x {
		if _, found := y[key]; found {
			return true
		}
	}
	return false
}
// convert returns syntax for the conversion T(x).
func convert(T, x ast.Expr) *ast.CallExpr {
// The formatter generally adds parens as needed,
// but before go1.22 it had a bug (#63362) for
// channel types that requires this workaround.
if ch, ok := T.(*ast.ChanType); ok && ch.Dir == ast.RECV {
T = &ast.ParenExpr{X: T}
}
return &ast.CallExpr{
Fun: T,
Args: []ast.Expr{x},
}
}
// isPointer reports whether t's core type is a pointer.
func isPointer(t types.Type) bool {
return is[*types.Pointer](typeparams.CoreType(t))
}
// indirectSelection is like seln.Indirect() without bug #8353.
func indirectSelection(seln *types.Selection) bool {
	if seln.Kind() != types.MethodVal {
		return seln.Indirect()
	}
	// Work around bug #8353 in Selection.Indirect when Kind=MethodVal.
	recv, indirect := effectiveReceiver(seln)
	if indirect {
		return true
	}
	sig := seln.Obj().Type().Underlying().(*types.Signature)
	return isPointer(recv) && !isPointer(sig.Recv().Type()) // implicit *
}
// effectiveReceiver returns the effective type of the method
// receiver after all implicit field selections (but not implicit * or
// & operations) have been applied.
//
// The boolean indicates whether any implicit field selection was indirect.
func effectiveReceiver(seln *types.Selection) (types.Type, bool) {
	assert(seln.Kind() == types.MethodVal, "not MethodVal")
	var (
		typ      = seln.Recv()
		indices  = seln.Index()
		indirect = false
	)
	// All but the last index denote implicit field selections.
	for _, fieldIndex := range indices[:len(indices)-1] {
		if isPointer(typ) {
			indirect = true
			typ = typeparams.MustDeref(typ)
		}
		typ = typeparams.CoreType(typ).(*types.Struct).Field(fieldIndex).Type()
	}
	return typ, indirect
}

View file

@ -96,9 +96,12 @@ golang.org/x/tools/go/analysis/passes/framepointer
golang.org/x/tools/go/analysis/passes/hostport
golang.org/x/tools/go/analysis/passes/httpresponse
golang.org/x/tools/go/analysis/passes/ifaceassert
golang.org/x/tools/go/analysis/passes/inline
golang.org/x/tools/go/analysis/passes/inspect
golang.org/x/tools/go/analysis/passes/internal/gofixdirective
golang.org/x/tools/go/analysis/passes/loopclosure
golang.org/x/tools/go/analysis/passes/lostcancel
golang.org/x/tools/go/analysis/passes/modernize
golang.org/x/tools/go/analysis/passes/nilfunc
golang.org/x/tools/go/analysis/passes/printf
golang.org/x/tools/go/analysis/passes/shift
@ -117,6 +120,7 @@ golang.org/x/tools/go/analysis/passes/unsafeptr
golang.org/x/tools/go/analysis/passes/unusedresult
golang.org/x/tools/go/analysis/passes/waitgroup
golang.org/x/tools/go/analysis/unitchecker
golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/ast/edge
golang.org/x/tools/go/ast/inspector
golang.org/x/tools/go/cfg
@ -124,6 +128,7 @@ golang.org/x/tools/go/types/objectpath
golang.org/x/tools/go/types/typeutil
golang.org/x/tools/internal/aliases
golang.org/x/tools/internal/analysisinternal
golang.org/x/tools/internal/analysisinternal/generated
golang.org/x/tools/internal/analysisinternal/typeindex
golang.org/x/tools/internal/astutil
golang.org/x/tools/internal/bisect
@ -131,9 +136,11 @@ golang.org/x/tools/internal/diff
golang.org/x/tools/internal/diff/lcs
golang.org/x/tools/internal/facts
golang.org/x/tools/internal/fmtstr
golang.org/x/tools/internal/goplsexport
golang.org/x/tools/internal/moreiters
golang.org/x/tools/internal/packagepath
golang.org/x/tools/internal/refactor
golang.org/x/tools/internal/refactor/inline
golang.org/x/tools/internal/stdlib
golang.org/x/tools/internal/typeparams
golang.org/x/tools/internal/typesinternal

View file

@ -4,6 +4,10 @@
package main
// TODO(adonovan): replace this test by a script test
// in cmd/go/testdata/script/vet_suite.txt like we do
// for 'go fix'.
import (
"bytes"
"errors"