vendor/golang.org/x/tools: update to 1ad6f3d
	cmd$ go get golang.org/x/tools@1ad6f3d
	cmd$ GOWORK=off go mod tidy
	cmd$ GOWORK=off go mod vendor

This merge pulls in the following commits, which include several fixes
needed for go1.26, marked by an asterisk. None of the unmarked commits
affects vendored packages, so it is safe (and simpler) to merge rather
than cherrypick via a release branch.

	tools$ git log --oneline 68724afed209...1ad6f3d02713
	*4a3f2f81eb go/analysis/passes/printf: panic when function literal is assigned to the blank identifier
	*d5d7d21fe7 gopls/internal/cache: fix %q verb use with wrong type
	*92a094998a go/analysis/passes/modernize: rangeint: handle usages of loop label
	*ffbdcac342 go/analysis/passes/modernize: stditerators: add reflect iters
	*2e3e83a050 internal/refactor/inline: preserve local package name used by callee
	d32ec34454 gopls/internal/protocol/generate: move injections to tables.go
	98d172d8bd gopls/internal/protocol: add form field in type CodeAction
	e1317381e4 go/packages: suppress test on (e.g.) wasm
	*e31ed53b51 internal/stdlib: regenerate
	*6f1f89817d internal/analysis/driverutil: include end positions in -json output
	7839abf5e8 gopls/internal/metadata: document when Module can be nil
	98aa9a7d0b gopls/internal/cache: make unimported completions deterministic
	4c5faddb0f internal/modindex: unescape import paths
	c2c902c441 gopls/completion: avoid nil dereference
	*4bf3169c8a go/analysis/passes/modernize: waitgroup: highlight "go func" part
	ba5189b063 gopls/internal/template: fix printf mistake in test
	*a7d12506a0 go/analysis/passes/printf: clarify checkForward
	c7a1a29f93 internal/pkgbits: fix printf mistake in test
	af205c0a29 gopls/doc/release/v0.21.0.md: tweaks

Change-Id: I23c991987afeb2db3e0f98f76f8ee5000c8a6e02
Reviewed-on: https://go-review.googlesource.com/c/go/+/725460
Auto-Submit: Alan Donovan <adonovan@google.com>
TryBot-Bypass: Alan Donovan <adonovan@google.com>
Reviewed-by: Dmitri Shuralyov <dmitshur@golang.org>
Reviewed-by: Dmitri Shuralyov <dmitshur@google.com>
Commit-Queue: Alan Donovan <adonovan@google.com>
parent eec1afeb28
commit 1555fad47d
19 changed files with 1210 additions and 765 deletions
@ -11,7 +11,7 @@ require (
golang.org/x/sys v0.38.0
golang.org/x/telemetry v0.0.0-20251111182119-bc8e575c7b54
golang.org/x/term v0.34.0
golang.org/x/tools v0.39.1-0.20251120214200-68724afed209
golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713
)

require (
@ -22,7 +22,7 @@ golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/tools v0.39.1-0.20251120214200-68724afed209 h1:BGuEUnbWU1H+VhF4Z52lwCvzRT8Q/Z7kJC3okSME58w=
golang.org/x/tools v0.39.1-0.20251120214200-68724afed209/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713 h1:i4GzAuZW4RuKXltwKyLYAfk7E1TSKQBxRAI7XKfLjSk=
golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef h1:mqLYrXCXYEZOop9/Dbo6RPX11539nwiCNBb1icVPmw8=
rsc.io/markdown v0.0.0-20240306144322-0bf8f97ee8ef/go.mod h1:8xcPgWmwlZONN1D9bjxtHEjrUtSEa3fakVF8iaewYKQ=
29 src/cmd/vendor/golang.org/x/tools/go/analysis/passes/inline/inline.go generated vendored
@ -7,7 +7,6 @@ package inline
|
|||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"slices"
|
||||
"strings"
|
||||
|
|
@ -23,7 +22,6 @@ import (
|
|||
"golang.org/x/tools/internal/analysis/analyzerutil"
|
||||
typeindexanalyzer "golang.org/x/tools/internal/analysis/typeindex"
|
||||
"golang.org/x/tools/internal/astutil"
|
||||
"golang.org/x/tools/internal/diff"
|
||||
"golang.org/x/tools/internal/moreiters"
|
||||
"golang.org/x/tools/internal/packagepath"
|
||||
"golang.org/x/tools/internal/refactor"
|
||||
|
|
@ -204,19 +202,12 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur inspector.Cursor) {
|
|||
var edits []analysis.TextEdit
|
||||
if !lazyEdits {
|
||||
// Inline the call.
|
||||
content, err := a.readFile(call)
|
||||
if err != nil {
|
||||
a.pass.Reportf(call.Lparen, "invalid inlining candidate: cannot read source file: %v", err)
|
||||
return
|
||||
}
|
||||
curFile := astutil.EnclosingFile(cur)
|
||||
caller := &inline.Caller{
|
||||
Fset: a.pass.Fset,
|
||||
Types: a.pass.Pkg,
|
||||
Info: a.pass.TypesInfo,
|
||||
File: curFile,
|
||||
Call: call,
|
||||
Content: content,
|
||||
Fset: a.pass.Fset,
|
||||
Types: a.pass.Pkg,
|
||||
Info: a.pass.TypesInfo,
|
||||
File: astutil.EnclosingFile(cur),
|
||||
Call: call,
|
||||
CountUses: func(pkgname *types.PkgName) int {
|
||||
return moreiters.Len(a.index.Uses(pkgname))
|
||||
},
|
||||
|
|
@ -245,15 +236,7 @@ func (a *analyzer) inlineCall(call *ast.CallExpr, cur inspector.Cursor) {
|
|||
// The flag allows them to decline such fixes.
|
||||
return
|
||||
}
|
||||
got := res.Content
|
||||
|
||||
for _, edit := range diff.Bytes(content, got) {
|
||||
edits = append(edits, analysis.TextEdit{
|
||||
Pos: curFile.FileStart + token.Pos(edit.Start),
|
||||
End: curFile.FileStart + token.Pos(edit.End),
|
||||
NewText: []byte(edit.New),
|
||||
})
|
||||
}
|
||||
edits = res.Edits
|
||||
}
|
||||
|
||||
a.pass.Report(analysis.Diagnostic{
2 src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go generated vendored
@ -35,7 +35,7 @@ var ForVarAnalyzer = &analysis.Analyzer{
// where the two idents are the same,
// and the ident is defined (:=) as a variable in the for statement.
// (Note that this 'fix' does not work for three clause loops
// because the Go specfilesUsingGoVersionsays "The variable used by each subsequent iteration
// because the Go spec says "The variable used by each subsequent iteration
// is declared implicitly before executing the post statement and initialized to the
// value of the previous iteration's variable at that moment.")
//
17 src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go generated vendored
@ -161,7 +161,22 @@ func rangeint(pass *analysis.Pass) (any, error) {
|
|||
// don't offer a fix, as a range loop
|
||||
// leaves i with a different final value (limit-1).
|
||||
if init.Tok == token.ASSIGN {
|
||||
for curId := range curLoop.Parent().Preorder((*ast.Ident)(nil)) {
|
||||
// Find the nearest ancestor that is not a label.
|
||||
// Otherwise, checking for i usage outside of a for
|
||||
// loop might not function properly further below.
|
||||
// This is because the i usage might be a child of
|
||||
// the loop's parent's parent, for example:
|
||||
// var i int
|
||||
// Loop:
|
||||
// for i = 0; i < 10; i++ { break loop }
|
||||
// // i is in the sibling of the label, not the loop
|
||||
// fmt.Println(i)
|
||||
//
|
||||
ancestor := curLoop.Parent()
|
||||
for is[*ast.LabeledStmt](ancestor.Node()) {
|
||||
ancestor = ancestor.Parent()
|
||||
}
|
||||
for curId := range ancestor.Preorder((*ast.Ident)(nil)) {
|
||||
id := curId.Node().(*ast.Ident)
|
||||
if info.Uses[id] == v {
|
||||
// Is i used after loop?
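The comments above describe the labeled-loop case this hunk fixes. A minimal illustrative Go program (the label name Loop and the fmt.Println use are examples, not taken from the CL) shows why the analyzer must walk past *ast.LabeledStmt ancestors: a use of i after the loop is a sibling of the labeled statement, not of the for statement, so scanning only the for statement's parent would miss it and the range-over-int rewrite would wrongly change i's final value.

package main

import "fmt"

func main() {
	var i int
Loop:
	for i = 0; i < 10; i++ {
		if i == 5 {
			break Loop
		}
	}
	// This statement is a sibling of the *ast.LabeledStmt, not of the
	// *ast.ForStmt, so the fix must look past the label to find it;
	// because i is observed here, no range-over-int fix is offered.
	fmt.Println(i)
}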
68 src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go generated vendored
@ -43,23 +43,29 @@ func init() {
|
|||
// iter.Seq.
|
||||
var stditeratorsTable = [...]struct {
|
||||
pkgpath, typename, lenmethod, atmethod, itermethod, elemname string
|
||||
|
||||
seqn int // 1 or 2 => "for x" or "for _, x"
|
||||
}{
|
||||
// Example: in go/types, (*Tuple).Variables returns an
|
||||
// iterator that replaces a loop over (*Tuple).{Len,At}.
|
||||
// The loop variable is named "v".
|
||||
{"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp"},
|
||||
{"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method"},
|
||||
{"go/types", "Interface", "NumMethods", "Method", "Methods", "method"},
|
||||
{"go/types", "MethodSet", "Len", "At", "Methods", "method"},
|
||||
{"go/types", "Named", "NumMethods", "Method", "Methods", "method"},
|
||||
{"go/types", "Scope", "NumChildren", "Child", "Children", "child"},
|
||||
{"go/types", "Struct", "NumFields", "Field", "Fields", "field"},
|
||||
{"go/types", "Tuple", "Len", "At", "Variables", "v"},
|
||||
{"go/types", "TypeList", "Len", "At", "Types", "t"},
|
||||
{"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam"},
|
||||
{"go/types", "Union", "Len", "Term", "Terms", "term"},
|
||||
// TODO(adonovan): support Seq2. Bonus: transform uses of both key and value.
|
||||
// {"reflect", "Value", "NumFields", "Field", "Fields", "field"},
|
||||
{"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp", 1},
|
||||
{"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method", 1},
|
||||
{"go/types", "Interface", "NumMethods", "Method", "Methods", "method", 1},
|
||||
{"go/types", "MethodSet", "Len", "At", "Methods", "method", 1},
|
||||
{"go/types", "Named", "NumMethods", "Method", "Methods", "method", 1},
|
||||
{"go/types", "Scope", "NumChildren", "Child", "Children", "child", 1},
|
||||
{"go/types", "Struct", "NumFields", "Field", "Fields", "field", 1},
|
||||
{"go/types", "Tuple", "Len", "At", "Variables", "v", 1},
|
||||
{"go/types", "TypeList", "Len", "At", "Types", "t", 1},
|
||||
{"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam", 1},
|
||||
{"go/types", "Union", "Len", "Term", "Terms", "term", 1},
|
||||
{"reflect", "Type", "NumField", "Field", "Fields", "field", 1},
|
||||
{"reflect", "Type", "NumMethod", "Method", "Methods", "method", 1},
|
||||
{"reflect", "Type", "NumIn", "In", "Ins", "in", 1},
|
||||
{"reflect", "Type", "NumOut", "Out", "Outs", "out", 1},
|
||||
{"reflect", "Value", "NumField", "Field", "Fields", "field", 2},
|
||||
{"reflect", "Value", "NumMethod", "Method", "Methods", "method", 2},
|
||||
}
|
||||
|
||||
// stditerators suggests fixes to replace loops using Len/At-style
|
||||
|
|
@ -86,6 +92,19 @@ var stditeratorsTable = [...]struct {
|
|||
// the user hasn't intentionally chosen not to use an
|
||||
// iterator for that reason? We don't want to go fix to
|
||||
// undo optimizations. Do we need a suppression mechanism?
|
||||
//
|
||||
// TODO(adonovan): recognize the more complex patterns that
|
||||
// could make full use of both components of an iter.Seq2, e.g.
|
||||
//
|
||||
// for i := 0; i < v.NumField(); i++ {
|
||||
// use(v.Field(i), v.Type().Field(i))
|
||||
// }
|
||||
//
|
||||
// =>
|
||||
//
|
||||
// for structField, field := range v.Fields() {
|
||||
// use(structField, field)
|
||||
// }
|
||||
func stditerators(pass *analysis.Pass) (any, error) {
|
||||
var (
|
||||
index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index)
|
||||
|
|
@ -228,15 +247,17 @@ func stditerators(pass *analysis.Pass) (any, error) {
|
|||
indexVar = v
|
||||
curBody = curFor.ChildAt(edge.ForStmt_Body, -1)
|
||||
elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
|
||||
elemPrefix := cond(row.seqn == 2, "_, ", "")
|
||||
|
||||
// for i := 0; i < x.Len(); i++ {
|
||||
// ---- ------- --- -----
|
||||
// for elem := range x.All() {
|
||||
// for i := 0; i < x.Len(); i++ {
|
||||
// ---- ------- --- -----
|
||||
// for elem := range x.All() {
|
||||
// or for _, elem := ...
|
||||
edits = []analysis.TextEdit{
|
||||
{
|
||||
Pos: v.Pos(),
|
||||
End: v.Pos() + token.Pos(len(v.Name())),
|
||||
NewText: []byte(elem),
|
||||
NewText: []byte(elemPrefix + elem),
|
||||
},
|
||||
{
|
||||
Pos: loop.Init.(*ast.AssignStmt).Rhs[0].Pos(),
|
||||
|
|
@ -271,6 +292,7 @@ func stditerators(pass *analysis.Pass) (any, error) {
|
|||
indexVar = info.Defs[id].(*types.Var)
|
||||
curBody = curRange.ChildAt(edge.RangeStmt_Body, -1)
|
||||
elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
|
||||
elemPrefix := cond(row.seqn == 2, "_, ", "")
|
||||
|
||||
// for i := range x.Len() {
|
||||
// ---- ---
|
||||
|
|
@ -279,7 +301,7 @@ func stditerators(pass *analysis.Pass) (any, error) {
|
|||
{
|
||||
Pos: loop.Key.Pos(),
|
||||
End: loop.Key.End(),
|
||||
NewText: []byte(elem),
|
||||
NewText: []byte(elemPrefix + elem),
|
||||
},
|
||||
{
|
||||
Pos: lenSel.Sel.Pos(),
|
||||
|
|
@ -344,8 +366,8 @@ func stditerators(pass *analysis.Pass) (any, error) {
|
|||
// (In the long run, version filters are not highly selective,
|
||||
// so there's no need to do them first, especially as this check
|
||||
// may be somewhat expensive.)
|
||||
if v, ok := methodGoVersion(row.pkgpath, row.typename, row.itermethod); !ok {
|
||||
panic("no version found")
|
||||
if v, err := methodGoVersion(row.pkgpath, row.typename, row.itermethod); err != nil {
|
||||
panic(err)
|
||||
} else if !analyzerutil.FileUsesGoVersion(pass, astutil.EnclosingFile(curLenCall), v.String()) {
|
||||
continue nextCall
|
||||
}
|
||||
|
|
@ -371,7 +393,7 @@ func stditerators(pass *analysis.Pass) (any, error) {
|
|||
|
||||
// methodGoVersion reports the version at which the method
|
||||
// (pkgpath.recvtype).method appeared in the standard library.
|
||||
func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
|
||||
func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, error) {
|
||||
// TODO(adonovan): opt: this might be inefficient for large packages
|
||||
// like go/types. If so, memoize using a map (and kill two birds with
|
||||
// one stone by also memoizing the 'within' check above).
|
||||
|
|
@ -379,9 +401,9 @@ func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
|
|||
if sym.Kind == stdlib.Method {
|
||||
_, recv, name := sym.SplitMethod()
|
||||
if recv == recvtype && name == method {
|
||||
return sym.Version, true
|
||||
return sym.Version, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
return 0, fmt.Errorf("methodGoVersion: %s.%s.%s missing from stdlib manifest", pkgpath, recvtype, method)
|
||||
}
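To make the new table rows concrete, here is a rough before/after sketch of the rewrites the stditerators check now suggests for reflect. It assumes the Fields iterator methods named in the rows above exist at the Go version recorded in the stdlib manifest (the version gate added in this hunk); the use helper is only for illustration.

package main

import (
	"fmt"
	"reflect"
)

func use(x any) { fmt.Println(x) }

func main() {
	t := reflect.TypeOf(struct{ A, B int }{})

	// Before: the Len/At-style loop that stditerators looks for.
	for i := 0; i < t.NumField(); i++ {
		use(t.Field(i))
	}

	// After: the iterator form suggested by the reflect rows added above
	// (reflect.Type.Fields is assumed to be available at the version the
	// stdlib manifest records for it; seqn == 1, so a single loop variable).
	for field := range t.Fields() {
		use(field)
	}

	// A seqn == 2 row, such as reflect.Value.Fields, yields a key/value
	// pair, which is why the fix prefixes the element name with "_, ".
	v := reflect.ValueOf(struct{ A, B int }{})
	for _, field := range v.Fields() {
		use(field)
	}
}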
6 src/cmd/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go generated vendored
@ -137,8 +137,10 @@ func waitgroup(pass *analysis.Pass) (any, error) {
|
|||
}
|
||||
|
||||
pass.Report(analysis.Diagnostic{
|
||||
Pos: addCall.Pos(),
|
||||
End: goStmt.End(),
|
||||
// go func() {
|
||||
// ~~~~~~~~~
|
||||
Pos: goStmt.Pos(),
|
||||
End: lit.Type.End(),
|
||||
Message: "Goroutine creation can be simplified using WaitGroup.Go",
|
||||
SuggestedFixes: []analysis.SuggestedFix{{
|
||||
Message: "Simplify by using WaitGroup.Go",
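For reference, a small sketch of the code pattern this waitgroup check targets. The hunk above only narrows the diagnostic's highlight to the "go func" tokens; the suggested rewrite to sync.WaitGroup.Go is unchanged. The work function is a placeholder.

package main

import "sync"

func work() {}

func main() {
	var wg sync.WaitGroup

	// Before: the pattern the modernize waitgroup check reports. With this
	// CL the diagnostic highlights only "go func() {" rather than the span
	// from wg.Add(1) to the end of the go statement.
	wg.Add(1)
	go func() {
		defer wg.Done()
		work()
	}()

	// After: the suggested fix.
	wg.Go(func() {
		work()
	})

	wg.Wait()
}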
133 src/cmd/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go generated vendored
@ -137,6 +137,7 @@ type wrapper struct {
|
|||
callers []printfCaller
|
||||
}
|
||||
|
||||
// printfCaller is a candidate print{,f} forwarding call from candidate wrapper w.
|
||||
type printfCaller struct {
|
||||
w *wrapper
|
||||
call *ast.CallExpr // forwarding call (nil for implicit interface method -> impl calls)
|
||||
|
|
@ -246,7 +247,7 @@ func findPrintLike(pass *analysis.Pass, res *Result) {
|
|||
switch lhs := lhs.(type) {
|
||||
case *ast.Ident:
|
||||
// variable: wrapf = func(...)
|
||||
v = info.ObjectOf(lhs).(*types.Var)
|
||||
v, _ = info.ObjectOf(lhs).(*types.Var)
|
||||
case *ast.SelectorExpr:
|
||||
if sel, ok := info.Selections[lhs]; ok {
|
||||
// struct field: x.wrapf = func(...)
|
||||
|
|
@ -291,35 +292,35 @@ func findPrintLike(pass *analysis.Pass, res *Result) {
|
|||
// var _ Logger = myLogger{}
|
||||
impls := methodImplementations(pass)
|
||||
|
||||
// doCall records a call from one wrapper to another.
|
||||
doCall := func(w *wrapper, callee types.Object, call *ast.CallExpr) {
|
||||
// Call from one wrapper candidate to another?
|
||||
// Record the edge so that if callee is found to be
|
||||
// a true wrapper, w will be too.
|
||||
if w2, ok := byObj[callee]; ok {
|
||||
w2.callers = append(w2.callers, printfCaller{w, call})
|
||||
}
|
||||
|
||||
// Is the candidate a true wrapper, because it calls
|
||||
// a known print{,f}-like function from the allowlist
|
||||
// or an imported fact, or another wrapper found
|
||||
// to be a true wrapper?
|
||||
// If so, convert all w's callers to kind.
|
||||
kind := callKind(pass, callee, res)
|
||||
if kind != KindNone {
|
||||
propagate(pass, w, call, kind, res)
|
||||
}
|
||||
}
|
||||
|
||||
// Pass 2: scan the body of each wrapper function
|
||||
// for calls to other printf-like functions.
|
||||
for _, w := range wrappers {
|
||||
|
||||
// doCall records a call from one wrapper to another.
|
||||
doCall := func(callee types.Object, call *ast.CallExpr) {
|
||||
// Call from one wrapper candidate to another?
|
||||
// Record the edge so that if callee is found to be
|
||||
// a true wrapper, w will be too.
|
||||
if w2, ok := byObj[callee]; ok {
|
||||
w2.callers = append(w2.callers, printfCaller{w, call})
|
||||
}
|
||||
|
||||
// Is the candidate a true wrapper, because it calls
|
||||
// a known print{,f}-like function from the allowlist
|
||||
// or an imported fact, or another wrapper found
|
||||
// to be a true wrapper?
|
||||
// If so, convert all w's callers to kind.
|
||||
kind := callKind(pass, callee, res)
|
||||
if kind != KindNone {
|
||||
checkForward(pass, w, call, kind, res)
|
||||
}
|
||||
}
|
||||
|
||||
// An interface method has no body, but acts
|
||||
// like an implicit call to each implementing method.
|
||||
if w.curBody.Inspector() == nil {
|
||||
for impl := range impls[w.obj.(*types.Func)] {
|
||||
doCall(impl, nil)
|
||||
doCall(w, impl, nil)
|
||||
}
|
||||
continue // (no body)
|
||||
}
|
||||
|
|
@ -360,7 +361,7 @@ func findPrintLike(pass *analysis.Pass, res *Result) {
|
|||
case *ast.CallExpr:
|
||||
if len(n.Args) > 0 && match(info, n.Args[len(n.Args)-1], w.args) {
|
||||
if callee := typeutil.Callee(pass.TypesInfo, n); callee != nil {
|
||||
doCall(callee, n)
|
||||
doCall(w, callee, n)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -414,44 +415,15 @@ func match(info *types.Info, arg ast.Expr, param *types.Var) bool {
|
|||
return ok && info.ObjectOf(id) == param
|
||||
}
|
||||
|
||||
// checkForward checks whether a forwarding wrapper is forwarding correctly.
|
||||
// If so, it propagates changes in wrapper kind information backwards
|
||||
// through through the wrapper.callers graph of forwarding calls.
|
||||
//
|
||||
// If not, it reports a diagnostic that the user wrote
|
||||
// fmt.Printf(format, args) instead of fmt.Printf(format, args...).
|
||||
func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind, res *Result) {
|
||||
// propagate propagates changes in wrapper (non-None) kind information backwards
|
||||
// through through the wrapper.callers graph of well-formed forwarding calls.
|
||||
func propagate(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind, res *Result) {
|
||||
// Check correct call forwarding.
|
||||
// (Interface methods forward correctly by construction.)
|
||||
if call != nil {
|
||||
matched := kind == KindPrint ||
|
||||
kind != KindNone && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format)
|
||||
if !matched {
|
||||
return
|
||||
}
|
||||
|
||||
if !call.Ellipsis.IsValid() {
|
||||
typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature)
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
if len(call.Args) > typ.Params().Len() {
|
||||
// If we're passing more arguments than what the
|
||||
// print/printf function can take, adding an ellipsis
|
||||
// would break the program. For example:
|
||||
//
|
||||
// func foo(arg1 string, arg2 ...interface{}) {
|
||||
// fmt.Printf("%s %v", arg1, arg2)
|
||||
// }
|
||||
return
|
||||
}
|
||||
desc := "printf"
|
||||
if kind == KindPrint {
|
||||
desc = "print"
|
||||
}
|
||||
pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", desc)
|
||||
return
|
||||
}
|
||||
//
|
||||
// Interface methods (call==nil) forward
|
||||
// correctly by construction.
|
||||
if call != nil && !checkForward(pass, w, call, kind) {
|
||||
return
|
||||
}
|
||||
|
||||
// If the candidate's print{,f} status becomes known,
|
||||
|
|
@ -471,11 +443,50 @@ func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind
|
|||
|
||||
// Propagate kind back to known callers.
|
||||
for _, caller := range w.callers {
|
||||
checkForward(pass, caller.w, caller.call, kind, res)
|
||||
propagate(pass, caller.w, caller.call, kind, res)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// checkForward checks whether a call from wrapper w is a well-formed
|
||||
// forwarding call of the specified (non-None) kind.
|
||||
//
|
||||
// If not, it reports a diagnostic that the user wrote
|
||||
// fmt.Printf(format, args) instead of fmt.Printf(format, args...).
|
||||
func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind) bool {
|
||||
// Printf/Errorf calls must delegate the format string.
|
||||
switch kind {
|
||||
case KindPrintf, KindErrorf:
|
||||
if len(call.Args) < 2 || !match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// The args... delegation must be variadic.
|
||||
// (That args is actually delegated was
|
||||
// established before the root call to doCall.)
|
||||
if !call.Ellipsis.IsValid() {
|
||||
typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
if len(call.Args) > typ.Params().Len() {
|
||||
// If we're passing more arguments than what the
|
||||
// print/printf function can take, adding an ellipsis
|
||||
// would break the program. For example:
|
||||
//
|
||||
// func foo(arg1 string, arg2 ...interface{}) {
|
||||
// fmt.Printf("%s %v", arg1, arg2)
|
||||
// }
|
||||
return false
|
||||
}
|
||||
pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", kind)
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func origin(obj types.Object) types.Object {
|
||||
switch obj := obj.(type) {
|
||||
case *types.Func:
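A short, self-contained example of the forwarding mistake that checkForward (now separated from propagate above) reports; the wrapper names logf and logf2 are invented for illustration.

package main

import "fmt"

// logf is a printf wrapper: it forwards format and args to fmt.Printf.
func logf(format string, args ...any) {
	// Missing "..." — args is passed as a single []any operand, so the
	// printf check reports:
	//   missing ... in args forwarded to printf-like function
	fmt.Printf(format, args)
}

// logf2 forwards correctly, so logf2 itself is treated as printf-like
// and its own callers are checked in turn.
func logf2(format string, args ...any) {
	fmt.Printf(format, args...)
}

func main() {
	logf2("%d items\n", 3)
}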
7 src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/fix.go generated vendored
@ -339,6 +339,9 @@ fixloop:
|
|||
// information for the fixed file and thus cannot accurately tell
|
||||
// whether k is among the free names of T{k: 0}, which requires
|
||||
// knowledge of whether T is a struct type.
|
||||
//
|
||||
// Like [imports.Process] (the core of x/tools/cmd/goimports), it also
|
||||
// merges import decls.
|
||||
func FormatSourceRemoveImports(pkg *types.Package, src []byte) ([]byte, error) {
|
||||
// This function was reduced from the "strict entire file"
|
||||
// path through [format.Source].
|
||||
|
|
@ -353,6 +356,10 @@ func FormatSourceRemoveImports(pkg *types.Package, src []byte) ([]byte, error) {
|
|||
|
||||
removeUnneededImports(fset, pkg, file)
|
||||
|
||||
// TODO(adonovan): to generate cleaner edits when adding an import,
|
||||
// consider adding a call to imports.mergeImports; however, it does
|
||||
// cause comments to migrate.
|
||||
|
||||
// printerNormalizeNumbers means to canonicalize number literal prefixes
|
||||
// and exponents while printing. See https://golang.org/doc/go1.13#gofmt.
|
||||
//
|
||||
9 src/cmd/vendor/golang.org/x/tools/internal/analysis/driverutil/print.go generated vendored
@ -7,6 +7,7 @@ package driverutil
|
|||
// This file defined output helpers common to all drivers.
|
||||
|
||||
import (
|
||||
"cmp"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"go/token"
|
||||
|
|
@ -76,11 +77,10 @@ type JSONSuggestedFix struct {
|
|||
}
|
||||
|
||||
// A JSONDiagnostic describes the JSON schema of an analysis.Diagnostic.
|
||||
//
|
||||
// TODO(matloob): include End position if present.
|
||||
type JSONDiagnostic struct {
|
||||
Category string `json:"category,omitempty"`
|
||||
Posn string `json:"posn"` // e.g. "file.go:line:column"
|
||||
End string `json:"end"` // (ditto)
|
||||
Message string `json:"message"`
|
||||
SuggestedFixes []JSONSuggestedFix `json:"suggested_fixes,omitempty"`
|
||||
Related []JSONRelatedInformation `json:"related,omitempty"`
|
||||
|
|
@ -88,10 +88,9 @@ type JSONDiagnostic struct {
|
|||
|
||||
// A JSONRelated describes a secondary position and message related to
|
||||
// a primary diagnostic.
|
||||
//
|
||||
// TODO(adonovan): include End position if present.
|
||||
type JSONRelatedInformation struct {
|
||||
Posn string `json:"posn"` // e.g. "file.go:line:column"
|
||||
End string `json:"end"` // (ditto)
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
|
|
@ -127,12 +126,14 @@ func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.
|
|||
for _, r := range f.Related {
|
||||
related = append(related, JSONRelatedInformation{
|
||||
Posn: fset.Position(r.Pos).String(),
|
||||
End: fset.Position(cmp.Or(r.End, r.Pos)).String(),
|
||||
Message: r.Message,
|
||||
})
|
||||
}
|
||||
jdiag := JSONDiagnostic{
|
||||
Category: f.Category,
|
||||
Posn: fset.Position(f.Pos).String(),
|
||||
End: fset.Position(cmp.Or(f.End, f.Pos)).String(),
|
||||
Message: f.Message,
|
||||
SuggestedFixes: fixes,
|
||||
Related: related,
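A small runnable sketch of what the new end field looks like in the -json output. The struct here merely mirrors the JSONDiagnostic tags shown above so the example compiles outside the internal package; the positions and message are made up.

package main

import (
	"encoding/json"
	"fmt"
)

// jsonDiagnostic mirrors the exported shape of driverutil's JSONDiagnostic
// (field names and tags copied from the diff above); it is declared locally
// only so this example is self-contained.
type jsonDiagnostic struct {
	Category string `json:"category,omitempty"`
	Posn     string `json:"posn"`
	End      string `json:"end"`
	Message  string `json:"message"`
}

func main() {
	d := jsonDiagnostic{
		Posn:    "main.go:12:2",
		End:     "main.go:12:14", // now populated; falls back to Posn when the diagnostic has no End
		Message: "missing ... in args forwarded to printf-like function",
	}
	out, _ := json.MarshalIndent(d, "", "  ")
	fmt.Println(string(out))
}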
39 src/cmd/vendor/golang.org/x/tools/internal/refactor/delete.go generated vendored
@ -13,7 +13,6 @@ import (
|
|||
"go/types"
|
||||
"slices"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/ast/edge"
|
||||
"golang.org/x/tools/go/ast/inspector"
|
||||
"golang.org/x/tools/internal/astutil"
|
||||
|
|
@ -32,7 +31,7 @@ import (
|
|||
//
|
||||
// If it cannot make the necessary edits, such as for a function
|
||||
// parameter or result, it returns nil.
|
||||
func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []analysis.TextEdit {
|
||||
func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []Edit {
|
||||
switch ek, _ := curId.ParentEdge(); ek {
|
||||
case edge.ValueSpec_Names:
|
||||
return deleteVarFromValueSpec(tokFile, info, curId)
|
||||
|
|
@ -52,7 +51,7 @@ func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []
|
|||
// Precondition: curId is Ident beneath ValueSpec.Names beneath GenDecl.
|
||||
//
|
||||
// See also [deleteVarFromAssignStmt], which has parallel structure.
|
||||
func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []analysis.TextEdit {
|
||||
func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []Edit {
|
||||
var (
|
||||
id = curIdent.Node().(*ast.Ident)
|
||||
curSpec = curIdent.Parent()
|
||||
|
|
@ -95,7 +94,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
|
|||
pos = spec.Names[index].Pos()
|
||||
end = spec.Names[index+1].Pos()
|
||||
}
|
||||
return []analysis.TextEdit{{
|
||||
return []Edit{{
|
||||
Pos: pos,
|
||||
End: end,
|
||||
}}
|
||||
|
|
@ -111,7 +110,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
|
|||
//
|
||||
// var _, lhs1 = rhs0, rhs1
|
||||
// ------ ------
|
||||
return []analysis.TextEdit{
|
||||
return []Edit{
|
||||
{
|
||||
Pos: spec.Names[index-1].End(),
|
||||
End: spec.Names[index].End(),
|
||||
|
|
@ -126,7 +125,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
|
|||
//
|
||||
// var lhs0, _ = rhs0, rhs1
|
||||
// ------ ------
|
||||
return []analysis.TextEdit{
|
||||
return []Edit{
|
||||
{
|
||||
Pos: spec.Names[index].Pos(),
|
||||
End: spec.Names[index+1].Pos(),
|
||||
|
|
@ -141,7 +140,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
|
|||
|
||||
// We cannot delete the RHS.
|
||||
// Blank out the LHS.
|
||||
return []analysis.TextEdit{{
|
||||
return []Edit{{
|
||||
Pos: id.Pos(),
|
||||
End: id.End(),
|
||||
NewText: []byte("_"),
|
||||
|
|
@ -151,7 +150,7 @@ func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent insp
|
|||
// Precondition: curId is Ident beneath AssignStmt.Lhs.
|
||||
//
|
||||
// See also [deleteVarFromValueSpec], which has parallel structure.
|
||||
func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []analysis.TextEdit {
|
||||
func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []Edit {
|
||||
var (
|
||||
id = curIdent.Node().(*ast.Ident)
|
||||
curStmt = curIdent.Parent()
|
||||
|
|
@ -192,7 +191,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
|
|||
//
|
||||
// _, lhs1 := rhs0, rhs1
|
||||
// ------ ------
|
||||
return []analysis.TextEdit{
|
||||
return []Edit{
|
||||
{
|
||||
Pos: assign.Lhs[index-1].End(),
|
||||
End: assign.Lhs[index].End(),
|
||||
|
|
@ -207,7 +206,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
|
|||
//
|
||||
// lhs0, _ := rhs0, rhs1
|
||||
// ------ ------
|
||||
return []analysis.TextEdit{
|
||||
return []Edit{
|
||||
{
|
||||
Pos: assign.Lhs[index].Pos(),
|
||||
End: assign.Lhs[index+1].Pos(),
|
||||
|
|
@ -222,7 +221,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
|
|||
|
||||
// We cannot delete the RHS.
|
||||
// Blank out the LHS.
|
||||
edits := []analysis.TextEdit{{
|
||||
edits := []Edit{{
|
||||
Pos: id.Pos(),
|
||||
End: id.End(),
|
||||
NewText: []byte("_"),
|
||||
|
|
@ -233,7 +232,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
|
|||
// assignment to avoid a "no new variables on left
|
||||
// side of :=" error.
|
||||
if !declaresOtherNames {
|
||||
edits = append(edits, analysis.TextEdit{
|
||||
edits = append(edits, Edit{
|
||||
Pos: assign.TokPos,
|
||||
End: assign.TokPos + token.Pos(len(":=")),
|
||||
NewText: []byte("="),
|
||||
|
|
@ -246,7 +245,7 @@ func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent ins
|
|||
// DeleteSpec returns edits to delete the {Type,Value}Spec identified by curSpec.
|
||||
//
|
||||
// TODO(adonovan): add test suite. Test for consts as well.
|
||||
func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEdit {
|
||||
func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []Edit {
|
||||
var (
|
||||
spec = curSpec.Node().(ast.Spec)
|
||||
curDecl = curSpec.Parent()
|
||||
|
|
@ -277,7 +276,7 @@ func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEd
|
|||
// -----
|
||||
end = decl.Specs[index+1].Pos()
|
||||
}
|
||||
return []analysis.TextEdit{{
|
||||
return []Edit{{
|
||||
Pos: pos,
|
||||
End: end,
|
||||
}}
|
||||
|
|
@ -286,7 +285,7 @@ func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEd
|
|||
// DeleteDecl returns edits to delete the ast.Decl identified by curDecl.
|
||||
//
|
||||
// TODO(adonovan): add test suite.
|
||||
func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []analysis.TextEdit {
|
||||
func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []Edit {
|
||||
decl := curDecl.Node().(ast.Decl)
|
||||
|
||||
ek, _ := curDecl.ParentEdge()
|
||||
|
|
@ -321,7 +320,7 @@ func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []analysis.TextEd
|
|||
}
|
||||
}
|
||||
|
||||
return []analysis.TextEdit{{
|
||||
return []Edit{{
|
||||
Pos: pos,
|
||||
End: end,
|
||||
}}
|
||||
|
|
@ -366,7 +365,7 @@ func filterPos(nds []*ast.Comment, start, end token.Pos) (token.Pos, token.Pos,
|
|||
// it removes whole lines like
|
||||
//
|
||||
// stmt // comment
|
||||
func DeleteStmt(file *token.File, curStmt inspector.Cursor) []analysis.TextEdit {
|
||||
func DeleteStmt(file *token.File, curStmt inspector.Cursor) []Edit {
|
||||
// if the stmt is on a line by itself, or a range of lines, delete the whole thing
|
||||
// including comments. Except for the heads of switches, type
|
||||
// switches, and for-statements that's the usual case. Complexity occurs where
|
||||
|
|
@ -516,13 +515,13 @@ Big:
|
|||
}
|
||||
}
|
||||
|
||||
return []analysis.TextEdit{{Pos: leftEdit, End: rightEdit}}
|
||||
return []Edit{{Pos: leftEdit, End: rightEdit}}
|
||||
}
|
||||
|
||||
// DeleteUnusedVars computes the edits required to delete the
|
||||
// declarations of any local variables whose last uses are in the
|
||||
// curDelend subtree, which is about to be deleted.
|
||||
func DeleteUnusedVars(index *typeindex.Index, info *types.Info, tokFile *token.File, curDelend inspector.Cursor) []analysis.TextEdit {
|
||||
func DeleteUnusedVars(index *typeindex.Index, info *types.Info, tokFile *token.File, curDelend inspector.Cursor) []Edit {
|
||||
// TODO(adonovan): we might want to generalize this by
|
||||
// splitting the two phases below, so that we can gather
|
||||
// across a whole sequence of deletions then finally compute the
|
||||
|
|
@ -539,7 +538,7 @@ func DeleteUnusedVars(index *typeindex.Index, info *types.Info, tokFile *token.F
|
|||
}
|
||||
|
||||
// Delete declaration of each var that became unused.
|
||||
var edits []analysis.TextEdit
|
||||
var edits []Edit
|
||||
for v, count := range delcount {
|
||||
if len(slices.Collect(index.Uses(v))) == count {
|
||||
if curDefId, ok := index.Def(v); ok {
|
||||
15 src/cmd/vendor/golang.org/x/tools/internal/refactor/edit.go generated vendored Normal file
@ -0,0 +1,15 @@
|
|||
// Copyright 2025 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package refactor
|
||||
|
||||
// This is the only file in this package that should import analysis.
|
||||
//
|
||||
// TODO(adonovan): consider unaliasing the type to break the
|
||||
// dependency. (The ergonomics of slice append are unfortunate.)
|
||||
|
||||
import "golang.org/x/tools/go/analysis"
|
||||
|
||||
// An Edit describes a deletion and/or an insertion.
|
||||
type Edit = analysis.TextEdit
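A minimal sketch of the ergonomics the alias above preserves: because Edit is a type alias rather than a defined type, []Edit and []analysis.TextEdit are identical types, so edits from either API can be appended or assigned without conversion. The positions here are arbitrary, and the alias is redeclared locally only to keep the example self-contained.

package main

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/go/analysis"
)

// Edit mirrors the alias defined in edit.go above.
type Edit = analysis.TextEdit

func main() {
	var edits []Edit
	edits = append(edits, Edit{Pos: token.Pos(10), End: token.Pos(20), NewText: []byte("x")})

	// Identical types, so no conversion is needed in either direction.
	var driverEdits []analysis.TextEdit = edits
	fmt.Println(len(driverEdits))
}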
58 src/cmd/vendor/golang.org/x/tools/internal/refactor/imports.go generated vendored
@ -7,13 +7,12 @@ package refactor
|
|||
// This file defines operations for computing edits to imports.
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
pathpkg "path"
|
||||
"strconv"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/internal/packagepath"
|
||||
)
|
||||
|
||||
|
|
@ -35,7 +34,7 @@ import (
|
|||
// package declares member.
|
||||
//
|
||||
// AddImport does not mutate its arguments.
|
||||
func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (prefix string, edits []analysis.TextEdit) {
|
||||
func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (prefix string, edits []Edit) {
|
||||
// Find innermost enclosing lexical block.
|
||||
scope := info.Scopes[file].Innermost(pos)
|
||||
if scope == nil {
|
||||
|
|
@ -69,33 +68,53 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member
|
|||
newName := preferredName
|
||||
if preferredName != "_" {
|
||||
newName = FreshName(scope, pos, preferredName)
|
||||
prefix = newName + "."
|
||||
}
|
||||
|
||||
// Use a renaming import whenever the preferred name is not
|
||||
// available, or the chosen name does not match the last
|
||||
// segment of its path.
|
||||
if newName == preferredName && newName == pathpkg.Base(pkgpath) {
|
||||
newName = ""
|
||||
}
|
||||
|
||||
return prefix, AddImportEdits(file, newName, pkgpath)
|
||||
}
|
||||
|
||||
// AddImportEdits returns the edits to add an import of the specified
|
||||
// package, without any analysis of whether this is necessary or safe.
|
||||
// If name is nonempty, it is used as an explicit [ImportSpec.Name].
|
||||
//
|
||||
// A sequence of calls to AddImportEdits that each add the file's
|
||||
// first import (or in a file that does not have a grouped import) may
|
||||
// result in multiple import declarations, rather than a single one
|
||||
// with multiple ImportSpecs. However, a subsequent run of
|
||||
// x/tools/cmd/goimports ([imports.Process]) will combine them.
|
||||
//
|
||||
// AddImportEdits does not mutate the AST.
|
||||
func AddImportEdits(file *ast.File, name, pkgpath string) []Edit {
|
||||
newText := strconv.Quote(pkgpath)
|
||||
if name != "" {
|
||||
newText = name + " " + newText
|
||||
}
|
||||
|
||||
// Create a new import declaration either before the first existing
|
||||
// declaration (which must exist), including its comments; or
|
||||
// inside the declaration, if it is an import group.
|
||||
//
|
||||
// Use a renaming import whenever the preferred name is not
|
||||
// available, or the chosen name does not match the last
|
||||
// segment of its path.
|
||||
newText := fmt.Sprintf("%q", pkgpath)
|
||||
if newName != preferredName || newName != pathpkg.Base(pkgpath) {
|
||||
newText = fmt.Sprintf("%s %q", newName, pkgpath)
|
||||
}
|
||||
|
||||
decl0 := file.Decls[0]
|
||||
var before ast.Node = decl0
|
||||
before := decl0.Pos()
|
||||
switch decl0 := decl0.(type) {
|
||||
case *ast.GenDecl:
|
||||
if decl0.Doc != nil {
|
||||
before = decl0.Doc
|
||||
before = decl0.Doc.Pos()
|
||||
}
|
||||
case *ast.FuncDecl:
|
||||
if decl0.Doc != nil {
|
||||
before = decl0.Doc
|
||||
before = decl0.Doc.Pos()
|
||||
}
|
||||
}
|
||||
if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() {
|
||||
var pos token.Pos
|
||||
if gd, ok := decl0.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() {
|
||||
// Have existing grouped import ( ... ) decl.
|
||||
if packagepath.IsStdPackage(pkgpath) && len(gd.Specs) > 0 {
|
||||
// Add spec for a std package before
|
||||
|
|
@ -116,10 +135,13 @@ func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member
|
|||
// No import decl, or non-grouped import.
|
||||
// Add a new import decl before first decl.
|
||||
// (gofmt will merge multiple import decls.)
|
||||
pos = before.Pos()
|
||||
//
|
||||
// TODO(adonovan): do better here; plunder the
|
||||
// mergeImports logic from [imports.Process].
|
||||
pos = before
|
||||
newText = "import " + newText + "\n\n"
|
||||
}
|
||||
return newName + ".", []analysis.TextEdit{{
|
||||
return []Edit{{
|
||||
Pos: pos,
|
||||
End: pos,
|
||||
NewText: []byte(newText),
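A shape-only sketch of calling the new AddImportEdits helper declared above. Since golang.org/x/tools/internal/refactor is an internal package, this compiles only from within the x/tools module; the source string and the choice of "io" are illustrative.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"

	"golang.org/x/tools/internal/refactor"
)

func main() {
	const src = `package p

import "fmt"

var _ = fmt.Sprint
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	// An empty name yields a plain "io" spec; a nonempty name would be
	// emitted as an explicit ImportSpec.Name (a renaming import).
	for _, e := range refactor.AddImportEdits(file, "", "io") {
		fmt.Printf("at %v insert %q\n", fset.Position(e.Pos), e.NewText)
	}
}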
404 src/cmd/vendor/golang.org/x/tools/internal/refactor/inline/inline.go generated vendored
@ -11,14 +11,12 @@ import (
|
|||
"go/constant"
|
||||
"go/format"
|
||||
"go/parser"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"maps"
|
||||
pathpkg "path"
|
||||
"reflect"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/ast/astutil"
|
||||
|
|
@ -26,6 +24,7 @@ import (
|
|||
internalastutil "golang.org/x/tools/internal/astutil"
|
||||
"golang.org/x/tools/internal/astutil/free"
|
||||
"golang.org/x/tools/internal/packagepath"
|
||||
"golang.org/x/tools/internal/refactor"
|
||||
"golang.org/x/tools/internal/typeparams"
|
||||
"golang.org/x/tools/internal/typesinternal"
|
||||
"golang.org/x/tools/internal/versions"
|
||||
|
|
@ -35,12 +34,11 @@ import (
|
|||
//
|
||||
// The client is responsible for populating this struct and passing it to Inline.
|
||||
type Caller struct {
|
||||
Fset *token.FileSet
|
||||
Types *types.Package
|
||||
Info *types.Info
|
||||
File *ast.File
|
||||
Call *ast.CallExpr
|
||||
Content []byte // source of file containing (TODO(adonovan): see comment at Result.Content)
|
||||
Fset *token.FileSet
|
||||
Types *types.Package
|
||||
Info *types.Info
|
||||
File *ast.File
|
||||
Call *ast.CallExpr
|
||||
|
||||
// CountUses is an optional optimized computation of
|
||||
// the number of times pkgname appears in Info.Uses.
|
||||
|
|
@ -61,26 +59,9 @@ type Options struct {
|
|||
|
||||
// Result holds the result of code transformation.
|
||||
type Result struct {
|
||||
// TODO(adonovan): the only textual results that should be
|
||||
// needed are (1) an edit in the vicinity of the call (either
|
||||
// to the CallExpr or one of its ancestors), and optionally
|
||||
// (2) an edit to the import declaration.
|
||||
// Change the inliner API to return a list of edits,
|
||||
// and not to accept a Caller.Content, as it is only
|
||||
// temptation to use such algorithmically expensive
|
||||
// operations as reformatting the entire file, which is
|
||||
// a significant source of non-linear dynamic behavior;
|
||||
// see https://go.dev/issue/75773.
|
||||
// This will require a sequence of changes to the tests
|
||||
// and the inliner algorithm itself.
|
||||
Content []byte // formatted, transformed content of caller file
|
||||
Literalized bool // chosen strategy replaced callee() with func(){...}()
|
||||
BindingDecl bool // transformation added "var params = args" declaration
|
||||
|
||||
// TODO(adonovan): provide an API for clients that want structured
|
||||
// output: a list of import additions and deletions plus one or more
|
||||
// localized diffs (or even AST transformations, though ownership and
|
||||
// mutation are tricky) near the call site.
|
||||
Edits []refactor.Edit // edits around CallExpr and imports
|
||||
Literalized bool // chosen strategy replaced callee() with func(){...}()
|
||||
BindingDecl bool // transformation added "var params = args" declaration
|
||||
}
|
||||
|
||||
// Inline inlines the called function (callee) into the function call (caller)
|
||||
|
|
@ -117,14 +98,8 @@ func (st *state) inline() (*Result, error) {
|
|||
debugFormatNode(caller.Fset, caller.Call),
|
||||
caller.Fset.PositionFor(caller.Call.Lparen, false))
|
||||
|
||||
if !consistentOffsets(caller) {
|
||||
return nil, fmt.Errorf("internal error: caller syntax positions are inconsistent with file content (did you forget to use FileSet.PositionFor when computing the file name?)")
|
||||
}
|
||||
|
||||
// Break the string literal so we can use inlining in this file. :)
|
||||
if ast.IsGenerated(caller.File) &&
|
||||
bytes.Contains(caller.Content, []byte("// Code generated by "+"cmd/cgo; DO NOT EDIT.")) {
|
||||
return nil, fmt.Errorf("cannot inline calls from files that import \"C\"")
|
||||
if ast.IsGenerated(caller.File) {
|
||||
return nil, fmt.Errorf("cannot inline calls from generated files")
|
||||
}
|
||||
|
||||
res, err := st.inlineCall()
|
||||
|
|
@ -224,37 +199,10 @@ func (st *state) inline() (*Result, error) {
|
|||
}
|
||||
}
|
||||
|
||||
// File rewriting. This proceeds in multiple passes, in order to maximally
|
||||
// preserve comment positioning. (This could be greatly simplified once
|
||||
// comments are stored in the tree.)
|
||||
//
|
||||
// Don't call replaceNode(caller.File, res.old, res.new)
|
||||
// as it mutates the caller's syntax tree.
|
||||
// Instead, splice the file, replacing the extent of the "old"
|
||||
// node by a formatting of the "new" node, and re-parse.
|
||||
// We'll fix up the imports on this new tree, and format again.
|
||||
//
|
||||
// Inv: f is the result of parsing content, using fset.
|
||||
var (
|
||||
content = caller.Content
|
||||
fset = caller.Fset
|
||||
f *ast.File // parsed below
|
||||
)
|
||||
reparse := func() error {
|
||||
const mode = parser.ParseComments | parser.SkipObjectResolution | parser.AllErrors
|
||||
f, err = parser.ParseFile(fset, "callee.go", content, mode)
|
||||
if err != nil {
|
||||
// Something has gone very wrong.
|
||||
logf("failed to reparse <<%s>>: %v", string(content), err) // debugging
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
var edits []refactor.Edit
|
||||
|
||||
// Format the cloned callee.
|
||||
{
|
||||
start := offsetOf(fset, res.old.Pos())
|
||||
end := offsetOf(fset, res.old.End())
|
||||
var out bytes.Buffer
|
||||
out.Write(content[:start])
|
||||
// TODO(adonovan): might it make more sense to use
|
||||
// callee.Fset when formatting res.new?
|
||||
// The new tree is a mix of (cloned) caller nodes for
|
||||
|
|
@ -269,148 +217,106 @@ func (st *state) inline() (*Result, error) {
|
|||
// Precise comment handling would make this a
|
||||
// non-issue. Formatting wouldn't really need a
|
||||
// FileSet at all.
|
||||
|
||||
var out bytes.Buffer
|
||||
if elideBraces {
|
||||
for i, stmt := range res.new.(*ast.BlockStmt).List {
|
||||
if i > 0 {
|
||||
out.WriteByte('\n')
|
||||
}
|
||||
if err := format.Node(&out, fset, stmt); err != nil {
|
||||
if err := format.Node(&out, caller.Fset, stmt); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if err := format.Node(&out, fset, res.new); err != nil {
|
||||
if err := format.Node(&out, caller.Fset, res.new); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
out.Write(content[end:])
|
||||
content = out.Bytes()
|
||||
if err := reparse(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
edits = append(edits, refactor.Edit{
|
||||
Pos: res.old.Pos(),
|
||||
End: res.old.End(),
|
||||
NewText: out.Bytes(),
|
||||
})
|
||||
}
|
||||
|
||||
// Add new imports that are still used.
|
||||
newImports := trimNewImports(res.newImports, res.new)
|
||||
// Insert new imports after last existing import,
|
||||
// to avoid migration of pre-import comments.
|
||||
// The imports will be organized below.
|
||||
if len(newImports) > 0 {
|
||||
// If we have imports to add, do so independent of the rest of the file.
|
||||
// Otherwise, the length of the new imports may consume floating comments,
|
||||
// causing them to be printed inside the imports block.
|
||||
var (
|
||||
importDecl *ast.GenDecl
|
||||
comments []*ast.CommentGroup // relevant comments.
|
||||
before, after []byte // pre- and post-amble for the imports block.
|
||||
)
|
||||
if len(f.Imports) > 0 {
|
||||
// Append specs to existing import decl
|
||||
importDecl = f.Decls[0].(*ast.GenDecl)
|
||||
for _, comment := range f.Comments {
|
||||
// Filter comments. Don't use CommentMap.Filter here, because we don't
|
||||
// want to include comments that document the import decl itself, for
|
||||
// example:
|
||||
//
|
||||
// // We don't want this comment to be duplicated.
|
||||
// import (
|
||||
// "something"
|
||||
// )
|
||||
if importDecl.Pos() <= comment.Pos() && comment.Pos() < importDecl.End() {
|
||||
comments = append(comments, comment)
|
||||
}
|
||||
}
|
||||
before = content[:offsetOf(fset, importDecl.Pos())]
|
||||
importDecl.Doc = nil // present in before
|
||||
after = content[offsetOf(fset, importDecl.End()):]
|
||||
} else {
|
||||
// Insert new import decl.
|
||||
importDecl = &ast.GenDecl{Tok: token.IMPORT}
|
||||
f.Decls = prepend[ast.Decl](importDecl, f.Decls...)
|
||||
|
||||
// Make room for the new declaration after the package declaration.
|
||||
pkgEnd := f.Name.End()
|
||||
file := fset.File(pkgEnd)
|
||||
if file == nil {
|
||||
logf("internal error: missing pkg file")
|
||||
return nil, fmt.Errorf("missing pkg file for %s", f.Name.Name)
|
||||
}
|
||||
// Preserve any comments after the package declaration, by splicing in
|
||||
// the new import block after the end of the package declaration line.
|
||||
line := file.Line(pkgEnd)
|
||||
if line < len(file.Lines()) { // line numbers are 1-based
|
||||
nextLinePos := file.LineStart(line + 1)
|
||||
nextLine := offsetOf(fset, nextLinePos)
|
||||
before = slices.Concat(content[:nextLine], []byte("\n"))
|
||||
after = slices.Concat([]byte("\n\n"), content[nextLine:])
|
||||
} else {
|
||||
before = slices.Concat(content, []byte("\n\n"))
|
||||
}
|
||||
}
|
||||
// Add new imports.
|
||||
// Set their position to after the last position of the old imports, to keep
|
||||
// comments on the old imports from moving.
|
||||
lastPos := token.NoPos
|
||||
if lastSpec := last(importDecl.Specs); lastSpec != nil {
|
||||
lastPos = lastSpec.Pos()
|
||||
if c := lastSpec.(*ast.ImportSpec).Comment; c != nil {
|
||||
lastPos = c.Pos()
|
||||
}
|
||||
}
|
||||
for _, imp := range newImports {
|
||||
// Check that the new imports are accessible.
|
||||
path, _ := strconv.Unquote(imp.spec.Path.Value)
|
||||
if !packagepath.CanImport(caller.Types.Path(), path) {
|
||||
return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee, path)
|
||||
}
|
||||
if lastPos.IsValid() {
|
||||
lastPos++
|
||||
imp.spec.Path.ValuePos = lastPos
|
||||
}
|
||||
importDecl.Specs = append(importDecl.Specs, imp.spec)
|
||||
// Add new imports.
|
||||
//
|
||||
// It's possible that not all are needed (e.g. for type names
|
||||
// that melted away), but we'll let the client (such as an
|
||||
// analysis driver) clean it up since it must remove unused
|
||||
// imports anyway.
|
||||
for _, imp := range res.newImports {
|
||||
// Check that the new imports are accessible.
|
||||
if !packagepath.CanImport(caller.Types.Path(), imp.path) {
|
||||
return nil, fmt.Errorf("can't inline function %v as its body refers to inaccessible package %q", callee, imp.path)
|
||||
}
|
||||
|
||||
var out bytes.Buffer
|
||||
out.Write(before)
|
||||
commented := &printer.CommentedNode{
|
||||
Node: importDecl,
|
||||
Comments: comments,
|
||||
}
|
||||
|
||||
if err := format.Node(&out, fset, commented); err != nil {
|
||||
logf("failed to format new importDecl: %v", err) // debugging
|
||||
return nil, err
|
||||
}
|
||||
out.Write(after)
|
||||
content = out.Bytes()
|
||||
if err := reparse(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
// Delete imports referenced only by caller.Call.Fun.
|
||||
for _, oldImport := range res.oldImports {
|
||||
specToDelete := oldImport.spec
|
||||
// We've already validated the import, so we call
|
||||
// AddImportEdits directly to compute the edit.
|
||||
name := ""
|
||||
if specToDelete.Name != nil {
|
||||
name = specToDelete.Name.Name
|
||||
if imp.explicit {
|
||||
name = imp.name
|
||||
}
|
||||
path, _ := strconv.Unquote(specToDelete.Path.Value)
|
||||
astutil.DeleteNamedImport(caller.Fset, f, name, path)
|
||||
edits = append(edits, refactor.AddImportEdits(caller.File, name, imp.path)...)
|
||||
}
|
||||
|
||||
var out bytes.Buffer
|
||||
if err := format.Node(&out, caller.Fset, f); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
newSrc := out.Bytes()
|
||||
|
||||
literalized := false
|
||||
if call, ok := res.new.(*ast.CallExpr); ok && is[*ast.FuncLit](call.Fun) {
|
||||
literalized = true
|
||||
}
|
||||
|
||||
// Delete imports referenced only by caller.Call.Fun.
|
||||
//
|
||||
// It's ambiguous to let the client (e.g. analysis driver)
|
||||
// remove unneeded imports in this case because it is common
|
||||
// to inlining a call from "dir1/a".F to "dir2/a".F, which
|
||||
// leaves two imports of packages named 'a', both providing a.F.
|
||||
//
|
||||
// However, the only two import deletion tools at our disposal
|
||||
// are astutil.DeleteNamedImport, which mutates the AST, and
|
||||
// refactor.Delete{Spec,Decl}, which need a Cursor. So we need
|
||||
// to reinvent the wheel here.
|
||||
for _, oldImport := range res.oldImports {
|
||||
spec := oldImport.spec
|
||||
|
||||
// Include adjacent comments.
|
||||
pos := spec.Pos()
|
||||
if doc := spec.Doc; doc != nil {
|
||||
pos = doc.Pos()
|
||||
}
|
||||
end := spec.End()
|
||||
if doc := spec.Comment; doc != nil {
|
||||
end = doc.End()
|
||||
}
|
||||
|
||||
// Find the enclosing import decl.
|
||||
// If it's paren-less, we must delete it too.
|
||||
for _, decl := range caller.File.Decls {
|
||||
decl, ok := decl.(*ast.GenDecl)
|
||||
if !(ok && decl.Tok == token.IMPORT) {
|
||||
break // stop at first non-import decl
|
||||
}
|
||||
if internalastutil.NodeContainsPos(decl, spec.Pos()) && !decl.Rparen.IsValid() {
|
||||
// Include adjacent comments.
|
||||
pos = decl.Pos()
|
||||
if doc := decl.Doc; doc != nil {
|
||||
pos = doc.Pos()
|
||||
}
|
||||
end = decl.End()
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
edits = append(edits, refactor.Edit{
|
||||
Pos: pos,
|
||||
End: end,
|
||||
})
|
||||
}
|
||||
|
||||
return &Result{
|
||||
Content: newSrc,
|
||||
Edits: edits,
|
||||
Literalized: literalized,
|
||||
BindingDecl: res.bindingDecl,
|
||||
}, nil
|
||||
|
|
@ -424,8 +330,9 @@ type oldImport struct {
|
|||
|
||||
// A newImport is an import that will be added to the caller file.
|
||||
type newImport struct {
|
||||
pkgName string
|
||||
spec *ast.ImportSpec
|
||||
name string
|
||||
path string
|
||||
explicit bool // use name as ImportSpec.Name
|
||||
}
|
||||
|
||||
// importState tracks information about imports.
|
||||
|
|
@ -526,16 +433,12 @@ func (i *importState) importName(pkgPath string, shadow shadowMap) string {
|
|||
return ""
|
||||
}
|
||||
|
||||
// localName returns the local name for a given imported package path,
|
||||
// adding one if it doesn't exists.
|
||||
func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) string {
|
||||
// Does an import already exist that works in this shadowing context?
|
||||
if name := i.importName(pkgPath, shadow); name != "" {
|
||||
return name
|
||||
}
|
||||
|
||||
// findNewLocalName returns a new local package name to use in a particular shadowing context.
|
||||
// It considers the existing local name used by the callee, or construct a new local name
|
||||
// based on the package name.
|
||||
func (i *importState) findNewLocalName(pkgName, calleePkgName string, shadow shadowMap) string {
|
||||
newlyAdded := func(name string) bool {
|
||||
return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.pkgName == name })
|
||||
return slices.ContainsFunc(i.newImports, func(n newImport) bool { return n.name == name })
|
||||
}
|
||||
|
||||
// shadowedInCaller reports whether a candidate package name
|
||||
|
|
@@ -551,74 +454,44 @@ func (i *importState) localName(pkgPath, pkgName string, shadow shadowMap) string {

// import added by callee
//
// Choose local PkgName based on last segment of
// package path plus, if needed, a numeric suffix to
// ensure uniqueness.
// Try to preserve the local package name used by the callee first.
//
// If that is shadowed, choose a local package name based on last segment of
// package path plus, if needed, a numeric suffix to ensure uniqueness.
//
// "init" is not a legal PkgName.
//
// TODO(rfindley): is it worth preserving local package names for callee
// imports? Are they likely to be better or worse than the name we choose
// here?
if shadow[calleePkgName] == 0 && !shadowedInCaller(calleePkgName) && !newlyAdded(calleePkgName) && calleePkgName != "init" {
return calleePkgName
}

base := pkgName
name := base
for n := 0; shadow[name] != 0 || shadowedInCaller(name) || newlyAdded(name) || name == "init"; n++ {
name = fmt.Sprintf("%s%d", base, n)
}
i.logf("adding import %s %q", name, pkgPath)
spec := &ast.ImportSpec{
Path: &ast.BasicLit{
Kind: token.STRING,
Value: strconv.Quote(pkgPath),
},
}
// Use explicit pkgname (out of necessity) when it differs from the declared name,
// or (for good style) when it differs from base(pkgpath).
if name != pkgName || name != pathpkg.Base(pkgPath) {
spec.Name = makeIdent(name)
}
i.newImports = append(i.newImports, newImport{
pkgName: name,
spec: spec,
})
i.importMap[pkgPath] = append(i.importMap[pkgPath], name)

return name
}

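The naming strategy in this hunk prefers the callee's own local package name and falls back to the package name plus a numeric suffix only when that name is shadowed, already taken, or "init". A runnable sketch of that fallback loop follows; pickLocalName and taken are illustrative stand-ins for findNewLocalName and its shadowing/newly-added checks, not the vendored API.

package main

import "fmt"

// pickLocalName prefers the callee's name, then tries base, base0, base1, ...
// until it finds a name that is not taken and is not "init".
func pickLocalName(calleeName, base string, taken func(string) bool) string {
	if calleeName != "init" && !taken(calleeName) {
		return calleeName
	}
	name := base
	for n := 0; taken(name) || name == "init"; n++ {
		name = fmt.Sprintf("%s%d", base, n)
	}
	return name
}

func main() {
	used := map[string]bool{"json": true, "json0": true}
	taken := func(s string) bool { return used[s] }
	fmt.Println(pickLocalName("json", "json", taken)) // json1
}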
// trimNewImports removes imports that are no longer needed.
//
// The list of new imports as constructed by calls to [importState.localName]
// includes all of the packages referenced by the callee.
// But in the process of inlining, we may have dropped some of those references.
// For example, if the callee looked like this:
//
// func F(x int) (p.T) {... /* no mention of p */ ...}
//
// and we inlined by assignment:
//
// v := ...
//
// then the reference to package p drops away.
//
// Remove the excess imports by seeing which remain in new, the expression
// to be inlined.
// We can find those by looking at the free names in new.
// The list of free names cannot include spurious package names.
// Free-name tracking is precise except for the case of an identifier
// key in a composite literal, which names either a field or a value.
// Neither fields nor values are package names.
// Since they are not relevant to removing unused imports, we instruct
// freeishNames to omit composite-literal keys that are identifiers.
func trimNewImports(newImports []newImport, new ast.Node) []newImport {
const omitComplitIdents = false
free := free.Names(new, omitComplitIdents)
var res []newImport
for _, ni := range newImports {
if free[ni.pkgName] {
res = append(res, ni)
}
// localName returns the local name for a given imported package path,
// adding one if it doesn't exists.
func (i *importState) localName(pkgPath, pkgName, calleePkgName string, shadow shadowMap) string {
// Does an import already exist that works in this shadowing context?
if name := i.importName(pkgPath, shadow); name != "" {
return name
}
return res

name := i.findNewLocalName(pkgName, calleePkgName, shadow)
i.logf("adding import %s %q", name, pkgPath)
// Use explicit pkgname (out of necessity) when it differs from the declared name,
// or (for good style) when it differs from base(pkgpath).
i.newImports = append(i.newImports, newImport{
name: name,
path: pkgPath,
explicit: name != pkgName || name != pathpkg.Base(pkgPath),
})
i.importMap[pkgPath] = append(i.importMap[pkgPath], name)
return name
}

type inlineCallResult struct {
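The trimNewImports logic shown in this hunk keeps a new import only if its local name is still free in the inlined syntax. A standalone sketch of that filter is below; trim and freeNames are hypothetical stand-ins for the vendored function and its free-name analysis.

package main

import "fmt"

type newImport struct{ name, path string }

// trim keeps only the imports whose local name still occurs free in the
// inlined expression, dropping the ones whose references disappeared.
func trim(newImports []newImport, freeNames map[string]bool) []newImport {
	var res []newImport
	for _, ni := range newImports {
		if freeNames[ni.name] {
			res = append(res, ni)
		}
	}
	return res
}

func main() {
	imports := []newImport{{"fmt", "fmt"}, {"p", "example.com/p"}}
	free := map[string]bool{"fmt": true} // the reference to package p dropped away
	fmt.Println(trim(imports, free))     // [{fmt fmt}]
}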
@@ -655,14 +528,6 @@ type inlineCallResult struct {
// allows inlining a statement list. However, due to loss of comments, more
// sophisticated rewrites are challenging.
//
// TODO(adonovan): in earlier drafts, the transformation was expressed
// by splicing substrings of the two source files because syntax
// trees don't preserve comments faithfully (see #20744), but such
// transformations don't compose. The current implementation is
// tree-based but is very lossy wrt comments. It would make a good
// candidate for evaluating an alternative fully self-contained tree
// representation, such as any proposed solution to #20744, or even
// dst or some private fork of go/ast.)
// TODO(rfindley): see if we can reduce the amount of comment lossiness by
// using printer.CommentedNode, which has been useful elsewhere.
//
@@ -1381,7 +1246,7 @@ func (st *state) renameFreeObjs(istate *importState) ([]ast.Expr, error) {
var newName ast.Expr
if obj.Kind == "pkgname" {
// Use locally appropriate import, creating as needed.
n := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow)
n := istate.localName(obj.PkgPath, obj.PkgName, obj.Name, obj.Shadow)
newName = makeIdent(n) // imported package
} else if !obj.ValidPos {
// Built-in function, type, or value (e.g. nil, zero):
@@ -1426,7 +1291,7 @@ func (st *state) renameFreeObjs(istate *importState) ([]ast.Expr, error) {

// Form a qualified identifier, pkg.Name.
if qualify {
pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.Shadow)
pkgName := istate.localName(obj.PkgPath, obj.PkgName, obj.PkgName, obj.Shadow)
newName = &ast.SelectorExpr{
X: makeIdent(pkgName),
Sel: makeIdent(obj.Name),
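The qualified-identifier step in this hunk only wraps the locally chosen package name and the object name in an *ast.SelectorExpr. A tiny sketch of that construction with go/ast follows; qualified is a made-up helper, not the vendored code.

package main

import (
	"fmt"
	"go/ast"
)

// qualified builds the pkg.Name selector used to reference a free object
// through an imported package.
func qualified(pkgName, objName string) ast.Expr {
	return &ast.SelectorExpr{
		X:   &ast.Ident{Name: pkgName},
		Sel: &ast.Ident{Name: objName},
	}
}

func main() {
	e := qualified("bytes", "Buffer").(*ast.SelectorExpr)
	fmt.Printf("%s.%s\n", e.X.(*ast.Ident).Name, e.Sel.Name) // bytes.Buffer
}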
@@ -3272,25 +3137,6 @@ func last[T any](slice []T) T {
return *new(T)
}

// consistentOffsets reports whether the portion of caller.Content
// that corresponds to caller.Call can be parsed as a call expression.
// If not, the client has provided inconsistent information, possibly
// because they forgot to ignore line directives when computing the
// filename enclosing the call.
// This is just a heuristic.
func consistentOffsets(caller *Caller) bool {
start := offsetOf(caller.Fset, caller.Call.Pos())
end := offsetOf(caller.Fset, caller.Call.End())
if !(0 < start && start < end && end <= len(caller.Content)) {
return false
}
expr, err := parser.ParseExpr(string(caller.Content[start:end]))
if err != nil {
return false
}
return is[*ast.CallExpr](expr)
}

// needsParens reports whether parens are required to avoid ambiguity
// around the new node replacing the specified old node (which is some
// ancestor of the CallExpr identified by its PathEnclosingInterval).
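The consistentOffsets heuristic removed above boils down to reparsing the byte range that supposedly holds the call and checking that it is an *ast.CallExpr. A self-contained sketch of the same idea follows; looksLikeCall is an illustrative name, not the vendored API.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
)

// looksLikeCall reports whether content[start:end] parses as a call expression,
// mirroring the offset-sanity check described in the removed function.
func looksLikeCall(content []byte, start, end int) bool {
	if !(0 < start && start < end && end <= len(content)) {
		return false
	}
	expr, err := parser.ParseExpr(string(content[start:end]))
	if err != nil {
		return false
	}
	_, ok := expr.(*ast.CallExpr)
	return ok
}

func main() {
	src := []byte("package p\nvar _ = f(1, 2)\n")
	fmt.Println(looksLikeCall(src, 18, 25)) // true: src[18:25] is "f(1, 2)"
}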
3 src/cmd/vendor/golang.org/x/tools/internal/refactor/refactor.go generated vendored
@@ -5,8 +5,7 @@
// Package refactor provides operators to compute common textual edits
// for refactoring tools.
//
// This package should not use features of the analysis API
// other than [analysis.TextEdit].
// This package should not use features of the analysis API other than [Edit].
package refactor

import (
626 src/cmd/vendor/golang.org/x/tools/internal/stdlib/deps.go generated vendored
@ -12,360 +12,364 @@ type pkginfo struct {
|
|||
}
|
||||
|
||||
var deps = [...]pkginfo{
|
||||
{"archive/tar", "\x03n\x03E<\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
|
||||
{"archive/zip", "\x02\x04d\a\x03\x12\x021<\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
|
||||
{"bufio", "\x03n\x84\x01D\x14"},
|
||||
{"bytes", "q*Z\x03\fG\x02\x02"},
|
||||
{"archive/tar", "\x03p\x03F=\x01\n\x01$\x01\x01\x02\x05\b\x02\x01\x02\x02\f"},
|
||||
{"archive/zip", "\x02\x04f\a\x03\x13\x021=\x01+\x05\x01\x0f\x03\x02\x0e\x04"},
|
||||
{"bufio", "\x03p\x86\x01D\x14"},
|
||||
{"bytes", "s+[\x03\fG\x02\x02"},
|
||||
{"cmp", ""},
|
||||
{"compress/bzip2", "\x02\x02\xf1\x01A"},
|
||||
{"compress/flate", "\x02o\x03\x81\x01\f\x033\x01\x03"},
|
||||
{"compress/gzip", "\x02\x04d\a\x03\x14mT"},
|
||||
{"compress/lzw", "\x02o\x03\x81\x01"},
|
||||
{"compress/zlib", "\x02\x04d\a\x03\x12\x01n"},
|
||||
{"container/heap", "\xb7\x02"},
|
||||
{"compress/bzip2", "\x02\x02\xf5\x01A"},
|
||||
{"compress/flate", "\x02q\x03\x83\x01\f\x033\x01\x03"},
|
||||
{"compress/gzip", "\x02\x04f\a\x03\x15nT"},
|
||||
{"compress/lzw", "\x02q\x03\x83\x01"},
|
||||
{"compress/zlib", "\x02\x04f\a\x03\x13\x01o"},
|
||||
{"container/heap", "\xbb\x02"},
|
||||
{"container/list", ""},
|
||||
{"container/ring", ""},
|
||||
{"context", "q[o\x01\r"},
|
||||
{"crypto", "\x86\x01oC"},
|
||||
{"crypto/aes", "\x10\n\t\x95\x02"},
|
||||
{"crypto/cipher", "\x03 \x01\x01\x1f\x11\x1c+Y"},
|
||||
{"crypto/des", "\x10\x15\x1f-+\x9c\x01\x03"},
|
||||
{"crypto/dsa", "D\x04)\x84\x01\r"},
|
||||
{"crypto/ecdh", "\x03\v\f\x10\x04\x16\x04\r\x1c\x84\x01"},
|
||||
{"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x10\a\v\x06\x01\x04\f\x01\x1c\x84\x01\r\x05K\x01"},
|
||||
{"crypto/ed25519", "\x0e\x1e\x11\a\n\a\x1c\x84\x01C"},
|
||||
{"crypto/elliptic", "2?\x84\x01\r9"},
|
||||
{"context", "s\\p\x01\r"},
|
||||
{"crypto", "\x89\x01pC"},
|
||||
{"crypto/aes", "\x10\n\t\x99\x02"},
|
||||
{"crypto/cipher", "\x03 \x01\x01 \x12\x1c,Z"},
|
||||
{"crypto/des", "\x10\x15 .,\x9d\x01\x03"},
|
||||
{"crypto/dsa", "E\x04*\x86\x01\r"},
|
||||
{"crypto/ecdh", "\x03\v\f\x10\x04\x17\x04\x0e\x1c\x86\x01"},
|
||||
{"crypto/ecdsa", "\x0e\x05\x03\x04\x01\x10\b\v\x06\x01\x04\r\x01\x1c\x86\x01\r\x05K\x01"},
|
||||
{"crypto/ed25519", "\x0e\x1e\x12\a\v\a\x1c\x86\x01C"},
|
||||
{"crypto/elliptic", "3@\x86\x01\r9"},
|
||||
{"crypto/fips140", "\"\x05"},
|
||||
{"crypto/hkdf", "/\x14\x01-\x15"},
|
||||
{"crypto/hmac", "\x1a\x16\x13\x01\x111"},
|
||||
{"crypto/internal/boring", "\x0e\x02\ri"},
|
||||
{"crypto/internal/boring/bbig", "\x1a\xe8\x01M"},
|
||||
{"crypto/internal/boring/bcache", "\xbc\x02\x13"},
|
||||
{"crypto/hkdf", "/\x15\x01.\x16"},
|
||||
{"crypto/hmac", "\x1a\x16\x14\x01\x122"},
|
||||
{"crypto/internal/boring", "\x0e\x02\rl"},
|
||||
{"crypto/internal/boring/bbig", "\x1a\xec\x01M"},
|
||||
{"crypto/internal/boring/bcache", "\xc0\x02\x13"},
|
||||
{"crypto/internal/boring/sig", ""},
|
||||
{"crypto/internal/constanttime", ""},
|
||||
{"crypto/internal/cryptotest", "\x03\r\n\b%\x0e\x19\x06\x12\x12 \x04\x06\t\x18\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
|
||||
{"crypto/internal/entropy", "I"},
|
||||
{"crypto/internal/entropy/v1.0.0", "B/\x93\x018\x13"},
|
||||
{"crypto/internal/fips140", "A0\xbd\x01\v\x16"},
|
||||
{"crypto/internal/fips140/aes", "\x03\x1f\x03\x02\x13\x05\x01\x01\x06*\x93\x014"},
|
||||
{"crypto/internal/fips140/aes/gcm", "\"\x01\x02\x02\x02\x11\x05\x01\a*\x90\x01"},
|
||||
{"crypto/internal/fips140/alias", "\xcf\x02"},
|
||||
{"crypto/internal/fips140/bigmod", "'\x18\x01\a*\x93\x01"},
|
||||
{"crypto/internal/fips140/check", "\"\x0e\x06\t\x02\xb4\x01Z"},
|
||||
{"crypto/internal/fips140/check/checktest", "'\x87\x02!"},
|
||||
{"crypto/internal/fips140/drbg", "\x03\x1e\x01\x01\x04\x13\x05\t\x01(\x84\x01\x0f7\x01"},
|
||||
{"crypto/internal/fips140/ecdh", "\x03\x1f\x05\x02\t\r2\x84\x01\x0f7"},
|
||||
{"crypto/internal/fips140/ecdsa", "\x03\x1f\x04\x01\x02\a\x02\x069\x15oF"},
|
||||
{"crypto/internal/fips140/ed25519", "\x03\x1f\x05\x02\x04\v9\xc7\x01\x03"},
|
||||
{"crypto/internal/fips140/edwards25519", "\x1e\t\a\x112\x93\x017"},
|
||||
{"crypto/internal/fips140/edwards25519/field", "'\x13\x052\x93\x01"},
|
||||
{"crypto/internal/fips140/hkdf", "\x03\x1f\x05\t\x06;\x15"},
|
||||
{"crypto/internal/fips140/hmac", "\x03\x1f\x14\x01\x019\x15"},
|
||||
{"crypto/internal/fips140/mlkem", "\x03\x1f\x05\x02\x0e\x03\x052\xca\x01"},
|
||||
{"crypto/internal/fips140/nistec", "\x1e\t\f\f2\x93\x01*\r\x14"},
|
||||
{"crypto/internal/fips140/nistec/fiat", "'\x137\x93\x01"},
|
||||
{"crypto/internal/fips140/pbkdf2", "\x03\x1f\x05\t\x06;\x15"},
|
||||
{"crypto/internal/fips140/rsa", "\x03\x1b\x04\x04\x01\x02\r\x01\x01\x027\x15oF"},
|
||||
{"crypto/internal/fips140/sha256", "\x03\x1f\x1d\x01\a*\x15~"},
|
||||
{"crypto/internal/fips140/sha3", "\x03\x1f\x18\x05\x011\x93\x01K"},
|
||||
{"crypto/internal/fips140/sha512", "\x03\x1f\x1d\x01\a*\x15~"},
|
||||
{"crypto/internal/fips140/ssh", "'_"},
|
||||
{"crypto/internal/fips140/subtle", "\x1e\a\x1a\xc5\x01"},
|
||||
{"crypto/internal/fips140/tls12", "\x03\x1f\x05\t\x06\x029\x15"},
|
||||
{"crypto/internal/fips140/tls13", "\x03\x1f\x05\b\a\t2\x15"},
|
||||
{"crypto/internal/fips140cache", "\xae\x02\r&"},
|
||||
{"crypto/internal/cryptotest", "\x03\r\n\b&\x0f\x19\x06\x13\x12 \x04\x06\t\x19\x01\x11\x11\x1b\x01\a\x05\b\x03\x05\v"},
|
||||
{"crypto/internal/entropy", "J"},
|
||||
{"crypto/internal/entropy/v1.0.0", "C0\x95\x018\x13"},
|
||||
{"crypto/internal/fips140", "B1\xbf\x01\v\x16"},
|
||||
{"crypto/internal/fips140/aes", "\x03\x1f\x03\x02\x14\x05\x01\x01\x06+\x95\x014"},
|
||||
{"crypto/internal/fips140/aes/gcm", "\"\x01\x02\x02\x02\x12\x05\x01\a+\x92\x01"},
|
||||
{"crypto/internal/fips140/alias", "\xd3\x02"},
|
||||
{"crypto/internal/fips140/bigmod", "'\x19\x01\a+\x95\x01"},
|
||||
{"crypto/internal/fips140/check", "\"\x0e\a\t\x02\xb7\x01Z"},
|
||||
{"crypto/internal/fips140/check/checktest", "'\x8b\x02!"},
|
||||
{"crypto/internal/fips140/drbg", "\x03\x1e\x01\x01\x04\x14\x05\t\x01)\x86\x01\x0f7\x01"},
|
||||
{"crypto/internal/fips140/ecdh", "\x03\x1f\x05\x02\n\r3\x86\x01\x0f7"},
|
||||
{"crypto/internal/fips140/ecdsa", "\x03\x1f\x04\x01\x02\a\x03\x06:\x16pF"},
|
||||
{"crypto/internal/fips140/ed25519", "\x03\x1f\x05\x02\x04\f:\xc9\x01\x03"},
|
||||
{"crypto/internal/fips140/edwards25519", "\x1e\t\a\x123\x95\x017"},
|
||||
{"crypto/internal/fips140/edwards25519/field", "'\x14\x053\x95\x01"},
|
||||
{"crypto/internal/fips140/hkdf", "\x03\x1f\x05\t\a<\x16"},
|
||||
{"crypto/internal/fips140/hmac", "\x03\x1f\x15\x01\x01:\x16"},
|
||||
{"crypto/internal/fips140/mldsa", "\x03\x1b\x04\x05\x02\x0e\x01\x03\x053\x95\x017"},
|
||||
{"crypto/internal/fips140/mlkem", "\x03\x1f\x05\x02\x0f\x03\x053\xcc\x01"},
|
||||
{"crypto/internal/fips140/nistec", "\x1e\t\r\f3\x95\x01*\r\x14"},
|
||||
{"crypto/internal/fips140/nistec/fiat", "'\x148\x95\x01"},
|
||||
{"crypto/internal/fips140/pbkdf2", "\x03\x1f\x05\t\a<\x16"},
|
||||
{"crypto/internal/fips140/rsa", "\x03\x1b\x04\x04\x01\x02\x0e\x01\x01\x028\x16pF"},
|
||||
{"crypto/internal/fips140/sha256", "\x03\x1f\x1e\x01\a+\x16\x7f"},
|
||||
{"crypto/internal/fips140/sha3", "\x03\x1f\x19\x05\x012\x95\x01K"},
|
||||
{"crypto/internal/fips140/sha512", "\x03\x1f\x1e\x01\a+\x16\x7f"},
|
||||
{"crypto/internal/fips140/ssh", "'b"},
|
||||
{"crypto/internal/fips140/subtle", "\x1e\a\x1b\xc8\x01"},
|
||||
{"crypto/internal/fips140/tls12", "\x03\x1f\x05\t\a\x02:\x16"},
|
||||
{"crypto/internal/fips140/tls13", "\x03\x1f\x05\b\b\t3\x16"},
|
||||
{"crypto/internal/fips140cache", "\xb2\x02\r&"},
|
||||
{"crypto/internal/fips140deps", ""},
|
||||
{"crypto/internal/fips140deps/byteorder", "\x9c\x01"},
|
||||
{"crypto/internal/fips140deps/cpu", "\xb1\x01\a"},
|
||||
{"crypto/internal/fips140deps/godebug", "\xb9\x01"},
|
||||
{"crypto/internal/fips140deps/time", "\xc9\x02"},
|
||||
{"crypto/internal/fips140hash", "7\x1c3\xc9\x01"},
|
||||
{"crypto/internal/fips140only", ")\r\x01\x01N3<"},
|
||||
{"crypto/internal/fips140deps/byteorder", "\x9f\x01"},
|
||||
{"crypto/internal/fips140deps/cpu", "\xb4\x01\a"},
|
||||
{"crypto/internal/fips140deps/godebug", "\xbc\x01"},
|
||||
{"crypto/internal/fips140deps/time", "\xcd\x02"},
|
||||
{"crypto/internal/fips140hash", "8\x1d4\xca\x01"},
|
||||
{"crypto/internal/fips140only", ")\x0e\x01\x01P3="},
|
||||
{"crypto/internal/fips140test", ""},
|
||||
{"crypto/internal/hpke", "\x0e\x01\x01\x03\x056#+hM"},
|
||||
{"crypto/internal/impl", "\xb9\x02"},
|
||||
{"crypto/internal/randutil", "\xf5\x01\x12"},
|
||||
{"crypto/internal/sysrand", "qo! \r\r\x01\x01\f\x06"},
|
||||
{"crypto/internal/sysrand/internal/seccomp", "q"},
|
||||
{"crypto/md5", "\x0e6-\x15\x16h"},
|
||||
{"crypto/mlkem", "1"},
|
||||
{"crypto/pbkdf2", "4\x0f\x01-\x15"},
|
||||
{"crypto/rand", "\x1a\b\a\x1b\x04\x01(\x84\x01\rM"},
|
||||
{"crypto/rc4", "%\x1f-\xc7\x01"},
|
||||
{"crypto/rsa", "\x0e\f\x01\v\x0f\x0e\x01\x04\x06\a\x1c\x03\x123<\f\x01"},
|
||||
{"crypto/sha1", "\x0e\f*\x03*\x15\x16\x15S"},
|
||||
{"crypto/sha256", "\x0e\f\x1cP"},
|
||||
{"crypto/sha3", "\x0e)O\xc9\x01"},
|
||||
{"crypto/sha512", "\x0e\f\x1eN"},
|
||||
{"crypto/subtle", "\x1e\x1c\x9c\x01X"},
|
||||
{"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\t\x01\r\n\x01\n\x05\x03\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x12\x16\x15\b<\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
|
||||
{"crypto/tls/internal/fips140tls", "\x17\xa5\x02"},
|
||||
{"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x015\x05\x01\x01\x02\x05\x0e\x06\x02\x02\x03E\x039\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\x02\x05\b\x02\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
|
||||
{"crypto/x509/pkix", "g\x06\a\x8e\x01G"},
|
||||
{"database/sql", "\x03\nN\x16\x03\x81\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
|
||||
{"database/sql/driver", "\rd\x03\xb5\x01\x0f\x11"},
|
||||
{"debug/buildinfo", "\x03[\x02\x01\x01\b\a\x03e\x1a\x02\x01+\x0f\x1f"},
|
||||
{"debug/dwarf", "\x03g\a\x03\x81\x011\x11\x01\x01"},
|
||||
{"debug/elf", "\x03\x06T\r\a\x03e\x1b\x01\f \x17\x01\x16"},
|
||||
{"debug/gosym", "\x03g\n\xc3\x01\x01\x01\x02"},
|
||||
{"debug/macho", "\x03\x06T\r\ne\x1c,\x17\x01"},
|
||||
{"debug/pe", "\x03\x06T\r\a\x03e\x1c,\x17\x01\x16"},
|
||||
{"debug/plan9obj", "j\a\x03e\x1c,"},
|
||||
{"embed", "q*A\x19\x01S"},
|
||||
{"crypto/internal/hpke", "\x03\v\x01\x01\x03\x055\x03\x04\x01\x01\x16\a\x03\x13\xcc\x01"},
|
||||
{"crypto/internal/impl", "\xbd\x02"},
|
||||
{"crypto/internal/randutil", "\xf9\x01\x12"},
|
||||
{"crypto/internal/sysrand", "sq! \r\r\x01\x01\f\x06"},
|
||||
{"crypto/internal/sysrand/internal/seccomp", "s"},
|
||||
{"crypto/md5", "\x0e7.\x16\x16i"},
|
||||
{"crypto/mlkem", "\x0e$"},
|
||||
{"crypto/mlkem/mlkemtest", "2\x1b&"},
|
||||
{"crypto/pbkdf2", "5\x0f\x01.\x16"},
|
||||
{"crypto/rand", "\x1a\b\a\x1c\x04\x01)\x86\x01\rM"},
|
||||
{"crypto/rc4", "% .\xc9\x01"},
|
||||
{"crypto/rsa", "\x0e\f\x01\v\x10\x0e\x01\x04\a\a\x1c\x03\x133=\f\x01"},
|
||||
{"crypto/sha1", "\x0e\f+\x03+\x16\x16\x15T"},
|
||||
{"crypto/sha256", "\x0e\f\x1dR"},
|
||||
{"crypto/sha3", "\x0e*Q\xca\x01"},
|
||||
{"crypto/sha512", "\x0e\f\x1fP"},
|
||||
{"crypto/subtle", "\x1e\x1d\x9f\x01X"},
|
||||
{"crypto/tls", "\x03\b\x02\x01\x01\x01\x01\x02\x01\x01\x01\x02\x01\x01\t\x01\x0e\n\x01\n\x05\x04\x01\x01\x01\x01\x02\x01\x02\x01\x17\x02\x03\x13\x16\x15\b=\x16\x16\r\b\x01\x01\x01\x02\x01\r\x06\x02\x01\x0f"},
|
||||
{"crypto/tls/internal/fips140tls", "\x17\xa9\x02"},
|
||||
{"crypto/x509", "\x03\v\x01\x01\x01\x01\x01\x01\x01\x016\x06\x01\x01\x02\x05\x0e\x06\x02\x02\x03F\x03:\x01\x02\b\x01\x01\x02\a\x10\x05\x01\x06\a\b\x02\x01\x02\x0e\x02\x01\x01\x02\x03\x01"},
|
||||
{"crypto/x509/pkix", "i\x06\a\x90\x01G"},
|
||||
{"database/sql", "\x03\nP\x16\x03\x83\x01\v\a\"\x05\b\x02\x03\x01\r\x02\x02\x02"},
|
||||
{"database/sql/driver", "\rf\x03\xb7\x01\x0f\x11"},
|
||||
{"debug/buildinfo", "\x03]\x02\x01\x01\b\a\x03g\x1a\x02\x01+\x0f\x1f"},
|
||||
{"debug/dwarf", "\x03i\a\x03\x83\x011\x11\x01\x01"},
|
||||
{"debug/elf", "\x03\x06V\r\a\x03g\x1b\x01\f \x17\x01\x16"},
|
||||
{"debug/gosym", "\x03i\n\xc5\x01\x01\x01\x02"},
|
||||
{"debug/macho", "\x03\x06V\r\ng\x1c,\x17\x01"},
|
||||
{"debug/pe", "\x03\x06V\r\a\x03g\x1c,\x17\x01\x16"},
|
||||
{"debug/plan9obj", "l\a\x03g\x1c,"},
|
||||
{"embed", "s+B\x19\x01S"},
|
||||
{"embed/internal/embedtest", ""},
|
||||
{"encoding", ""},
|
||||
{"encoding/ascii85", "\xf5\x01C"},
|
||||
{"encoding/asn1", "\x03n\x03e(\x01'\r\x02\x01\x10\x03\x01"},
|
||||
{"encoding/base32", "\xf5\x01A\x02"},
|
||||
{"encoding/base64", "\x9c\x01YA\x02"},
|
||||
{"encoding/binary", "q\x84\x01\f(\r\x05"},
|
||||
{"encoding/csv", "\x02\x01n\x03\x81\x01D\x12\x02"},
|
||||
{"encoding/gob", "\x02c\x05\a\x03e\x1c\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
|
||||
{"encoding/hex", "q\x03\x81\x01A\x03"},
|
||||
{"encoding/json", "\x03\x01a\x04\b\x03\x81\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
|
||||
{"encoding/pem", "\x03f\b\x84\x01A\x03"},
|
||||
{"encoding/xml", "\x02\x01b\f\x03\x81\x014\x05\n\x01\x02\x10\x02"},
|
||||
{"errors", "\xcc\x01\x83\x01"},
|
||||
{"expvar", "nK@\b\v\x15\r\b\x02\x03\x01\x11"},
|
||||
{"flag", "e\f\x03\x81\x01,\b\x05\b\x02\x01\x10"},
|
||||
{"fmt", "qE&\x19\f \b\r\x02\x03\x12"},
|
||||
{"go/ast", "\x03\x01p\x0e\x01r\x03)\b\r\x02\x01\x12\x02"},
|
||||
{"go/build", "\x02\x01n\x03\x01\x02\x02\a\x02\x01\x17\x1f\x04\x02\b\x1b\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
|
||||
{"go/build/constraint", "q\xc7\x01\x01\x12\x02"},
|
||||
{"go/constant", "t\x0f~\x01\x024\x01\x02\x12"},
|
||||
{"go/doc", "\x04p\x01\x05\t=51\x10\x02\x01\x12\x02"},
|
||||
{"go/doc/comment", "\x03q\xc2\x01\x01\x01\x01\x12\x02"},
|
||||
{"go/format", "\x03q\x01\v\x01\x02rD"},
|
||||
{"go/importer", "v\a\x01\x01\x04\x01q9"},
|
||||
{"go/internal/gccgoimporter", "\x02\x01[\x13\x03\x04\v\x01o\x02,\x01\x05\x11\x01\f\b"},
|
||||
{"go/internal/gcimporter", "\x02r\x0f\x010\x05\r/,\x15\x03\x02"},
|
||||
{"go/internal/srcimporter", "t\x01\x01\n\x03\x01q,\x01\x05\x12\x02\x14"},
|
||||
{"go/parser", "\x03n\x03\x01\x02\v\x01r\x01+\x06\x12"},
|
||||
{"go/printer", "t\x01\x02\x03\tr\f \x15\x02\x01\x02\v\x05\x02"},
|
||||
{"go/scanner", "\x03q\x0fr2\x10\x01\x13\x02"},
|
||||
{"go/token", "\x04p\x84\x01>\x02\x03\x01\x0f\x02"},
|
||||
{"go/types", "\x03\x01\x06g\x03\x01\x03\b\x03\x024\x062\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
|
||||
{"go/version", "\xbe\x01{"},
|
||||
{"hash", "\xf5\x01"},
|
||||
{"hash/adler32", "q\x15\x16"},
|
||||
{"hash/crc32", "q\x15\x16\x15\x8a\x01\x01\x13"},
|
||||
{"hash/crc64", "q\x15\x16\x9f\x01"},
|
||||
{"hash/fnv", "q\x15\x16h"},
|
||||
{"hash/maphash", "\x86\x01\x11<|"},
|
||||
{"html", "\xb9\x02\x02\x12"},
|
||||
{"html/template", "\x03k\x06\x18-<\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
|
||||
{"image", "\x02o\x1ef\x0f4\x03\x01"},
|
||||
{"encoding/ascii85", "\xf9\x01C"},
|
||||
{"encoding/asn1", "\x03p\x03g(\x01'\r\x02\x01\x10\x03\x01"},
|
||||
{"encoding/base32", "\xf9\x01A\x02"},
|
||||
{"encoding/base64", "\x9f\x01ZA\x02"},
|
||||
{"encoding/binary", "s\x86\x01\f(\r\x05"},
|
||||
{"encoding/csv", "\x02\x01p\x03\x83\x01D\x12\x02"},
|
||||
{"encoding/gob", "\x02e\x05\a\x03g\x1c\v\x01\x03\x1d\b\x12\x01\x0f\x02"},
|
||||
{"encoding/hex", "s\x03\x83\x01A\x03"},
|
||||
{"encoding/json", "\x03\x01c\x04\b\x03\x83\x01\f(\r\x02\x01\x02\x10\x01\x01\x02"},
|
||||
{"encoding/pem", "\x03h\b\x86\x01A\x03"},
|
||||
{"encoding/xml", "\x02\x01d\f\x03\x83\x014\x05\n\x01\x02\x10\x02"},
|
||||
{"errors", "\xcf\x01\x84\x01"},
|
||||
{"expvar", "pLA\b\v\x15\r\b\x02\x03\x01\x11"},
|
||||
{"flag", "g\f\x03\x83\x01,\b\x05\b\x02\x01\x10"},
|
||||
{"fmt", "sF'\x19\f \b\r\x02\x03\x12"},
|
||||
{"go/ast", "\x03\x01r\x0f\x01s\x03)\b\r\x02\x01\x12\x02"},
|
||||
{"go/build", "\x02\x01p\x03\x01\x02\x02\b\x02\x01\x17\x1f\x04\x02\b\x1c\x13\x01+\x01\x04\x01\a\b\x02\x01\x12\x02\x02"},
|
||||
{"go/build/constraint", "s\xc9\x01\x01\x12\x02"},
|
||||
{"go/constant", "v\x10\x7f\x01\x024\x01\x02\x12"},
|
||||
{"go/doc", "\x04r\x01\x05\n=61\x10\x02\x01\x12\x02"},
|
||||
{"go/doc/comment", "\x03s\xc4\x01\x01\x01\x01\x12\x02"},
|
||||
{"go/format", "\x03s\x01\f\x01\x02sD"},
|
||||
{"go/importer", "x\a\x01\x02\x04\x01r9"},
|
||||
{"go/internal/gccgoimporter", "\x02\x01]\x13\x03\x04\f\x01p\x02,\x01\x05\x11\x01\f\b"},
|
||||
{"go/internal/gcimporter", "\x02t\x10\x010\x05\r0,\x15\x03\x02"},
|
||||
{"go/internal/scannerhooks", "\x86\x01"},
|
||||
{"go/internal/srcimporter", "v\x01\x01\v\x03\x01r,\x01\x05\x12\x02\x14"},
|
||||
{"go/parser", "\x03p\x03\x01\x02\b\x04\x01s\x01+\x06\x12"},
|
||||
{"go/printer", "v\x01\x02\x03\ns\f \x15\x02\x01\x02\v\x05\x02"},
|
||||
{"go/scanner", "\x03s\v\x05s2\x10\x01\x13\x02"},
|
||||
{"go/token", "\x04r\x86\x01>\x02\x03\x01\x0f\x02"},
|
||||
{"go/types", "\x03\x01\x06i\x03\x01\x03\t\x03\x024\x063\x04\x03\t \x06\a\b\x01\x01\x01\x02\x01\x0f\x02\x02"},
|
||||
{"go/version", "\xc1\x01|"},
|
||||
{"hash", "\xf9\x01"},
|
||||
{"hash/adler32", "s\x16\x16"},
|
||||
{"hash/crc32", "s\x16\x16\x15\x8b\x01\x01\x13"},
|
||||
{"hash/crc64", "s\x16\x16\xa0\x01"},
|
||||
{"hash/fnv", "s\x16\x16i"},
|
||||
{"hash/maphash", "\x89\x01\x11<}"},
|
||||
{"html", "\xbd\x02\x02\x12"},
|
||||
{"html/template", "\x03m\x06\x19-=\x01\n!\x05\x01\x02\x03\f\x01\x02\f\x01\x03\x02"},
|
||||
{"image", "\x02q\x1fg\x0f4\x03\x01"},
|
||||
{"image/color", ""},
|
||||
{"image/color/palette", "\x8f\x01"},
|
||||
{"image/draw", "\x8e\x01\x01\x04"},
|
||||
{"image/gif", "\x02\x01\x05i\x03\x1a\x01\x01\x01\vY"},
|
||||
{"image/internal/imageutil", "\x8e\x01"},
|
||||
{"image/jpeg", "\x02o\x1d\x01\x04b"},
|
||||
{"image/png", "\x02\aa\n\x12\x02\x06\x01fC"},
|
||||
{"index/suffixarray", "\x03g\a\x84\x01\f+\n\x01"},
|
||||
{"internal/abi", "\xb8\x01\x97\x01"},
|
||||
{"internal/asan", "\xcf\x02"},
|
||||
{"internal/bisect", "\xae\x02\r\x01"},
|
||||
{"internal/buildcfg", "tGf\x06\x02\x05\n\x01"},
|
||||
{"internal/bytealg", "\xb1\x01\x9e\x01"},
|
||||
{"image/color/palette", "\x92\x01"},
|
||||
{"image/draw", "\x91\x01\x01\x04"},
|
||||
{"image/gif", "\x02\x01\x05k\x03\x1b\x01\x01\x01\vZ\x0f"},
|
||||
{"image/internal/imageutil", "\x91\x01"},
|
||||
{"image/jpeg", "\x02q\x1e\x01\x04c"},
|
||||
{"image/png", "\x02\ac\n\x13\x02\x06\x01gC"},
|
||||
{"index/suffixarray", "\x03i\a\x86\x01\f+\n\x01"},
|
||||
{"internal/abi", "\xbb\x01\x98\x01"},
|
||||
{"internal/asan", "\xd3\x02"},
|
||||
{"internal/bisect", "\xb2\x02\r\x01"},
|
||||
{"internal/buildcfg", "vHg\x06\x02\x05\n\x01"},
|
||||
{"internal/bytealg", "\xb4\x01\x9f\x01"},
|
||||
{"internal/byteorder", ""},
|
||||
{"internal/cfg", ""},
|
||||
{"internal/cgrouptest", "tZS\x06\x0f\x02\x01\x04\x01"},
|
||||
{"internal/chacha8rand", "\x9c\x01\x15\a\x97\x01"},
|
||||
{"internal/cgrouptest", "v[T\x06\x0f\x02\x01\x04\x01"},
|
||||
{"internal/chacha8rand", "\x9f\x01\x15\a\x98\x01"},
|
||||
{"internal/copyright", ""},
|
||||
{"internal/coverage", ""},
|
||||
{"internal/coverage/calloc", ""},
|
||||
{"internal/coverage/cfile", "n\x06\x16\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01\"\x02&,\x06\a\n\x01\x03\r\x06"},
|
||||
{"internal/coverage/cformat", "\x04p-\x04P\v6\x01\x02\r"},
|
||||
{"internal/coverage/cmerge", "t-`"},
|
||||
{"internal/coverage/decodecounter", "j\n-\v\x02G,\x17\x17"},
|
||||
{"internal/coverage/decodemeta", "\x02h\n\x16\x17\v\x02G,"},
|
||||
{"internal/coverage/encodecounter", "\x02h\n-\f\x01\x02E\v!\x15"},
|
||||
{"internal/coverage/encodemeta", "\x02\x01g\n\x12\x04\x17\r\x02E,."},
|
||||
{"internal/coverage/pods", "\x04p-\x80\x01\x06\x05\n\x02\x01"},
|
||||
{"internal/coverage/rtcov", "\xcf\x02"},
|
||||
{"internal/coverage/slicereader", "j\n\x81\x01Z"},
|
||||
{"internal/coverage/slicewriter", "t\x81\x01"},
|
||||
{"internal/coverage/stringtab", "t8\x04E"},
|
||||
{"internal/coverage/cfile", "p\x06\x17\x17\x01\x02\x01\x01\x01\x01\x01\x01\x01\"\x02',\x06\a\n\x01\x03\r\x06"},
|
||||
{"internal/coverage/cformat", "\x04r.\x04Q\v6\x01\x02\r"},
|
||||
{"internal/coverage/cmerge", "v.a"},
|
||||
{"internal/coverage/decodecounter", "l\n.\v\x02H,\x17\x17"},
|
||||
{"internal/coverage/decodemeta", "\x02j\n\x17\x17\v\x02H,"},
|
||||
{"internal/coverage/encodecounter", "\x02j\n.\f\x01\x02F\v!\x15"},
|
||||
{"internal/coverage/encodemeta", "\x02\x01i\n\x13\x04\x17\r\x02F,."},
|
||||
{"internal/coverage/pods", "\x04r.\x81\x01\x06\x05\n\x02\x01"},
|
||||
{"internal/coverage/rtcov", "\xd3\x02"},
|
||||
{"internal/coverage/slicereader", "l\n\x83\x01Z"},
|
||||
{"internal/coverage/slicewriter", "v\x83\x01"},
|
||||
{"internal/coverage/stringtab", "v9\x04F"},
|
||||
{"internal/coverage/test", ""},
|
||||
{"internal/coverage/uleb128", ""},
|
||||
{"internal/cpu", "\xcf\x02"},
|
||||
{"internal/dag", "\x04p\xc2\x01\x03"},
|
||||
{"internal/diff", "\x03q\xc3\x01\x02"},
|
||||
{"internal/exportdata", "\x02\x01n\x03\x02c\x1c,\x01\x05\x11\x01\x02"},
|
||||
{"internal/filepathlite", "q*A\x1a@"},
|
||||
{"internal/fmtsort", "\x04\xa5\x02\r"},
|
||||
{"internal/fuzz", "\x03\nE\x18\x04\x03\x03\x01\v\x036<\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
|
||||
{"internal/cpu", "\xd3\x02"},
|
||||
{"internal/dag", "\x04r\xc4\x01\x03"},
|
||||
{"internal/diff", "\x03s\xc5\x01\x02"},
|
||||
{"internal/exportdata", "\x02\x01p\x03\x02e\x1c,\x01\x05\x11\x01\x02"},
|
||||
{"internal/filepathlite", "s+B\x1a@"},
|
||||
{"internal/fmtsort", "\x04\xa9\x02\r"},
|
||||
{"internal/fuzz", "\x03\nG\x18\x04\x03\x03\x01\f\x036=\f\x03\x1d\x01\x05\x02\x05\n\x01\x02\x01\x01\f\x04\x02"},
|
||||
{"internal/goarch", ""},
|
||||
{"internal/godebug", "\x99\x01!\x81\x01\x01\x13"},
|
||||
{"internal/godebug", "\x9c\x01!\x82\x01\x01\x13"},
|
||||
{"internal/godebugs", ""},
|
||||
{"internal/goexperiment", ""},
|
||||
{"internal/goos", ""},
|
||||
{"internal/goroot", "\xa1\x02\x01\x05\x12\x02"},
|
||||
{"internal/goroot", "\xa5\x02\x01\x05\x12\x02"},
|
||||
{"internal/gover", "\x04"},
|
||||
{"internal/goversion", ""},
|
||||
{"internal/lazyregexp", "\xa1\x02\v\r\x02"},
|
||||
{"internal/lazytemplate", "\xf5\x01,\x18\x02\f"},
|
||||
{"internal/msan", "\xcf\x02"},
|
||||
{"internal/lazyregexp", "\xa5\x02\v\r\x02"},
|
||||
{"internal/lazytemplate", "\xf9\x01,\x18\x02\f"},
|
||||
{"internal/msan", "\xd3\x02"},
|
||||
{"internal/nettrace", ""},
|
||||
{"internal/obscuretestdata", "i\x8c\x01,"},
|
||||
{"internal/oserror", "q"},
|
||||
{"internal/pkgbits", "\x03O\x18\a\x03\x04\vr\r\x1f\r\n\x01"},
|
||||
{"internal/obscuretestdata", "k\x8e\x01,"},
|
||||
{"internal/oserror", "s"},
|
||||
{"internal/pkgbits", "\x03Q\x18\a\x03\x04\fs\r\x1f\r\n\x01"},
|
||||
{"internal/platform", ""},
|
||||
{"internal/poll", "qj\x05\x159\r\x01\x01\f\x06"},
|
||||
{"internal/profile", "\x03\x04j\x03\x81\x017\n\x01\x01\x01\x10"},
|
||||
{"internal/poll", "sl\x05\x159\r\x01\x01\f\x06"},
|
||||
{"internal/profile", "\x03\x04l\x03\x83\x017\n\x01\x01\x01\x10"},
|
||||
{"internal/profilerecord", ""},
|
||||
{"internal/race", "\x97\x01\xb8\x01"},
|
||||
{"internal/reflectlite", "\x97\x01!:<!"},
|
||||
{"internal/runtime/atomic", "\xb8\x01\x97\x01"},
|
||||
{"internal/runtime/cgroup", "\x9b\x01<\x04t"},
|
||||
{"internal/runtime/exithook", "\xcd\x01\x82\x01"},
|
||||
{"internal/runtime/gc", "\xb8\x01"},
|
||||
{"internal/runtime/gc/internal/gen", "\n`\n\x17j\x04\v\x1d\b\x10\x02"},
|
||||
{"internal/runtime/gc/scan", "\xb1\x01\a\x18\x06y"},
|
||||
{"internal/runtime/maps", "\x97\x01\x01 \n\t\t\x02y"},
|
||||
{"internal/runtime/math", "\xb8\x01"},
|
||||
{"internal/race", "\x9a\x01\xb9\x01"},
|
||||
{"internal/reflectlite", "\x9a\x01!;<!"},
|
||||
{"internal/runtime/atomic", "\xbb\x01\x98\x01"},
|
||||
{"internal/runtime/cgroup", "\x9e\x01=\x04t"},
|
||||
{"internal/runtime/exithook", "\xd0\x01\x83\x01"},
|
||||
{"internal/runtime/gc", "\xbb\x01"},
|
||||
{"internal/runtime/gc/internal/gen", "\nb\n\x18k\x04\v\x1d\b\x10\x02"},
|
||||
{"internal/runtime/gc/scan", "\xb4\x01\a\x18\ay"},
|
||||
{"internal/runtime/maps", "\x9a\x01\x01 \n\t\t\x03y"},
|
||||
{"internal/runtime/math", "\xbb\x01"},
|
||||
{"internal/runtime/pprof/label", ""},
|
||||
{"internal/runtime/startlinetest", ""},
|
||||
{"internal/runtime/sys", "\xb8\x01\x04"},
|
||||
{"internal/runtime/syscall/linux", "\xb8\x01\x97\x01"},
|
||||
{"internal/runtime/sys", "\xbb\x01\x04"},
|
||||
{"internal/runtime/syscall/linux", "\xbb\x01\x98\x01"},
|
||||
{"internal/runtime/wasitest", ""},
|
||||
{"internal/saferio", "\xf5\x01Z"},
|
||||
{"internal/singleflight", "\xbb\x02"},
|
||||
{"internal/strconv", "\x84\x02K"},
|
||||
{"internal/stringslite", "\x9b\x01\xb4\x01"},
|
||||
{"internal/sync", "\x97\x01!\x13q\x13"},
|
||||
{"internal/synctest", "\x97\x01\xb8\x01"},
|
||||
{"internal/syscall/execenv", "\xbd\x02"},
|
||||
{"internal/syscall/unix", "\xae\x02\x0e\x01\x12"},
|
||||
{"internal/sysinfo", "\x02\x01\xae\x01D,\x18\x02"},
|
||||
{"internal/saferio", "\xf9\x01Z"},
|
||||
{"internal/singleflight", "\xbf\x02"},
|
||||
{"internal/strconv", "\x88\x02K"},
|
||||
{"internal/stringslite", "\x9e\x01\xb5\x01"},
|
||||
{"internal/sync", "\x9a\x01!\x13r\x13"},
|
||||
{"internal/synctest", "\x9a\x01\xb9\x01"},
|
||||
{"internal/syscall/execenv", "\xc1\x02"},
|
||||
{"internal/syscall/unix", "\xb2\x02\x0e\x01\x12"},
|
||||
{"internal/sysinfo", "\x02\x01\xb1\x01E,\x18\x02"},
|
||||
{"internal/syslist", ""},
|
||||
{"internal/testenv", "\x03\nd\x02\x01)\x1b\x0f/+\x01\x05\a\n\x01\x02\x02\x01\v"},
|
||||
{"internal/testhash", "\x03\x83\x01o\x118\v"},
|
||||
{"internal/testlog", "\xbb\x02\x01\x13"},
|
||||
{"internal/testpty", "q\x03\xad\x01"},
|
||||
{"internal/trace", "\x02\x01\x01\x06`\a\x03u\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
|
||||
{"internal/trace/internal/testgen", "\x03g\ns\x03\x02\x03\x011\v\r\x10"},
|
||||
{"internal/trace/internal/tracev1", "\x03\x01f\a\x03{\x06\f5\x01"},
|
||||
{"internal/trace/raw", "\x02h\nx\x03\x06C\x01\x12"},
|
||||
{"internal/trace/testtrace", "\x02\x01n\x03o\x04\x03\x05\x01\x05,\v\x02\b\x02\x01\x05"},
|
||||
{"internal/testenv", "\x03\nf\x02\x01*\x1b\x0f0+\x01\x05\a\n\x01\x02\x02\x01\v"},
|
||||
{"internal/testhash", "\x03\x86\x01p\x118\v"},
|
||||
{"internal/testlog", "\xbf\x02\x01\x13"},
|
||||
{"internal/testpty", "s\x03\xaf\x01"},
|
||||
{"internal/trace", "\x02\x01\x01\x06b\a\x03w\x03\x03\x06\x03\t5\x01\x01\x01\x10\x06"},
|
||||
{"internal/trace/internal/testgen", "\x03i\nu\x03\x02\x03\x011\v\r\x10"},
|
||||
{"internal/trace/internal/tracev1", "\x03\x01h\a\x03}\x06\f5\x01"},
|
||||
{"internal/trace/raw", "\x02j\nz\x03\x06C\x01\x12"},
|
||||
{"internal/trace/testtrace", "\x02\x01p\x03q\x04\x03\x05\x01\x05,\v\x02\b\x02\x01\x05"},
|
||||
{"internal/trace/tracev2", ""},
|
||||
{"internal/trace/traceviewer", "\x02a\v\x06\x19<\x1e\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
|
||||
{"internal/trace/traceviewer", "\x02c\v\x06\x1a<\x1f\a\a\x04\b\v\x15\x01\x05\a\n\x01\x02\x0e"},
|
||||
{"internal/trace/traceviewer/format", ""},
|
||||
{"internal/trace/version", "tx\t"},
|
||||
{"internal/txtar", "\x03q\xad\x01\x18"},
|
||||
{"internal/types/errors", "\xb8\x02"},
|
||||
{"internal/unsafeheader", "\xcf\x02"},
|
||||
{"internal/xcoff", "]\r\a\x03e\x1c,\x17\x01"},
|
||||
{"internal/zstd", "j\a\x03\x81\x01\x0f"},
|
||||
{"io", "q\xca\x01"},
|
||||
{"io/fs", "q**01\x10\x13\x04"},
|
||||
{"io/ioutil", "\xf5\x01\x01+\x15\x03"},
|
||||
{"iter", "\xcb\x01c!"},
|
||||
{"log", "t\x81\x01\x05'\r\r\x01\r"},
|
||||
{"internal/trace/version", "vz\t"},
|
||||
{"internal/txtar", "\x03s\xaf\x01\x18"},
|
||||
{"internal/types/errors", "\xbc\x02"},
|
||||
{"internal/unsafeheader", "\xd3\x02"},
|
||||
{"internal/xcoff", "_\r\a\x03g\x1c,\x17\x01"},
|
||||
{"internal/zstd", "l\a\x03\x83\x01\x0f"},
|
||||
{"io", "s\xcc\x01"},
|
||||
{"io/fs", "s+*11\x10\x13\x04"},
|
||||
{"io/ioutil", "\xf9\x01\x01+\x15\x03"},
|
||||
{"iter", "\xce\x01d!"},
|
||||
{"log", "v\x83\x01\x05'\r\r\x01\r"},
|
||||
{"log/internal", ""},
|
||||
{"log/slog", "\x03\nX\t\x03\x03\x81\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
|
||||
{"log/slog", "\x03\nZ\t\x03\x03\x83\x01\x04\x01\x02\x02\x03(\x05\b\x02\x01\x02\x01\r\x02\x02\x02"},
|
||||
{"log/slog/internal", ""},
|
||||
{"log/slog/internal/benchmarks", "\rd\x03\x81\x01\x06\x03:\x11"},
|
||||
{"log/slog/internal/buffer", "\xbb\x02"},
|
||||
{"log/syslog", "q\x03\x85\x01\x12\x16\x18\x02\x0e"},
|
||||
{"maps", "\xf8\x01W"},
|
||||
{"math", "\xb1\x01SK"},
|
||||
{"math/big", "\x03n\x03(\x15D\f\x03\x020\x02\x01\x02\x14"},
|
||||
{"math/big/internal/asmgen", "\x03\x01p\x90\x012\x03"},
|
||||
{"math/bits", "\xcf\x02"},
|
||||
{"math/cmplx", "\x81\x02\x03"},
|
||||
{"math/rand", "\xb9\x01H:\x01\x13"},
|
||||
{"math/rand/v2", "q+\x03b\x03K"},
|
||||
{"mime", "\x02\x01f\b\x03\x81\x01\v!\x15\x03\x02\x10\x02"},
|
||||
{"mime/multipart", "\x02\x01K#\x03E<\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
|
||||
{"mime/quotedprintable", "\x02\x01q\x81\x01"},
|
||||
{"net", "\x04\td*\x1e\n\x05\x12\x01\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
|
||||
{"net/http", "\x02\x01\x03\x01\x04\x02A\b\x13\x01\a\x03E<\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
|
||||
{"net/http/cgi", "\x02T\x1b\x03\x81\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
|
||||
{"net/http/cookiejar", "\x04m\x03\x97\x01\x01\b\f\x16\x03\x02\x0e\x04"},
|
||||
{"net/http/fcgi", "\x02\x01\n]\a\x03\x81\x01\x16\x01\x01\x14\x18\x02\x0e"},
|
||||
{"net/http/httptest", "\x02\x01\nI\x02\x1b\x01\x81\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
|
||||
{"net/http/httptrace", "\rImH\x14\n "},
|
||||
{"net/http/httputil", "\x02\x01\nd\x03\x81\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x0e\x0e"},
|
||||
{"net/http/internal", "\x02\x01n\x03\x81\x01"},
|
||||
{"net/http/internal/ascii", "\xb9\x02\x12"},
|
||||
{"net/http/internal/httpcommon", "\rd\x03\x9d\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
|
||||
{"net/http/internal/testcert", "\xb9\x02"},
|
||||
{"net/http/pprof", "\x02\x01\ng\x18-\x02\x0e,\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
|
||||
{"log/slog/internal/benchmarks", "\rf\x03\x83\x01\x06\x03:\x11"},
|
||||
{"log/slog/internal/buffer", "\xbf\x02"},
|
||||
{"log/syslog", "s\x03\x87\x01\x12\x16\x18\x02\x0e"},
|
||||
{"maps", "\xfc\x01W"},
|
||||
{"math", "\xb4\x01TK"},
|
||||
{"math/big", "\x03p\x03)\x15E\f\x03\x020\x02\x01\x02\x14"},
|
||||
{"math/big/internal/asmgen", "\x03\x01r\x92\x012\x03"},
|
||||
{"math/bits", "\xd3\x02"},
|
||||
{"math/cmplx", "\x85\x02\x03"},
|
||||
{"math/rand", "\xbc\x01I:\x01\x13"},
|
||||
{"math/rand/v2", "s,\x03c\x03K"},
|
||||
{"mime", "\x02\x01h\b\x03\x83\x01\v!\x15\x03\x02\x10\x02"},
|
||||
{"mime/multipart", "\x02\x01M#\x03F=\v\x01\a\x02\x15\x02\x06\x0f\x02\x01\x16"},
|
||||
{"mime/quotedprintable", "\x02\x01s\x83\x01"},
|
||||
{"net", "\x04\tf+\x1e\n\x05\x13\x01\x01\x04\x15\x01%\x06\r\b\x05\x01\x01\f\x06\a"},
|
||||
{"net/http", "\x02\x01\x03\x01\x04\x02C\b\x13\x01\a\x03F=\x01\x03\a\x01\x03\x02\x02\x01\x02\x06\x02\x01\x01\n\x01\x01\x05\x01\x02\x05\b\x01\x01\x01\x02\x01\r\x02\x02\x02\b\x01\x01\x01"},
|
||||
{"net/http/cgi", "\x02V\x1b\x03\x83\x01\x04\a\v\x01\x13\x01\x01\x01\x04\x01\x05\x02\b\x02\x01\x10\x0e"},
|
||||
{"net/http/cookiejar", "\x04o\x03\x99\x01\x01\b\a\x05\x16\x03\x02\x0e\x04"},
|
||||
{"net/http/fcgi", "\x02\x01\n_\a\x03\x83\x01\x16\x01\x01\x14\x18\x02\x0e"},
|
||||
{"net/http/httptest", "\x02\x01\nK\x02\x1b\x01\x83\x01\x04\x12\x01\n\t\x02\x17\x01\x02\x0e\x0e"},
|
||||
{"net/http/httptrace", "\rKnI\x14\n "},
|
||||
{"net/http/httputil", "\x02\x01\nf\x03\x83\x01\x04\x0f\x03\x01\x05\x02\x01\v\x01\x19\x02\x01\r\x0e"},
|
||||
{"net/http/internal", "\x02\x01p\x03\x83\x01"},
|
||||
{"net/http/internal/ascii", "\xbd\x02\x12"},
|
||||
{"net/http/internal/httpcommon", "\rf\x03\x9f\x01\x0e\x01\x17\x01\x01\x02\x1c\x02"},
|
||||
{"net/http/internal/testcert", "\xbd\x02"},
|
||||
{"net/http/pprof", "\x02\x01\ni\x19-\x02\x0e-\x04\x13\x14\x01\r\x04\x03\x01\x02\x01\x10"},
|
||||
{"net/internal/cgotest", ""},
|
||||
{"net/internal/socktest", "t\xc7\x01\x02"},
|
||||
{"net/mail", "\x02o\x03\x81\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
|
||||
{"net/netip", "\x04m*\x01e\x034\x16"},
|
||||
{"net/rpc", "\x02j\x05\x03\x0f\nh\x04\x12\x01\x1d\r\x03\x02"},
|
||||
{"net/rpc/jsonrpc", "n\x03\x03\x81\x01\x16\x11\x1f"},
|
||||
{"net/smtp", "\x192\v\x13\b\x03\x81\x01\x16\x14\x1a"},
|
||||
{"net/textproto", "\x02\x01n\x03\x81\x01\f\n-\x01\x02\x14"},
|
||||
{"net/url", "q\x03\xa7\x01\v\x10\x02\x01\x16"},
|
||||
{"os", "q*\x01\x19\x03\x10\x13\x01\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
|
||||
{"os/exec", "\x03\ndH&\x01\x15\x01+\x06\a\n\x01\x04\f"},
|
||||
{"os/exec/internal/fdtest", "\xbd\x02"},
|
||||
{"os/signal", "\r\x94\x02\x15\x05\x02"},
|
||||
{"os/user", "\x02\x01n\x03\x81\x01,\r\n\x01\x02"},
|
||||
{"path", "q*\xb2\x01"},
|
||||
{"path/filepath", "q*\x1aA+\r\b\x03\x04\x10"},
|
||||
{"plugin", "q"},
|
||||
{"reflect", "q&\x04\x1d\x13\b\x03\x05\x17\x06\t-\n\x03\x10\x02\x02"},
|
||||
{"net/internal/socktest", "v\xc9\x01\x02"},
|
||||
{"net/mail", "\x02q\x03\x83\x01\x04\x0f\x03\x14\x1a\x02\x0e\x04"},
|
||||
{"net/netip", "\x04o+\x01f\x034\x16"},
|
||||
{"net/rpc", "\x02l\x05\x03\x10\ni\x04\x12\x01\x1d\r\x03\x02"},
|
||||
{"net/rpc/jsonrpc", "p\x03\x03\x83\x01\x16\x11\x1f"},
|
||||
{"net/smtp", "\x193\f\x13\b\x03\x83\x01\x16\x14\x1a"},
|
||||
{"net/textproto", "\x02\x01p\x03\x83\x01\f\n-\x01\x02\x14"},
|
||||
{"net/url", "s\x03Fc\v\x10\x02\x01\x16"},
|
||||
{"os", "s+\x01\x19\x03\x10\x14\x01\x03\x01\x05\x10\x018\b\x05\x01\x01\f\x06"},
|
||||
{"os/exec", "\x03\nfI'\x01\x15\x01+\x06\a\n\x01\x04\f"},
|
||||
{"os/exec/internal/fdtest", "\xc1\x02"},
|
||||
{"os/signal", "\r\x98\x02\x15\x05\x02"},
|
||||
{"os/user", "\x02\x01p\x03\x83\x01,\r\n\x01\x02"},
|
||||
{"path", "s+\xb3\x01"},
|
||||
{"path/filepath", "s+\x1aB+\r\b\x03\x04\x10"},
|
||||
{"plugin", "s"},
|
||||
{"reflect", "s'\x04\x1d\x13\b\x04\x05\x17\x06\t-\n\x03\x10\x02\x02"},
|
||||
{"reflect/internal/example1", ""},
|
||||
{"reflect/internal/example2", ""},
|
||||
{"regexp", "\x03\xf2\x018\t\x02\x01\x02\x10\x02"},
|
||||
{"regexp/syntax", "\xb6\x02\x01\x01\x01\x02\x10\x02"},
|
||||
{"runtime", "\x97\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0e\x03\x01\x01\x01\x02\x01\x01\x02\x01\x04\x01\x10c"},
|
||||
{"runtime/coverage", "\xa3\x01R"},
|
||||
{"runtime/debug", "tTY\r\b\x02\x01\x10\x06"},
|
||||
{"runtime/metrics", "\xba\x01G-!"},
|
||||
{"runtime/pprof", "\x02\x01\x01\x03\x06]\a\x03#$\x0f+\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
|
||||
{"runtime/race", "\xb4\x02"},
|
||||
{"regexp", "\x03\xf6\x018\t\x02\x01\x02\x10\x02"},
|
||||
{"regexp/syntax", "\xba\x02\x01\x01\x01\x02\x10\x02"},
|
||||
{"runtime", "\x9a\x01\x04\x01\x03\f\x06\a\x02\x01\x01\x0e\x03\x01\x01\x01\x02\x01\x01\x01\x02\x01\x04\x01\x10\x18K"},
|
||||
{"runtime/coverage", "\xa6\x01S"},
|
||||
{"runtime/debug", "vUZ\r\b\x02\x01\x10\x06"},
|
||||
{"runtime/metrics", "\xbd\x01H-!"},
|
||||
{"runtime/pprof", "\x02\x01\x01\x03\x06_\a\x03$$\x0f\v!\f \r\b\x01\x01\x01\x02\x02\t\x03\x06"},
|
||||
{"runtime/race", "\xb8\x02"},
|
||||
{"runtime/race/internal/amd64v1", ""},
|
||||
{"runtime/trace", "\rd\x03x\t9\b\x05\x01\r\x06"},
|
||||
{"slices", "\x04\xf4\x01\fK"},
|
||||
{"sort", "\xcc\x0182"},
|
||||
{"strconv", "q*@\x01q"},
|
||||
{"strings", "q&\x04A\x19\x03\f7\x10\x02\x02"},
|
||||
{"runtime/trace", "\rf\x03z\t9\b\x05\x01\r\x06"},
|
||||
{"slices", "\x04\xf8\x01\fK"},
|
||||
{"sort", "\xcf\x0192"},
|
||||
{"strconv", "s+A\x01q"},
|
||||
{"strings", "s'\x04B\x19\x03\f7\x10\x02\x02"},
|
||||
{"structs", ""},
|
||||
{"sync", "\xcb\x01\x12\x01P\x0e\x13"},
|
||||
{"sync/atomic", "\xcf\x02"},
|
||||
{"syscall", "q'\x03\x01\x1c\n\x03\x06\f\x04S\b\x05\x01\x13"},
|
||||
{"testing", "\x03\nd\x02\x01W\x16\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x04"},
|
||||
{"testing/fstest", "q\x03\x81\x01\x01\n&\x10\x03\b\b"},
|
||||
{"testing/internal/testdeps", "\x02\v\xaa\x01.\x10,\x03\x05\x03\x06\a\x02\x0e"},
|
||||
{"testing/iotest", "\x03n\x03\x81\x01\x04"},
|
||||
{"testing/quick", "s\x01\x8d\x01\x05#\x10\x10"},
|
||||
{"testing/slogtest", "\rd\x03\x87\x01.\x05\x10\v"},
|
||||
{"testing/synctest", "\xde\x01`\x11"},
|
||||
{"text/scanner", "\x03q\x81\x01,*\x02"},
|
||||
{"text/tabwriter", "t\x81\x01X"},
|
||||
{"text/template", "q\x03B?\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
|
||||
{"text/template/parse", "\x03q\xba\x01\n\x01\x12\x02"},
|
||||
{"time", "q*\x1e#(*\r\x02\x12"},
|
||||
{"time/tzdata", "q\xcc\x01\x12"},
|
||||
{"sync", "\xce\x01\x13\x01P\x0e\x13"},
|
||||
{"sync/atomic", "\xd3\x02"},
|
||||
{"syscall", "s(\x03\x01\x1c\n\x03\x06\r\x04S\b\x05\x01\x13"},
|
||||
{"testing", "\x03\nf\x02\x01X\x17\x14\f\x05\x1b\x06\x02\x05\x02\x05\x01\x02\x01\x02\x01\r\x02\x04"},
|
||||
{"testing/fstest", "s\x03\x83\x01\x01\n&\x10\x03\b\b"},
|
||||
{"testing/internal/testdeps", "\x02\v\xad\x01/\x10,\x03\x05\x03\x06\a\x02\x0e"},
|
||||
{"testing/iotest", "\x03p\x03\x83\x01\x04"},
|
||||
{"testing/quick", "u\x01\x8f\x01\x05#\x10\x10"},
|
||||
{"testing/slogtest", "\rf\x03\x89\x01.\x05\x10\v"},
|
||||
{"testing/synctest", "\xe2\x01`\x11"},
|
||||
{"text/scanner", "\x03s\x83\x01,*\x02"},
|
||||
{"text/tabwriter", "v\x83\x01X"},
|
||||
{"text/template", "s\x03C@\x01\n \x01\x05\x01\x02\x05\v\x02\r\x03\x02"},
|
||||
{"text/template/parse", "\x03s\xbc\x01\n\x01\x12\x02"},
|
||||
{"time", "s+\x1e$(*\r\x02\x12"},
|
||||
{"time/tzdata", "s\xce\x01\x12"},
|
||||
{"unicode", ""},
|
||||
{"unicode/utf16", ""},
|
||||
{"unicode/utf8", ""},
|
||||
{"unique", "\x97\x01!$\x01Q\r\x01\x13\x12"},
|
||||
{"unique", "\x9a\x01!%\x01Q\r\x01\x13\x12"},
|
||||
{"unsafe", ""},
|
||||
{"vendor/golang.org/x/crypto/chacha20", "\x10Z\a\x93\x01*&"},
|
||||
{"vendor/golang.org/x/crypto/chacha20poly1305", "\x10Z\a\xdf\x01\x04\x01\a"},
|
||||
{"vendor/golang.org/x/crypto/cryptobyte", "g\n\x03\x8e\x01' \n"},
|
||||
{"vendor/golang.org/x/crypto/chacha20", "\x10\\\a\x95\x01*&"},
|
||||
{"vendor/golang.org/x/crypto/chacha20poly1305", "\x10\\\a\xe1\x01\x04\x01\a"},
|
||||
{"vendor/golang.org/x/crypto/cryptobyte", "i\n\x03\x90\x01' \n"},
|
||||
{"vendor/golang.org/x/crypto/cryptobyte/asn1", ""},
|
||||
{"vendor/golang.org/x/crypto/internal/alias", "\xcf\x02"},
|
||||
{"vendor/golang.org/x/crypto/internal/poly1305", "U\x15\x9a\x01"},
|
||||
{"vendor/golang.org/x/net/dns/dnsmessage", "q"},
|
||||
{"vendor/golang.org/x/net/http/httpguts", "\x8b\x02\x14\x1a\x14\r"},
|
||||
{"vendor/golang.org/x/net/http/httpproxy", "q\x03\x97\x01\x10\x05\x01\x18\x14\r"},
|
||||
{"vendor/golang.org/x/net/http2/hpack", "\x03n\x03\x81\x01F"},
|
||||
{"vendor/golang.org/x/net/idna", "t\x8d\x018\x14\x10\x02\x01"},
|
||||
{"vendor/golang.org/x/net/nettest", "\x03g\a\x03\x81\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
|
||||
{"vendor/golang.org/x/sys/cpu", "\xa1\x02\r\n\x01\x16"},
|
||||
{"vendor/golang.org/x/text/secure/bidirule", "q\xdc\x01\x11\x01"},
|
||||
{"vendor/golang.org/x/text/transform", "\x03n\x84\x01X"},
|
||||
{"vendor/golang.org/x/text/unicode/bidi", "\x03\bi\x85\x01>\x16"},
|
||||
{"vendor/golang.org/x/text/unicode/norm", "j\n\x81\x01F\x12\x11"},
|
||||
{"weak", "\x97\x01\x97\x01!"},
|
||||
{"vendor/golang.org/x/crypto/internal/alias", "\xd3\x02"},
|
||||
{"vendor/golang.org/x/crypto/internal/poly1305", "W\x15\x9c\x01"},
|
||||
{"vendor/golang.org/x/net/dns/dnsmessage", "s\xc7\x01"},
|
||||
{"vendor/golang.org/x/net/http/httpguts", "\x8f\x02\x14\x1a\x14\r"},
|
||||
{"vendor/golang.org/x/net/http/httpproxy", "s\x03\x99\x01\x10\x05\x01\x18\x14\r"},
|
||||
{"vendor/golang.org/x/net/http2/hpack", "\x03p\x03\x83\x01F"},
|
||||
{"vendor/golang.org/x/net/idna", "v\x8f\x018\x14\x10\x02\x01"},
|
||||
{"vendor/golang.org/x/net/nettest", "\x03i\a\x03\x83\x01\x11\x05\x16\x01\f\n\x01\x02\x02\x01\v"},
|
||||
{"vendor/golang.org/x/sys/cpu", "\xa5\x02\r\n\x01\x16"},
|
||||
{"vendor/golang.org/x/text/secure/bidirule", "s\xde\x01\x11\x01"},
|
||||
{"vendor/golang.org/x/text/transform", "\x03p\x86\x01X"},
|
||||
{"vendor/golang.org/x/text/unicode/bidi", "\x03\bk\x87\x01>\x16"},
|
||||
{"vendor/golang.org/x/text/unicode/norm", "l\n\x83\x01F\x12\x11"},
|
||||
{"weak", "\x9a\x01\x98\x01!"},
|
||||
}

// bootstrap is the list of bootstrap packages extracted from cmd/dist.
@@ -385,6 +389,7 @@ var bootstrap = map[string]bool{
"cmd/compile/internal/arm64": true,
"cmd/compile/internal/base": true,
"cmd/compile/internal/bitvec": true,
"cmd/compile/internal/bloop": true,
"cmd/compile/internal/compare": true,
"cmd/compile/internal/coverage": true,
"cmd/compile/internal/deadlocals": true,
@@ -413,6 +418,7 @@ var bootstrap = map[string]bool{
"cmd/compile/internal/riscv64": true,
"cmd/compile/internal/rttype": true,
"cmd/compile/internal/s390x": true,
"cmd/compile/internal/slice": true,
"cmd/compile/internal/ssa": true,
"cmd/compile/internal/ssagen": true,
"cmd/compile/internal/staticdata": true,
549 src/cmd/vendor/golang.org/x/tools/internal/stdlib/manifest.go generated vendored
File diff suppressed because it is too large
2 src/cmd/vendor/golang.org/x/tools/internal/stdlib/stdlib.go generated vendored
@@ -39,7 +39,7 @@ const (
Var // "EOF"
Const // "Pi"
Field // "Point.X"
Method // "(*Buffer).Grow"
Method // "(*Buffer).Grow" or "(Reader).Read"
)

func (kind Kind) String() string {
2 src/cmd/vendor/modules.txt vendored
@@ -73,7 +73,7 @@ golang.org/x/text/internal/tag
golang.org/x/text/language
golang.org/x/text/transform
golang.org/x/text/unicode/norm
# golang.org/x/tools v0.39.1-0.20251120214200-68724afed209
# golang.org/x/tools v0.39.1-0.20251130212600-1ad6f3d02713
## explicit; go 1.24.0
golang.org/x/tools/cmd/bisect
golang.org/x/tools/cover