mirror of
https://github.com/golang/go.git
synced 2025-12-08 06:10:04 +00:00
cmd/compile: add up-to-date test for generated files
This runs the ssa/_gen generator writing files into a temporary directory, and then checks that there are no differences with what is currently in the ssa directory, and also checks that any file with the "generated from _gen/..." header was actually generated, and checks that the headers on the generated file match the expected header prefix. Change-Id: Ic8eeb0b06cf6f2e576a013e865b331a12d3a77aa Reviewed-on: https://go-review.googlesource.com/c/go/+/680615 LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com> Reviewed-by: Keith Randall <khr@golang.org> Reviewed-by: Keith Randall <khr@google.com>
This commit is contained in:
parent
7fa2c736b3
commit
d4c6effaa7
12 changed files with 1848 additions and 3 deletions
|
|
@ -9,3 +9,8 @@ more information.
|
||||||
|
|
||||||
To regenerate everything, run "go generate" on the ssa package
|
To regenerate everything, run "go generate" on the ssa package
|
||||||
in the parent directory.
|
in the parent directory.
|
||||||
|
|
||||||
|
The parent directory contains a test in generate_test.go that will fail
|
||||||
|
if the generated files are not up-to-date, and to allow that test to
|
||||||
|
run in no-network environments, golang.org/x/tools/go/ast/astutil is
|
||||||
|
vendored.
|
||||||
|
|
|
||||||
|
|
@ -155,7 +155,7 @@ func genAllocators() {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := os.WriteFile("../allocators.go", b, 0666); err != nil {
|
if err := os.WriteFile(outFile("allocators.go"), b, 0666); err != nil {
|
||||||
log.Fatalf("can't write output: %v\n", err)
|
log.Fatalf("can't write output: %v\n", err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -113,6 +113,7 @@ var archs []arch
|
||||||
var cpuprofile = flag.String("cpuprofile", "", "write cpu profile to `file`")
|
var cpuprofile = flag.String("cpuprofile", "", "write cpu profile to `file`")
|
||||||
var memprofile = flag.String("memprofile", "", "write memory profile to `file`")
|
var memprofile = flag.String("memprofile", "", "write memory profile to `file`")
|
||||||
var tracefile = flag.String("trace", "", "write trace to `file`")
|
var tracefile = flag.String("trace", "", "write trace to `file`")
|
||||||
|
var outDir = flag.String("outdir", "..", "directory in which to write generated files")
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
|
|
@ -144,6 +145,13 @@ func main() {
|
||||||
defer trace.Stop()
|
defer trace.Stop()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if *outDir != ".." {
|
||||||
|
err := os.MkdirAll(*outDir, 0755)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed to create output directory: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
slices.SortFunc(archs, func(a, b arch) int {
|
slices.SortFunc(archs, func(a, b arch) int {
|
||||||
return strings.Compare(a.name, b.name)
|
return strings.Compare(a.name, b.name)
|
||||||
})
|
})
|
||||||
|
|
@ -193,6 +201,10 @@ func main() {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func outFile(file string) string {
|
||||||
|
return *outDir + "/" + file
|
||||||
|
}
|
||||||
|
|
||||||
func genOp() {
|
func genOp() {
|
||||||
w := new(bytes.Buffer)
|
w := new(bytes.Buffer)
|
||||||
fmt.Fprintf(w, "// Code generated from _gen/*Ops.go using 'go generate'; DO NOT EDIT.\n")
|
fmt.Fprintf(w, "// Code generated from _gen/*Ops.go using 'go generate'; DO NOT EDIT.\n")
|
||||||
|
|
@ -500,7 +512,7 @@ func genOp() {
|
||||||
panic(err)
|
panic(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := os.WriteFile("../opGen.go", b, 0666); err != nil {
|
if err := os.WriteFile(outFile("opGen.go"), b, 0666); err != nil {
|
||||||
log.Fatalf("can't write output: %v\n", err)
|
log.Fatalf("can't write output: %v\n", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -322,7 +322,7 @@ func genRulesSuffix(arch arch, suff string) {
|
||||||
file = astutil.Apply(file, pre, post).(*ast.File)
|
file = astutil.Apply(file, pre, post).(*ast.File)
|
||||||
|
|
||||||
// Write the well-formatted source to file
|
// Write the well-formatted source to file
|
||||||
f, err := os.Create("../rewrite" + arch.name + suff + ".go")
|
f, err := os.Create(outFile("rewrite" + arch.name + suff + ".go"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("can't write output: %v", err)
|
log.Fatalf("can't write output: %v", err)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
27
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/LICENSE
generated
vendored
Normal file
27
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/LICENSE
generated
vendored
Normal file
|
|
@ -0,0 +1,27 @@
|
||||||
|
Copyright 2009 The Go Authors.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are
|
||||||
|
met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above
|
||||||
|
copyright notice, this list of conditions and the following disclaimer
|
||||||
|
in the documentation and/or other materials provided with the
|
||||||
|
distribution.
|
||||||
|
* Neither the name of Google LLC nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
22
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/PATENTS
generated
vendored
Normal file
22
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/PATENTS
generated
vendored
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
Additional IP Rights Grant (Patents)
|
||||||
|
|
||||||
|
"This implementation" means the copyrightable works distributed by
|
||||||
|
Google as part of the Go project.
|
||||||
|
|
||||||
|
Google hereby grants to You a perpetual, worldwide, non-exclusive,
|
||||||
|
no-charge, royalty-free, irrevocable (except as stated in this section)
|
||||||
|
patent license to make, have made, use, offer to sell, sell, import,
|
||||||
|
transfer and otherwise run, modify and propagate the contents of this
|
||||||
|
implementation of Go, where such license applies only to those patent
|
||||||
|
claims, both currently owned or controlled by Google and acquired in
|
||||||
|
the future, licensable by Google that are necessarily infringed by this
|
||||||
|
implementation of Go. This grant does not include claims that would be
|
||||||
|
infringed only as a consequence of further modification of this
|
||||||
|
implementation. If you or your agent or exclusive licensee institute or
|
||||||
|
order or agree to the institution of patent litigation against any
|
||||||
|
entity (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||||
|
that this implementation of Go or any code incorporated within this
|
||||||
|
implementation of Go constitutes direct or contributory patent
|
||||||
|
infringement, or inducement of patent infringement, then any patent
|
||||||
|
rights granted to you under this License for this implementation of Go
|
||||||
|
shall terminate as of the date such litigation is filed.
|
||||||
654
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
generated
vendored
Normal file
654
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
generated
vendored
Normal file
|
|
@ -0,0 +1,654 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package astutil
|
||||||
|
|
||||||
|
// This file defines utilities for working with source positions.
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PathEnclosingInterval returns the node that encloses the source
|
||||||
|
// interval [start, end), and all its ancestors up to the AST root.
|
||||||
|
//
|
||||||
|
// The definition of "enclosing" used by this function considers
|
||||||
|
// additional whitespace abutting a node to be enclosed by it.
|
||||||
|
// In this example:
|
||||||
|
//
|
||||||
|
// z := x + y // add them
|
||||||
|
// <-A->
|
||||||
|
// <----B----->
|
||||||
|
//
|
||||||
|
// the ast.BinaryExpr(+) node is considered to enclose interval B
|
||||||
|
// even though its [Pos()..End()) is actually only interval A.
|
||||||
|
// This behaviour makes user interfaces more tolerant of imperfect
|
||||||
|
// input.
|
||||||
|
//
|
||||||
|
// This function treats tokens as nodes, though they are not included
|
||||||
|
// in the result. e.g. PathEnclosingInterval("+") returns the
|
||||||
|
// enclosing ast.BinaryExpr("x + y").
|
||||||
|
//
|
||||||
|
// If start==end, the 1-char interval following start is used instead.
|
||||||
|
//
|
||||||
|
// The 'exact' result is true if the interval contains only path[0]
|
||||||
|
// and perhaps some adjacent whitespace. It is false if the interval
|
||||||
|
// overlaps multiple children of path[0], or if it contains only
|
||||||
|
// interior whitespace of path[0].
|
||||||
|
// In this example:
|
||||||
|
//
|
||||||
|
// z := x + y // add them
|
||||||
|
// <--C--> <---E-->
|
||||||
|
// ^
|
||||||
|
// D
|
||||||
|
//
|
||||||
|
// intervals C, D and E are inexact. C is contained by the
|
||||||
|
// z-assignment statement, because it spans three of its children (:=,
|
||||||
|
// x, +). So too is the 1-char interval D, because it contains only
|
||||||
|
// interior whitespace of the assignment. E is considered interior
|
||||||
|
// whitespace of the BlockStmt containing the assignment.
|
||||||
|
//
|
||||||
|
// The resulting path is never empty; it always contains at least the
|
||||||
|
// 'root' *ast.File. Ideally PathEnclosingInterval would reject
|
||||||
|
// intervals that lie wholly or partially outside the range of the
|
||||||
|
// file, but unfortunately ast.File records only the token.Pos of
|
||||||
|
// the 'package' keyword, but not of the start of the file itself.
|
||||||
|
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
|
||||||
|
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
|
||||||
|
|
||||||
|
// Precondition: node.[Pos..End) and adjoining whitespace contain [start, end).
|
||||||
|
var visit func(node ast.Node) bool
|
||||||
|
visit = func(node ast.Node) bool {
|
||||||
|
path = append(path, node)
|
||||||
|
|
||||||
|
nodePos := node.Pos()
|
||||||
|
nodeEnd := node.End()
|
||||||
|
|
||||||
|
// fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging
|
||||||
|
|
||||||
|
// Intersect [start, end) with interval of node.
|
||||||
|
if start < nodePos {
|
||||||
|
start = nodePos
|
||||||
|
}
|
||||||
|
if end > nodeEnd {
|
||||||
|
end = nodeEnd
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find sole child that contains [start, end).
|
||||||
|
children := childrenOf(node)
|
||||||
|
l := len(children)
|
||||||
|
for i, child := range children {
|
||||||
|
// [childPos, childEnd) is unaugmented interval of child.
|
||||||
|
childPos := child.Pos()
|
||||||
|
childEnd := child.End()
|
||||||
|
|
||||||
|
// [augPos, augEnd) is whitespace-augmented interval of child.
|
||||||
|
augPos := childPos
|
||||||
|
augEnd := childEnd
|
||||||
|
if i > 0 {
|
||||||
|
augPos = children[i-1].End() // start of preceding whitespace
|
||||||
|
}
|
||||||
|
if i < l-1 {
|
||||||
|
nextChildPos := children[i+1].Pos()
|
||||||
|
// Does [start, end) lie between child and next child?
|
||||||
|
if start >= augEnd && end <= nextChildPos {
|
||||||
|
return false // inexact match
|
||||||
|
}
|
||||||
|
augEnd = nextChildPos // end of following whitespace
|
||||||
|
}
|
||||||
|
|
||||||
|
// fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n",
|
||||||
|
// i, augPos, augEnd, start, end) // debugging
|
||||||
|
|
||||||
|
// Does augmented child strictly contain [start, end)?
|
||||||
|
if augPos <= start && end <= augEnd {
|
||||||
|
if is[tokenNode](child) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// childrenOf elides the FuncType node beneath FuncDecl.
|
||||||
|
// Add it back here for TypeParams, Params, Results,
|
||||||
|
// all FieldLists). But we don't add it back for the "func" token
|
||||||
|
// even though it is is the tree at FuncDecl.Type.Func.
|
||||||
|
if decl, ok := node.(*ast.FuncDecl); ok {
|
||||||
|
if fields, ok := child.(*ast.FieldList); ok && fields != decl.Recv {
|
||||||
|
path = append(path, decl.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return visit(child)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Does [start, end) overlap multiple children?
|
||||||
|
// i.e. left-augmented child contains start
|
||||||
|
// but LR-augmented child does not contain end.
|
||||||
|
if start < childEnd && end > augEnd {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// No single child contained [start, end),
|
||||||
|
// so node is the result. Is it exact?
|
||||||
|
|
||||||
|
// (It's tempting to put this condition before the
|
||||||
|
// child loop, but it gives the wrong result in the
|
||||||
|
// case where a node (e.g. ExprStmt) and its sole
|
||||||
|
// child have equal intervals.)
|
||||||
|
if start == nodePos && end == nodeEnd {
|
||||||
|
return true // exact match
|
||||||
|
}
|
||||||
|
|
||||||
|
return false // inexact: overlaps multiple children
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure [start,end) is nondecreasing.
|
||||||
|
if start > end {
|
||||||
|
start, end = end, start
|
||||||
|
}
|
||||||
|
|
||||||
|
if start < root.End() && end > root.Pos() {
|
||||||
|
if start == end {
|
||||||
|
end = start + 1 // empty interval => interval of size 1
|
||||||
|
}
|
||||||
|
exact = visit(root)
|
||||||
|
|
||||||
|
// Reverse the path:
|
||||||
|
for i, l := 0, len(path); i < l/2; i++ {
|
||||||
|
path[i], path[l-1-i] = path[l-1-i], path[i]
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Selection lies within whitespace preceding the
|
||||||
|
// first (or following the last) declaration in the file.
|
||||||
|
// The result nonetheless always includes the ast.File.
|
||||||
|
path = append(path, root)
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// tokenNode is a dummy implementation of ast.Node for a single token.
|
||||||
|
// They are used transiently by PathEnclosingInterval but never escape
|
||||||
|
// this package.
|
||||||
|
type tokenNode struct {
|
||||||
|
pos token.Pos
|
||||||
|
end token.Pos
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n tokenNode) Pos() token.Pos {
|
||||||
|
return n.pos
|
||||||
|
}
|
||||||
|
|
||||||
|
func (n tokenNode) End() token.Pos {
|
||||||
|
return n.end
|
||||||
|
}
|
||||||
|
|
||||||
|
func tok(pos token.Pos, len int) ast.Node {
|
||||||
|
return tokenNode{pos, pos + token.Pos(len)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// childrenOf returns the direct non-nil children of ast.Node n.
|
||||||
|
// It may include fake ast.Node implementations for bare tokens.
|
||||||
|
// it is not safe to call (e.g.) ast.Walk on such nodes.
|
||||||
|
func childrenOf(n ast.Node) []ast.Node {
|
||||||
|
var children []ast.Node
|
||||||
|
|
||||||
|
// First add nodes for all true subtrees.
|
||||||
|
ast.Inspect(n, func(node ast.Node) bool {
|
||||||
|
if node == n { // push n
|
||||||
|
return true // recur
|
||||||
|
}
|
||||||
|
if node != nil { // push child
|
||||||
|
children = append(children, node)
|
||||||
|
}
|
||||||
|
return false // no recursion
|
||||||
|
})
|
||||||
|
|
||||||
|
// Then add fake Nodes for bare tokens.
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.ArrayType:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("[")),
|
||||||
|
tok(n.Elt.End(), len("]")))
|
||||||
|
|
||||||
|
case *ast.AssignStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.BasicLit:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.ValuePos, len(n.Value)))
|
||||||
|
|
||||||
|
case *ast.BinaryExpr:
|
||||||
|
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
||||||
|
|
||||||
|
case *ast.BlockStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrace, len("{")),
|
||||||
|
tok(n.Rbrace, len("}")))
|
||||||
|
|
||||||
|
case *ast.BranchStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.CallExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
if n.Ellipsis != 0 {
|
||||||
|
children = append(children, tok(n.Ellipsis, len("...")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.CaseClause:
|
||||||
|
if n.List == nil {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("default")))
|
||||||
|
} else {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("case")))
|
||||||
|
}
|
||||||
|
children = append(children, tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.ChanType:
|
||||||
|
switch n.Dir {
|
||||||
|
case ast.RECV:
|
||||||
|
children = append(children, tok(n.Begin, len("<-chan")))
|
||||||
|
case ast.SEND:
|
||||||
|
children = append(children, tok(n.Begin, len("chan<-")))
|
||||||
|
case ast.RECV | ast.SEND:
|
||||||
|
children = append(children, tok(n.Begin, len("chan")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.CommClause:
|
||||||
|
if n.Comm == nil {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("default")))
|
||||||
|
} else {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Case, len("case")))
|
||||||
|
}
|
||||||
|
children = append(children, tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.Comment:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.CommentGroup:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.CompositeLit:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrace, len("{")),
|
||||||
|
tok(n.Rbrace, len("{")))
|
||||||
|
|
||||||
|
case *ast.DeclStmt:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.DeferStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Defer, len("defer")))
|
||||||
|
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Ellipsis, len("...")))
|
||||||
|
|
||||||
|
case *ast.EmptyStmt:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.ExprStmt:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.Field:
|
||||||
|
// TODO(adonovan): Field.{Doc,Comment,Tag}?
|
||||||
|
|
||||||
|
case *ast.FieldList:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Opening, len("(")), // or len("[")
|
||||||
|
tok(n.Closing, len(")"))) // or len("]")
|
||||||
|
|
||||||
|
case *ast.File:
|
||||||
|
// TODO test: Doc
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Package, len("package")))
|
||||||
|
|
||||||
|
case *ast.ForStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.For, len("for")))
|
||||||
|
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
// TODO(adonovan): FuncDecl.Comment?
|
||||||
|
|
||||||
|
// Uniquely, FuncDecl breaks the invariant that
|
||||||
|
// preorder traversal yields tokens in lexical order:
|
||||||
|
// in fact, FuncDecl.Recv precedes FuncDecl.Type.Func.
|
||||||
|
//
|
||||||
|
// As a workaround, we inline the case for FuncType
|
||||||
|
// here and order things correctly.
|
||||||
|
// We also need to insert the elided FuncType just
|
||||||
|
// before the 'visit' recursion.
|
||||||
|
//
|
||||||
|
children = nil // discard ast.Walk(FuncDecl) info subtrees
|
||||||
|
children = append(children, tok(n.Type.Func, len("func")))
|
||||||
|
if n.Recv != nil {
|
||||||
|
children = append(children, n.Recv)
|
||||||
|
}
|
||||||
|
children = append(children, n.Name)
|
||||||
|
if tparams := n.Type.TypeParams; tparams != nil {
|
||||||
|
children = append(children, tparams)
|
||||||
|
}
|
||||||
|
if n.Type.Params != nil {
|
||||||
|
children = append(children, n.Type.Params)
|
||||||
|
}
|
||||||
|
if n.Type.Results != nil {
|
||||||
|
children = append(children, n.Type.Results)
|
||||||
|
}
|
||||||
|
if n.Body != nil {
|
||||||
|
children = append(children, n.Body)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.FuncLit:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.FuncType:
|
||||||
|
if n.Func != 0 {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Func, len("func")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.GenDecl:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
if n.Lparen != 0 {
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
}
|
||||||
|
|
||||||
|
case *ast.GoStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Go, len("go")))
|
||||||
|
|
||||||
|
case *ast.Ident:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.NamePos, len(n.Name)))
|
||||||
|
|
||||||
|
case *ast.IfStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.If, len("if")))
|
||||||
|
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
// TODO(adonovan): ImportSpec.{Doc,EndPos}?
|
||||||
|
|
||||||
|
case *ast.IncDecStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.IndexExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("[")),
|
||||||
|
tok(n.Rbrack, len("]")))
|
||||||
|
|
||||||
|
case *ast.IndexListExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("[")),
|
||||||
|
tok(n.Rbrack, len("]")))
|
||||||
|
|
||||||
|
case *ast.InterfaceType:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Interface, len("interface")))
|
||||||
|
|
||||||
|
case *ast.KeyValueExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.LabeledStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Colon, len(":")))
|
||||||
|
|
||||||
|
case *ast.MapType:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Map, len("map")))
|
||||||
|
|
||||||
|
case *ast.ParenExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
|
||||||
|
case *ast.RangeStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.For, len("for")),
|
||||||
|
tok(n.TokPos, len(n.Tok.String())))
|
||||||
|
|
||||||
|
case *ast.ReturnStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Return, len("return")))
|
||||||
|
|
||||||
|
case *ast.SelectStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Select, len("select")))
|
||||||
|
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
// nop
|
||||||
|
|
||||||
|
case *ast.SendStmt:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Arrow, len("<-")))
|
||||||
|
|
||||||
|
case *ast.SliceExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lbrack, len("[")),
|
||||||
|
tok(n.Rbrack, len("]")))
|
||||||
|
|
||||||
|
case *ast.StarExpr:
|
||||||
|
children = append(children, tok(n.Star, len("*")))
|
||||||
|
|
||||||
|
case *ast.StructType:
|
||||||
|
children = append(children, tok(n.Struct, len("struct")))
|
||||||
|
|
||||||
|
case *ast.SwitchStmt:
|
||||||
|
children = append(children, tok(n.Switch, len("switch")))
|
||||||
|
|
||||||
|
case *ast.TypeAssertExpr:
|
||||||
|
children = append(children,
|
||||||
|
tok(n.Lparen-1, len(".")),
|
||||||
|
tok(n.Lparen, len("(")),
|
||||||
|
tok(n.Rparen, len(")")))
|
||||||
|
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
// TODO(adonovan): TypeSpec.{Doc,Comment}?
|
||||||
|
|
||||||
|
case *ast.TypeSwitchStmt:
|
||||||
|
children = append(children, tok(n.Switch, len("switch")))
|
||||||
|
|
||||||
|
case *ast.UnaryExpr:
|
||||||
|
children = append(children, tok(n.OpPos, len(n.Op.String())))
|
||||||
|
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
// TODO(adonovan): ValueSpec.{Doc,Comment}?
|
||||||
|
|
||||||
|
case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt:
|
||||||
|
// nop
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(adonovan): opt: merge the logic of ast.Inspect() into
|
||||||
|
// the switch above so we can make interleaved callbacks for
|
||||||
|
// both Nodes and Tokens in the right order and avoid the need
|
||||||
|
// to sort.
|
||||||
|
sort.Sort(byPos(children))
|
||||||
|
|
||||||
|
return children
|
||||||
|
}
|
||||||
|
|
||||||
|
type byPos []ast.Node
|
||||||
|
|
||||||
|
func (sl byPos) Len() int {
|
||||||
|
return len(sl)
|
||||||
|
}
|
||||||
|
func (sl byPos) Less(i, j int) bool {
|
||||||
|
return sl[i].Pos() < sl[j].Pos()
|
||||||
|
}
|
||||||
|
func (sl byPos) Swap(i, j int) {
|
||||||
|
sl[i], sl[j] = sl[j], sl[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
// NodeDescription returns a description of the concrete type of n suitable
|
||||||
|
// for a user interface.
|
||||||
|
//
|
||||||
|
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
|
||||||
|
// StarExpr) we could be much more specific given the path to the AST
|
||||||
|
// root. Perhaps we should do that.
|
||||||
|
func NodeDescription(n ast.Node) string {
|
||||||
|
switch n := n.(type) {
|
||||||
|
case *ast.ArrayType:
|
||||||
|
return "array type"
|
||||||
|
case *ast.AssignStmt:
|
||||||
|
return "assignment"
|
||||||
|
case *ast.BadDecl:
|
||||||
|
return "bad declaration"
|
||||||
|
case *ast.BadExpr:
|
||||||
|
return "bad expression"
|
||||||
|
case *ast.BadStmt:
|
||||||
|
return "bad statement"
|
||||||
|
case *ast.BasicLit:
|
||||||
|
return "basic literal"
|
||||||
|
case *ast.BinaryExpr:
|
||||||
|
return fmt.Sprintf("binary %s operation", n.Op)
|
||||||
|
case *ast.BlockStmt:
|
||||||
|
return "block"
|
||||||
|
case *ast.BranchStmt:
|
||||||
|
switch n.Tok {
|
||||||
|
case token.BREAK:
|
||||||
|
return "break statement"
|
||||||
|
case token.CONTINUE:
|
||||||
|
return "continue statement"
|
||||||
|
case token.GOTO:
|
||||||
|
return "goto statement"
|
||||||
|
case token.FALLTHROUGH:
|
||||||
|
return "fall-through statement"
|
||||||
|
}
|
||||||
|
case *ast.CallExpr:
|
||||||
|
if len(n.Args) == 1 && !n.Ellipsis.IsValid() {
|
||||||
|
return "function call (or conversion)"
|
||||||
|
}
|
||||||
|
return "function call"
|
||||||
|
case *ast.CaseClause:
|
||||||
|
return "case clause"
|
||||||
|
case *ast.ChanType:
|
||||||
|
return "channel type"
|
||||||
|
case *ast.CommClause:
|
||||||
|
return "communication clause"
|
||||||
|
case *ast.Comment:
|
||||||
|
return "comment"
|
||||||
|
case *ast.CommentGroup:
|
||||||
|
return "comment group"
|
||||||
|
case *ast.CompositeLit:
|
||||||
|
return "composite literal"
|
||||||
|
case *ast.DeclStmt:
|
||||||
|
return NodeDescription(n.Decl) + " statement"
|
||||||
|
case *ast.DeferStmt:
|
||||||
|
return "defer statement"
|
||||||
|
case *ast.Ellipsis:
|
||||||
|
return "ellipsis"
|
||||||
|
case *ast.EmptyStmt:
|
||||||
|
return "empty statement"
|
||||||
|
case *ast.ExprStmt:
|
||||||
|
return "expression statement"
|
||||||
|
case *ast.Field:
|
||||||
|
// Can be any of these:
|
||||||
|
// struct {x, y int} -- struct field(s)
|
||||||
|
// struct {T} -- anon struct field
|
||||||
|
// interface {I} -- interface embedding
|
||||||
|
// interface {f()} -- interface method
|
||||||
|
// func (A) func(B) C -- receiver, param(s), result(s)
|
||||||
|
return "field/method/parameter"
|
||||||
|
case *ast.FieldList:
|
||||||
|
return "field/method/parameter list"
|
||||||
|
case *ast.File:
|
||||||
|
return "source file"
|
||||||
|
case *ast.ForStmt:
|
||||||
|
return "for loop"
|
||||||
|
case *ast.FuncDecl:
|
||||||
|
return "function declaration"
|
||||||
|
case *ast.FuncLit:
|
||||||
|
return "function literal"
|
||||||
|
case *ast.FuncType:
|
||||||
|
return "function type"
|
||||||
|
case *ast.GenDecl:
|
||||||
|
switch n.Tok {
|
||||||
|
case token.IMPORT:
|
||||||
|
return "import declaration"
|
||||||
|
case token.CONST:
|
||||||
|
return "constant declaration"
|
||||||
|
case token.TYPE:
|
||||||
|
return "type declaration"
|
||||||
|
case token.VAR:
|
||||||
|
return "variable declaration"
|
||||||
|
}
|
||||||
|
case *ast.GoStmt:
|
||||||
|
return "go statement"
|
||||||
|
case *ast.Ident:
|
||||||
|
return "identifier"
|
||||||
|
case *ast.IfStmt:
|
||||||
|
return "if statement"
|
||||||
|
case *ast.ImportSpec:
|
||||||
|
return "import specification"
|
||||||
|
case *ast.IncDecStmt:
|
||||||
|
if n.Tok == token.INC {
|
||||||
|
return "increment statement"
|
||||||
|
}
|
||||||
|
return "decrement statement"
|
||||||
|
case *ast.IndexExpr:
|
||||||
|
return "index expression"
|
||||||
|
case *ast.IndexListExpr:
|
||||||
|
return "index list expression"
|
||||||
|
case *ast.InterfaceType:
|
||||||
|
return "interface type"
|
||||||
|
case *ast.KeyValueExpr:
|
||||||
|
return "key/value association"
|
||||||
|
case *ast.LabeledStmt:
|
||||||
|
return "statement label"
|
||||||
|
case *ast.MapType:
|
||||||
|
return "map type"
|
||||||
|
case *ast.Package:
|
||||||
|
return "package"
|
||||||
|
case *ast.ParenExpr:
|
||||||
|
return "parenthesized " + NodeDescription(n.X)
|
||||||
|
case *ast.RangeStmt:
|
||||||
|
return "range loop"
|
||||||
|
case *ast.ReturnStmt:
|
||||||
|
return "return statement"
|
||||||
|
case *ast.SelectStmt:
|
||||||
|
return "select statement"
|
||||||
|
case *ast.SelectorExpr:
|
||||||
|
return "selector"
|
||||||
|
case *ast.SendStmt:
|
||||||
|
return "channel send"
|
||||||
|
case *ast.SliceExpr:
|
||||||
|
return "slice expression"
|
||||||
|
case *ast.StarExpr:
|
||||||
|
return "*-operation" // load/store expr or pointer type
|
||||||
|
case *ast.StructType:
|
||||||
|
return "struct type"
|
||||||
|
case *ast.SwitchStmt:
|
||||||
|
return "switch statement"
|
||||||
|
case *ast.TypeAssertExpr:
|
||||||
|
return "type assertion"
|
||||||
|
case *ast.TypeSpec:
|
||||||
|
return "type specification"
|
||||||
|
case *ast.TypeSwitchStmt:
|
||||||
|
return "type switch"
|
||||||
|
case *ast.UnaryExpr:
|
||||||
|
return fmt.Sprintf("unary %s operation", n.Op)
|
||||||
|
case *ast.ValueSpec:
|
||||||
|
return "value specification"
|
||||||
|
|
||||||
|
}
|
||||||
|
panic(fmt.Sprintf("unexpected node type: %T", n))
|
||||||
|
}
|
||||||
|
|
||||||
|
func is[T any](x any) bool {
|
||||||
|
_, ok := x.(T)
|
||||||
|
return ok
|
||||||
|
}
|
||||||
490
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/imports.go
generated
vendored
Normal file
490
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/imports.go
generated
vendored
Normal file
|
|
@ -0,0 +1,490 @@
|
||||||
|
// Copyright 2013 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Package astutil contains common utilities for working with the Go AST.
|
||||||
|
package astutil // import "golang.org/x/tools/go/ast/astutil"
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/token"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AddImport adds the import path to the file f, if absent.
|
||||||
|
func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
|
||||||
|
return AddNamedImport(fset, f, "", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddNamedImport adds the import with the given name and path to the file f, if absent.
|
||||||
|
// If name is not empty, it is used to rename the import.
|
||||||
|
//
|
||||||
|
// For example, calling
|
||||||
|
//
|
||||||
|
// AddNamedImport(fset, f, "pathpkg", "path")
|
||||||
|
//
|
||||||
|
// adds
|
||||||
|
//
|
||||||
|
// import pathpkg "path"
|
||||||
|
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
|
||||||
|
if imports(f, name, path) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
newImport := &ast.ImportSpec{
|
||||||
|
Path: &ast.BasicLit{
|
||||||
|
Kind: token.STRING,
|
||||||
|
Value: strconv.Quote(path),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
if name != "" {
|
||||||
|
newImport.Name = &ast.Ident{Name: name}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find an import decl to add to.
|
||||||
|
// The goal is to find an existing import
|
||||||
|
// whose import path has the longest shared
|
||||||
|
// prefix with path.
|
||||||
|
var (
|
||||||
|
bestMatch = -1 // length of longest shared prefix
|
||||||
|
lastImport = -1 // index in f.Decls of the file's final import decl
|
||||||
|
impDecl *ast.GenDecl // import decl containing the best match
|
||||||
|
impIndex = -1 // spec index in impDecl containing the best match
|
||||||
|
|
||||||
|
isThirdPartyPath = isThirdParty(path)
|
||||||
|
)
|
||||||
|
for i, decl := range f.Decls {
|
||||||
|
gen, ok := decl.(*ast.GenDecl)
|
||||||
|
if ok && gen.Tok == token.IMPORT {
|
||||||
|
lastImport = i
|
||||||
|
// Do not add to import "C", to avoid disrupting the
|
||||||
|
// association with its doc comment, breaking cgo.
|
||||||
|
if declImports(gen, "C") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match an empty import decl if that's all that is available.
|
||||||
|
if len(gen.Specs) == 0 && bestMatch == -1 {
|
||||||
|
impDecl = gen
|
||||||
|
}
|
||||||
|
|
||||||
|
// Compute longest shared prefix with imports in this group and find best
|
||||||
|
// matched import spec.
|
||||||
|
// 1. Always prefer import spec with longest shared prefix.
|
||||||
|
// 2. While match length is 0,
|
||||||
|
// - for stdlib package: prefer first import spec.
|
||||||
|
// - for third party package: prefer first third party import spec.
|
||||||
|
// We cannot use last import spec as best match for third party package
|
||||||
|
// because grouped imports are usually placed last by goimports -local
|
||||||
|
// flag.
|
||||||
|
// See issue #19190.
|
||||||
|
seenAnyThirdParty := false
|
||||||
|
for j, spec := range gen.Specs {
|
||||||
|
impspec := spec.(*ast.ImportSpec)
|
||||||
|
p := importPath(impspec)
|
||||||
|
n := matchLen(p, path)
|
||||||
|
if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
|
||||||
|
bestMatch = n
|
||||||
|
impDecl = gen
|
||||||
|
impIndex = j
|
||||||
|
}
|
||||||
|
seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no import decl found, add one after the last import.
|
||||||
|
if impDecl == nil {
|
||||||
|
impDecl = &ast.GenDecl{
|
||||||
|
Tok: token.IMPORT,
|
||||||
|
}
|
||||||
|
if lastImport >= 0 {
|
||||||
|
impDecl.TokPos = f.Decls[lastImport].End()
|
||||||
|
} else {
|
||||||
|
// There are no existing imports.
|
||||||
|
// Our new import, preceded by a blank line, goes after the package declaration
|
||||||
|
// and after the comment, if any, that starts on the same line as the
|
||||||
|
// package declaration.
|
||||||
|
impDecl.TokPos = f.Package
|
||||||
|
|
||||||
|
file := fset.File(f.Package)
|
||||||
|
pkgLine := file.Line(f.Package)
|
||||||
|
for _, c := range f.Comments {
|
||||||
|
if file.Line(c.Pos()) > pkgLine {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
// +2 for a blank line
|
||||||
|
impDecl.TokPos = c.End() + 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
f.Decls = append(f.Decls, nil)
|
||||||
|
copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
|
||||||
|
f.Decls[lastImport+1] = impDecl
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert new import at insertAt.
|
||||||
|
insertAt := 0
|
||||||
|
if impIndex >= 0 {
|
||||||
|
// insert after the found import
|
||||||
|
insertAt = impIndex + 1
|
||||||
|
}
|
||||||
|
impDecl.Specs = append(impDecl.Specs, nil)
|
||||||
|
copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
|
||||||
|
impDecl.Specs[insertAt] = newImport
|
||||||
|
pos := impDecl.Pos()
|
||||||
|
if insertAt > 0 {
|
||||||
|
// If there is a comment after an existing import, preserve the comment
|
||||||
|
// position by adding the new import after the comment.
|
||||||
|
if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil {
|
||||||
|
pos = spec.Comment.End()
|
||||||
|
} else {
|
||||||
|
// Assign same position as the previous import,
|
||||||
|
// so that the sorter sees it as being in the same block.
|
||||||
|
pos = impDecl.Specs[insertAt-1].Pos()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if newImport.Name != nil {
|
||||||
|
newImport.Name.NamePos = pos
|
||||||
|
}
|
||||||
|
newImport.Path.ValuePos = pos
|
||||||
|
newImport.EndPos = pos
|
||||||
|
|
||||||
|
// Clean up parens. impDecl contains at least one spec.
|
||||||
|
if len(impDecl.Specs) == 1 {
|
||||||
|
// Remove unneeded parens.
|
||||||
|
impDecl.Lparen = token.NoPos
|
||||||
|
} else if !impDecl.Lparen.IsValid() {
|
||||||
|
// impDecl needs parens added.
|
||||||
|
impDecl.Lparen = impDecl.Specs[0].Pos()
|
||||||
|
}
|
||||||
|
|
||||||
|
f.Imports = append(f.Imports, newImport)
|
||||||
|
|
||||||
|
if len(f.Decls) <= 1 {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge all the import declarations into the first one.
|
||||||
|
var first *ast.GenDecl
|
||||||
|
for i := 0; i < len(f.Decls); i++ {
|
||||||
|
decl := f.Decls[i]
|
||||||
|
gen, ok := decl.(*ast.GenDecl)
|
||||||
|
if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if first == nil {
|
||||||
|
first = gen
|
||||||
|
continue // Don't touch the first one.
|
||||||
|
}
|
||||||
|
// We now know there is more than one package in this import
|
||||||
|
// declaration. Ensure that it ends up parenthesized.
|
||||||
|
first.Lparen = first.Pos()
|
||||||
|
// Move the imports of the other import declaration to the first one.
|
||||||
|
for _, spec := range gen.Specs {
|
||||||
|
spec.(*ast.ImportSpec).Path.ValuePos = first.Pos()
|
||||||
|
first.Specs = append(first.Specs, spec)
|
||||||
|
}
|
||||||
|
f.Decls = append(f.Decls[:i], f.Decls[i+1:]...)
|
||||||
|
i--
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func isThirdParty(importPath string) bool {
|
||||||
|
// Third party package import path usually contains "." (".com", ".org", ...)
|
||||||
|
// This logic is taken from golang.org/x/tools/imports package.
|
||||||
|
return strings.Contains(importPath, ".")
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteImport deletes the import path from the file f, if present.
|
||||||
|
// If there are duplicate import declarations, all matching ones are deleted.
|
||||||
|
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
|
||||||
|
return DeleteNamedImport(fset, f, "", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
|
||||||
|
// If there are duplicate import declarations, all matching ones are deleted.
|
||||||
|
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
|
||||||
|
var delspecs []*ast.ImportSpec
|
||||||
|
var delcomments []*ast.CommentGroup
|
||||||
|
|
||||||
|
// Find the import nodes that import path, if any.
|
||||||
|
for i := 0; i < len(f.Decls); i++ {
|
||||||
|
decl := f.Decls[i]
|
||||||
|
gen, ok := decl.(*ast.GenDecl)
|
||||||
|
if !ok || gen.Tok != token.IMPORT {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j := 0; j < len(gen.Specs); j++ {
|
||||||
|
spec := gen.Specs[j]
|
||||||
|
impspec := spec.(*ast.ImportSpec)
|
||||||
|
if importName(impspec) != name || importPath(impspec) != path {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// We found an import spec that imports path.
|
||||||
|
// Delete it.
|
||||||
|
delspecs = append(delspecs, impspec)
|
||||||
|
deleted = true
|
||||||
|
copy(gen.Specs[j:], gen.Specs[j+1:])
|
||||||
|
gen.Specs = gen.Specs[:len(gen.Specs)-1]
|
||||||
|
|
||||||
|
// If this was the last import spec in this decl,
|
||||||
|
// delete the decl, too.
|
||||||
|
if len(gen.Specs) == 0 {
|
||||||
|
copy(f.Decls[i:], f.Decls[i+1:])
|
||||||
|
f.Decls = f.Decls[:len(f.Decls)-1]
|
||||||
|
i--
|
||||||
|
break
|
||||||
|
} else if len(gen.Specs) == 1 {
|
||||||
|
if impspec.Doc != nil {
|
||||||
|
delcomments = append(delcomments, impspec.Doc)
|
||||||
|
}
|
||||||
|
if impspec.Comment != nil {
|
||||||
|
delcomments = append(delcomments, impspec.Comment)
|
||||||
|
}
|
||||||
|
for _, cg := range f.Comments {
|
||||||
|
// Found comment on the same line as the import spec.
|
||||||
|
if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line {
|
||||||
|
delcomments = append(delcomments, cg)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
spec := gen.Specs[0].(*ast.ImportSpec)
|
||||||
|
|
||||||
|
// Move the documentation right after the import decl.
|
||||||
|
if spec.Doc != nil {
|
||||||
|
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line {
|
||||||
|
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, cg := range f.Comments {
|
||||||
|
if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line {
|
||||||
|
for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line {
|
||||||
|
fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line)
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if j > 0 {
|
||||||
|
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
|
||||||
|
lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
|
||||||
|
line := fset.PositionFor(impspec.Path.ValuePos, false).Line
|
||||||
|
|
||||||
|
// We deleted an entry but now there may be
|
||||||
|
// a blank line-sized hole where the import was.
|
||||||
|
if line-lastLine > 1 || !gen.Rparen.IsValid() {
|
||||||
|
// There was a blank line immediately preceding the deleted import,
|
||||||
|
// so there's no need to close the hole. The right parenthesis is
|
||||||
|
// invalid after AddImport to an import statement without parenthesis.
|
||||||
|
// Do nothing.
|
||||||
|
} else if line != fset.File(gen.Rparen).LineCount() {
|
||||||
|
// There was no blank line. Close the hole.
|
||||||
|
fset.File(gen.Rparen).MergeLine(line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
j--
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete imports from f.Imports.
|
||||||
|
for i := 0; i < len(f.Imports); i++ {
|
||||||
|
imp := f.Imports[i]
|
||||||
|
for j, del := range delspecs {
|
||||||
|
if imp == del {
|
||||||
|
copy(f.Imports[i:], f.Imports[i+1:])
|
||||||
|
f.Imports = f.Imports[:len(f.Imports)-1]
|
||||||
|
copy(delspecs[j:], delspecs[j+1:])
|
||||||
|
delspecs = delspecs[:len(delspecs)-1]
|
||||||
|
i--
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete comments from f.Comments.
|
||||||
|
for i := 0; i < len(f.Comments); i++ {
|
||||||
|
cg := f.Comments[i]
|
||||||
|
for j, del := range delcomments {
|
||||||
|
if cg == del {
|
||||||
|
copy(f.Comments[i:], f.Comments[i+1:])
|
||||||
|
f.Comments = f.Comments[:len(f.Comments)-1]
|
||||||
|
copy(delcomments[j:], delcomments[j+1:])
|
||||||
|
delcomments = delcomments[:len(delcomments)-1]
|
||||||
|
i--
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(delspecs) > 0 {
|
||||||
|
panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs))
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// RewriteImport rewrites any import of path oldPath to path newPath.
|
||||||
|
func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) {
|
||||||
|
for _, imp := range f.Imports {
|
||||||
|
if importPath(imp) == oldPath {
|
||||||
|
rewrote = true
|
||||||
|
// record old End, because the default is to compute
|
||||||
|
// it using the length of imp.Path.Value.
|
||||||
|
imp.EndPos = imp.End()
|
||||||
|
imp.Path.Value = strconv.Quote(newPath)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// UsesImport reports whether a given import is used.
|
||||||
|
// The provided File must have been parsed with syntactic object resolution
|
||||||
|
// (not using go/parser.SkipObjectResolution).
|
||||||
|
func UsesImport(f *ast.File, path string) (used bool) {
|
||||||
|
if f.Scope == nil {
|
||||||
|
panic("file f was not parsed with syntactic object resolution")
|
||||||
|
}
|
||||||
|
spec := importSpec(f, path)
|
||||||
|
if spec == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
name := spec.Name.String()
|
||||||
|
switch name {
|
||||||
|
case "<nil>":
|
||||||
|
// If the package name is not explicitly specified,
|
||||||
|
// make an educated guess. This is not guaranteed to be correct.
|
||||||
|
lastSlash := strings.LastIndex(path, "/")
|
||||||
|
if lastSlash == -1 {
|
||||||
|
name = path
|
||||||
|
} else {
|
||||||
|
name = path[lastSlash+1:]
|
||||||
|
}
|
||||||
|
case "_", ".":
|
||||||
|
// Not sure if this import is used - err on the side of caution.
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
ast.Walk(visitFn(func(n ast.Node) {
|
||||||
|
sel, ok := n.(*ast.SelectorExpr)
|
||||||
|
if ok && isTopName(sel.X, name) {
|
||||||
|
used = true
|
||||||
|
}
|
||||||
|
}), f)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
type visitFn func(node ast.Node)
|
||||||
|
|
||||||
|
func (fn visitFn) Visit(node ast.Node) ast.Visitor {
|
||||||
|
fn(node)
|
||||||
|
return fn
|
||||||
|
}
|
||||||
|
|
||||||
|
// imports reports whether f has an import with the specified name and path.
|
||||||
|
func imports(f *ast.File, name, path string) bool {
|
||||||
|
for _, s := range f.Imports {
|
||||||
|
if importName(s) == name && importPath(s) == path {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// importSpec returns the import spec if f imports path,
|
||||||
|
// or nil otherwise.
|
||||||
|
func importSpec(f *ast.File, path string) *ast.ImportSpec {
|
||||||
|
for _, s := range f.Imports {
|
||||||
|
if importPath(s) == path {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// importName returns the name of s,
|
||||||
|
// or "" if the import is not named.
|
||||||
|
func importName(s *ast.ImportSpec) string {
|
||||||
|
if s.Name == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return s.Name.Name
|
||||||
|
}
|
||||||
|
|
||||||
|
// importPath returns the unquoted import path of s,
|
||||||
|
// or "" if the path is not properly quoted.
|
||||||
|
func importPath(s *ast.ImportSpec) string {
|
||||||
|
t, err := strconv.Unquote(s.Path.Value)
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
// declImports reports whether gen contains an import of path.
|
||||||
|
func declImports(gen *ast.GenDecl, path string) bool {
|
||||||
|
if gen.Tok != token.IMPORT {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, spec := range gen.Specs {
|
||||||
|
impspec := spec.(*ast.ImportSpec)
|
||||||
|
if importPath(impspec) == path {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// matchLen returns the length of the longest path segment prefix shared by x and y.
|
||||||
|
func matchLen(x, y string) int {
|
||||||
|
n := 0
|
||||||
|
for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ {
|
||||||
|
if x[i] == '/' {
|
||||||
|
n++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
// isTopName returns true if n is a top-level unresolved identifier with the given name.
|
||||||
|
func isTopName(n ast.Expr, name string) bool {
|
||||||
|
id, ok := n.(*ast.Ident)
|
||||||
|
return ok && id.Name == name && id.Obj == nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Imports returns the file imports grouped by paragraph.
|
||||||
|
func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec {
|
||||||
|
var groups [][]*ast.ImportSpec
|
||||||
|
|
||||||
|
for _, decl := range f.Decls {
|
||||||
|
genDecl, ok := decl.(*ast.GenDecl)
|
||||||
|
if !ok || genDecl.Tok != token.IMPORT {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
group := []*ast.ImportSpec{}
|
||||||
|
|
||||||
|
var lastLine int
|
||||||
|
for _, spec := range genDecl.Specs {
|
||||||
|
importSpec := spec.(*ast.ImportSpec)
|
||||||
|
pos := importSpec.Path.ValuePos
|
||||||
|
line := fset.Position(pos).Line
|
||||||
|
if lastLine > 0 && pos > 0 && line-lastLine > 1 {
|
||||||
|
groups = append(groups, group)
|
||||||
|
group = []*ast.ImportSpec{}
|
||||||
|
}
|
||||||
|
group = append(group, importSpec)
|
||||||
|
lastLine = line
|
||||||
|
}
|
||||||
|
groups = append(groups, group)
|
||||||
|
}
|
||||||
|
|
||||||
|
return groups
|
||||||
|
}
|
||||||
486
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
generated
vendored
Normal file
486
src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
generated
vendored
Normal file
|
|
@ -0,0 +1,486 @@
|
||||||
|
// Copyright 2017 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package astutil
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
// An ApplyFunc is invoked by Apply for each node n, even if n is nil,
|
||||||
|
// before and/or after the node's children, using a Cursor describing
|
||||||
|
// the current node and providing operations on it.
|
||||||
|
//
|
||||||
|
// The return value of ApplyFunc controls the syntax tree traversal.
|
||||||
|
// See Apply for details.
|
||||||
|
type ApplyFunc func(*Cursor) bool
|
||||||
|
|
||||||
|
// Apply traverses a syntax tree recursively, starting with root,
|
||||||
|
// and calling pre and post for each node as described below.
|
||||||
|
// Apply returns the syntax tree, possibly modified.
|
||||||
|
//
|
||||||
|
// If pre is not nil, it is called for each node before the node's
|
||||||
|
// children are traversed (pre-order). If pre returns false, no
|
||||||
|
// children are traversed, and post is not called for that node.
|
||||||
|
//
|
||||||
|
// If post is not nil, and a prior call of pre didn't return false,
|
||||||
|
// post is called for each node after its children are traversed
|
||||||
|
// (post-order). If post returns false, traversal is terminated and
|
||||||
|
// Apply returns immediately.
|
||||||
|
//
|
||||||
|
// Only fields that refer to AST nodes are considered children;
|
||||||
|
// i.e., token.Pos, Scopes, Objects, and fields of basic types
|
||||||
|
// (strings, etc.) are ignored.
|
||||||
|
//
|
||||||
|
// Children are traversed in the order in which they appear in the
|
||||||
|
// respective node's struct definition. A package's files are
|
||||||
|
// traversed in the filenames' alphabetical order.
|
||||||
|
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
|
||||||
|
parent := &struct{ ast.Node }{root}
|
||||||
|
defer func() {
|
||||||
|
if r := recover(); r != nil && r != abort {
|
||||||
|
panic(r)
|
||||||
|
}
|
||||||
|
result = parent.Node
|
||||||
|
}()
|
||||||
|
a := &application{pre: pre, post: post}
|
||||||
|
a.apply(parent, "Node", nil, root)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var abort = new(int) // singleton, to signal termination of Apply
|
||||||
|
|
||||||
|
// A Cursor describes a node encountered during Apply.
|
||||||
|
// Information about the node and its parent is available
|
||||||
|
// from the Node, Parent, Name, and Index methods.
|
||||||
|
//
|
||||||
|
// If p is a variable of type and value of the current parent node
|
||||||
|
// c.Parent(), and f is the field identifier with name c.Name(),
|
||||||
|
// the following invariants hold:
|
||||||
|
//
|
||||||
|
//	p.f            == c.Node()  if c.Index() <  0
//	p.f[c.Index()] == c.Node()  if c.Index() >= 0
//
// The methods Replace, Delete, InsertBefore, and InsertAfter
// can be used to change the AST without disrupting Apply.
type Cursor struct {
	parent ast.Node
	name   string
	iter   *iterator // valid if non-nil
	node   ast.Node
}

// Node returns the current Node.
func (c *Cursor) Node() ast.Node { return c.node }

// Parent returns the parent of the current Node.
func (c *Cursor) Parent() ast.Node { return c.parent }

// Name returns the name of the parent Node field that contains the current Node.
// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns
// the filename for the current Node.
func (c *Cursor) Name() string { return c.name }

// Index reports the index >= 0 of the current Node in the slice of Nodes that
// contains it, or a value < 0 if the current Node is not part of a slice.
// The index of the current node changes if InsertBefore is called while
// processing the current node.
func (c *Cursor) Index() int {
	if c.iter != nil {
		return c.iter.index
	}
	return -1
}

// field returns the current node's parent field value.
func (c *Cursor) field() reflect.Value {
	return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name)
}

// Replace replaces the current Node with n.
// The replacement node is not walked by Apply.
func (c *Cursor) Replace(n ast.Node) {
	if _, ok := c.node.(*ast.File); ok {
		file, ok := n.(*ast.File)
		if !ok {
			panic("attempt to replace *ast.File with non-*ast.File")
		}
		c.parent.(*ast.Package).Files[c.name] = file
		return
	}

	v := c.field()
	if i := c.Index(); i >= 0 {
		v = v.Index(i)
	}
	v.Set(reflect.ValueOf(n))
}

// Delete deletes the current Node from its containing slice.
// If the current Node is not part of a slice, Delete panics.
// As a special case, if the current node is a package file,
// Delete removes it from the package's Files map.
func (c *Cursor) Delete() {
	if _, ok := c.node.(*ast.File); ok {
		delete(c.parent.(*ast.Package).Files, c.name)
		return
	}

	i := c.Index()
	if i < 0 {
		panic("Delete node not contained in slice")
	}
	v := c.field()
	l := v.Len()
	reflect.Copy(v.Slice(i, l), v.Slice(i+1, l))
	v.Index(l - 1).Set(reflect.Zero(v.Type().Elem()))
	v.SetLen(l - 1)
	c.iter.step--
}

// InsertAfter inserts n after the current Node in its containing slice.
// If the current Node is not part of a slice, InsertAfter panics.
// Apply does not walk n.
func (c *Cursor) InsertAfter(n ast.Node) {
	i := c.Index()
	if i < 0 {
		panic("InsertAfter node not contained in slice")
	}
	v := c.field()
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l))
	v.Index(i + 1).Set(reflect.ValueOf(n))
	c.iter.step++
}

// InsertBefore inserts n before the current Node in its containing slice.
// If the current Node is not part of a slice, InsertBefore panics.
// Apply will not walk n.
func (c *Cursor) InsertBefore(n ast.Node) {
	i := c.Index()
	if i < 0 {
		panic("InsertBefore node not contained in slice")
	}
	v := c.field()
	v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem())))
	l := v.Len()
	reflect.Copy(v.Slice(i+1, l), v.Slice(i, l))
	v.Index(i).Set(reflect.ValueOf(n))
	c.iter.index++
}

// application carries all the shared data so we can pass it around cheaply.
type application struct {
	pre, post ApplyFunc
	cursor    Cursor
	iter      iterator
}

func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) {
	// convert typed nil into untyped nil
	if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() {
		n = nil
	}

	// avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead
	saved := a.cursor
	a.cursor.parent = parent
	a.cursor.name = name
	a.cursor.iter = iter
	a.cursor.node = n

	if a.pre != nil && !a.pre(&a.cursor) {
		a.cursor = saved
		return
	}

	// walk children
	// (the order of the cases matches the order of the corresponding node types in go/ast)
	switch n := n.(type) {
	case nil:
		// nothing to do

	// Comments and fields
	case *ast.Comment:
		// nothing to do

	case *ast.CommentGroup:
		if n != nil {
			a.applyList(n, "List")
		}

	case *ast.Field:
		a.apply(n, "Doc", nil, n.Doc)
		a.applyList(n, "Names")
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Tag", nil, n.Tag)
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.FieldList:
		a.applyList(n, "List")

	// Expressions
	case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
		// nothing to do

	case *ast.Ellipsis:
		a.apply(n, "Elt", nil, n.Elt)

	case *ast.FuncLit:
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Body", nil, n.Body)

	case *ast.CompositeLit:
		a.apply(n, "Type", nil, n.Type)
		a.applyList(n, "Elts")

	case *ast.ParenExpr:
		a.apply(n, "X", nil, n.X)

	case *ast.SelectorExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Sel", nil, n.Sel)

	case *ast.IndexExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Index", nil, n.Index)

	case *ast.IndexListExpr:
		a.apply(n, "X", nil, n.X)
		a.applyList(n, "Indices")

	case *ast.SliceExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Low", nil, n.Low)
		a.apply(n, "High", nil, n.High)
		a.apply(n, "Max", nil, n.Max)

	case *ast.TypeAssertExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Type", nil, n.Type)

	case *ast.CallExpr:
		a.apply(n, "Fun", nil, n.Fun)
		a.applyList(n, "Args")

	case *ast.StarExpr:
		a.apply(n, "X", nil, n.X)

	case *ast.UnaryExpr:
		a.apply(n, "X", nil, n.X)

	case *ast.BinaryExpr:
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Y", nil, n.Y)

	case *ast.KeyValueExpr:
		a.apply(n, "Key", nil, n.Key)
		a.apply(n, "Value", nil, n.Value)

	// Types
	case *ast.ArrayType:
		a.apply(n, "Len", nil, n.Len)
		a.apply(n, "Elt", nil, n.Elt)

	case *ast.StructType:
		a.apply(n, "Fields", nil, n.Fields)

	case *ast.FuncType:
		if tparams := n.TypeParams; tparams != nil {
			a.apply(n, "TypeParams", nil, tparams)
		}
		a.apply(n, "Params", nil, n.Params)
		a.apply(n, "Results", nil, n.Results)

	case *ast.InterfaceType:
		a.apply(n, "Methods", nil, n.Methods)

	case *ast.MapType:
		a.apply(n, "Key", nil, n.Key)
		a.apply(n, "Value", nil, n.Value)

	case *ast.ChanType:
		a.apply(n, "Value", nil, n.Value)

	// Statements
	case *ast.BadStmt:
		// nothing to do

	case *ast.DeclStmt:
		a.apply(n, "Decl", nil, n.Decl)

	case *ast.EmptyStmt:
		// nothing to do

	case *ast.LabeledStmt:
		a.apply(n, "Label", nil, n.Label)
		a.apply(n, "Stmt", nil, n.Stmt)

	case *ast.ExprStmt:
		a.apply(n, "X", nil, n.X)

	case *ast.SendStmt:
		a.apply(n, "Chan", nil, n.Chan)
		a.apply(n, "Value", nil, n.Value)

	case *ast.IncDecStmt:
		a.apply(n, "X", nil, n.X)

	case *ast.AssignStmt:
		a.applyList(n, "Lhs")
		a.applyList(n, "Rhs")

	case *ast.GoStmt:
		a.apply(n, "Call", nil, n.Call)

	case *ast.DeferStmt:
		a.apply(n, "Call", nil, n.Call)

	case *ast.ReturnStmt:
		a.applyList(n, "Results")

	case *ast.BranchStmt:
		a.apply(n, "Label", nil, n.Label)

	case *ast.BlockStmt:
		a.applyList(n, "List")

	case *ast.IfStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Cond", nil, n.Cond)
		a.apply(n, "Body", nil, n.Body)
		a.apply(n, "Else", nil, n.Else)

	case *ast.CaseClause:
		a.applyList(n, "List")
		a.applyList(n, "Body")

	case *ast.SwitchStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Tag", nil, n.Tag)
		a.apply(n, "Body", nil, n.Body)

	case *ast.TypeSwitchStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Assign", nil, n.Assign)
		a.apply(n, "Body", nil, n.Body)

	case *ast.CommClause:
		a.apply(n, "Comm", nil, n.Comm)
		a.applyList(n, "Body")

	case *ast.SelectStmt:
		a.apply(n, "Body", nil, n.Body)

	case *ast.ForStmt:
		a.apply(n, "Init", nil, n.Init)
		a.apply(n, "Cond", nil, n.Cond)
		a.apply(n, "Post", nil, n.Post)
		a.apply(n, "Body", nil, n.Body)

	case *ast.RangeStmt:
		a.apply(n, "Key", nil, n.Key)
		a.apply(n, "Value", nil, n.Value)
		a.apply(n, "X", nil, n.X)
		a.apply(n, "Body", nil, n.Body)

	// Declarations
	case *ast.ImportSpec:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Name", nil, n.Name)
		a.apply(n, "Path", nil, n.Path)
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.ValueSpec:
		a.apply(n, "Doc", nil, n.Doc)
		a.applyList(n, "Names")
		a.apply(n, "Type", nil, n.Type)
		a.applyList(n, "Values")
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.TypeSpec:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Name", nil, n.Name)
		if tparams := n.TypeParams; tparams != nil {
			a.apply(n, "TypeParams", nil, tparams)
		}
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Comment", nil, n.Comment)

	case *ast.BadDecl:
		// nothing to do

	case *ast.GenDecl:
		a.apply(n, "Doc", nil, n.Doc)
		a.applyList(n, "Specs")

	case *ast.FuncDecl:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Recv", nil, n.Recv)
		a.apply(n, "Name", nil, n.Name)
		a.apply(n, "Type", nil, n.Type)
		a.apply(n, "Body", nil, n.Body)

	// Files and packages
	case *ast.File:
		a.apply(n, "Doc", nil, n.Doc)
		a.apply(n, "Name", nil, n.Name)
		a.applyList(n, "Decls")
		// Don't walk n.Comments; they have either been walked already if
		// they are Doc comments, or they can be easily walked explicitly.

	case *ast.Package:
		// collect and sort names for reproducible behavior
		var names []string
		for name := range n.Files {
			names = append(names, name)
		}
		sort.Strings(names)
		for _, name := range names {
			a.apply(n, name, nil, n.Files[name])
		}

	default:
		panic(fmt.Sprintf("Apply: unexpected node type %T", n))
	}

	if a.post != nil && !a.post(&a.cursor) {
		panic(abort)
	}

	a.cursor = saved
}

// An iterator controls iteration over a slice of nodes.
type iterator struct {
	index, step int
}

func (a *application) applyList(parent ast.Node, name string) {
	// avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead
	saved := a.iter
	a.iter.index = 0
	for {
		// must reload parent.name each time, since cursor modifications might change it
		v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name)
		if a.iter.index >= v.Len() {
			break
		}

		// element x may be nil in a bad AST - be cautious
		var x ast.Node
		if e := v.Index(a.iter.index); e.IsValid() {
			x = e.Interface().(ast.Node)
		}

		a.iter.step = 1
		a.apply(parent, name, &a.iter, x)
		a.iter.index += a.iter.step
	}
	a.iter = saved
}
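
For context, the Cursor API above is normally driven through astutil.Apply. The following minimal sketch is purely illustrative (the source text and the "foo"/"bar" identifiers are arbitrary, not anything used by this change); it shows a pre callback replacing identifiers via Cursor.Replace.

	// Illustrative sketch: rename every identifier "foo" to "bar" with Apply and Cursor.Replace.
	package main

	import (
		"go/ast"
		"go/format"
		"go/parser"
		"go/token"
		"os"

		"golang.org/x/tools/go/ast/astutil"
	)

	func main() {
		fset := token.NewFileSet()
		src := "package p\n\nvar foo = 1\nvar y = foo + 2\n"
		f, err := parser.ParseFile(fset, "p.go", src, 0)
		if err != nil {
			panic(err)
		}
		// The pre callback sees each node before its children are walked;
		// returning true continues the traversal.
		result := astutil.Apply(f, func(c *astutil.Cursor) bool {
			if id, ok := c.Node().(*ast.Ident); ok && id.Name == "foo" {
				c.Replace(ast.NewIdent("bar")) // the replacement is not walked by Apply
			}
			return true
		}, nil)
		if err := format.Node(os.Stdout, fset, result); err != nil {
			panic(err)
		}
	}

Returning true from the callback keeps the traversal going; as documented on Replace, the replacement node itself is not visited again.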

src/cmd/compile/internal/ssa/_gen/vendor/golang.org/x/tools/go/ast/astutil/util.go (new file, 11 lines, generated, vendored)
@ -0,0 +1,11 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package astutil

import "go/ast"

// Unparen returns e with any enclosing parentheses stripped.
// Deprecated: use [ast.Unparen].
func Unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) }
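
A quick illustration of what this thin wrapper does (the expression text is arbitrary):

	// Illustrative only: Unparen strips any number of enclosing parentheses.
	package main

	import (
		"fmt"
		"go/parser"

		"golang.org/x/tools/go/ast/astutil"
	)

	func main() {
		e, err := parser.ParseExpr("((x + y))")
		if err != nil {
			panic(err)
		}
		fmt.Printf("%T\n", astutil.Unparen(e)) // *ast.BinaryExpr, not *ast.ParenExpr
	}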

src/cmd/compile/internal/ssa/_gen/vendor/modules.txt (new file, 3 lines, vendored)
@ -0,0 +1,3 @@
# golang.org/x/tools v0.27.0
## explicit; go 1.22.0
golang.org/x/tools/go/ast/astutil

src/cmd/compile/internal/ssa/generate_test.go (new file, 135 lines)
@ -0,0 +1,135 @@
// Copyright 2025 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
	"bytes"
	"fmt"
	"internal/testenv"
	"os"
	"path/filepath"
	"testing"
)

const expectedHeader = "// Code generated from _gen/" // this is the common part

// TestGeneratedFilesUpToDate regenerates all the rewrite and rewrite-related
// files defined in _gen into a temporary directory,
// checks that they match what appears in the source tree,
// verifies that they start with the prefix of a generated header,
// and checks that the only source files with that header were actually generated.
func TestGeneratedFilesUpToDate(t *testing.T) {
	testenv.MustHaveGoRun(t)
	wd, err := os.Getwd()
	if err != nil {
		t.Fatalf("Failed to get current working directory: %v", err)
	}
	genDir := filepath.Join(wd, "_gen")
	if _, err := os.Stat(genDir); os.IsNotExist(err) {
		t.Fatalf("_gen directory not found")
	}

	tmpdir := t.TempDir()

	// Accumulate a list of all existing files that look generated.
	// It's an error if this set does not match the set that are
	// generated into tmpdir.
	genFiles := make(map[string]bool)
	genPrefix := []byte(expectedHeader)
	ssaFiles, err := filepath.Glob(filepath.Join(wd, "*.go"))
	if err != nil {
		t.Fatalf("could not glob for .go files in ssa directory: %v", err)
	}
	for _, f := range ssaFiles {
		contents, err := os.ReadFile(f)
		if err != nil {
			t.Fatalf("could not read source file from ssa directory: %v", err)
		}
		// verify that the generated file has the expected header
		// (this should cause other failures later, but if this is
		// the problem, diagnose it here to shorten the treasure hunt.)
		if bytes.HasPrefix(contents, genPrefix) {
			genFiles[filepath.Base(f)] = true
		}
	}

	goFiles, err := filepath.Glob(filepath.Join(genDir, "*.go"))
	if err != nil {
		t.Fatalf("could not glob for .go files in _gen: %v", err)
	}
	if len(goFiles) == 0 {
		t.Fatal("no .go files found in _gen")
	}

	// Construct the command line for "go run".
	// Explicitly list the files, just to make it
	// clear what is included (if the test is logging).
	args := []string{"run", "-C", genDir}
	for _, f := range goFiles {
		args = append(args, filepath.Base(f))
	}
	args = append(args, "-outdir", tmpdir)

	logArgs := fmt.Sprintf("%v", args)
	logArgs = logArgs[1 : len(logArgs)-2] // strip '[' and ']'
	t.Logf("%s %v", testenv.GoToolPath(t), logArgs)
	output, err := testenv.Command(t, testenv.GoToolPath(t), args...).CombinedOutput()

	if err != nil {
		t.Fatalf("go run in _gen failed: %v\n%s", err, output)
	}

	// Compare generated files with existing files in the parent directory.
	files, err := os.ReadDir(tmpdir)
	if err != nil {
		t.Fatalf("could not read tmpdir %s: %v", tmpdir, err)
	}

	for _, file := range files {
		if file.IsDir() {
			continue
		}
		filename := file.Name()

		// filename must be in the generated set,
		if !genFiles[filename] {
			t.Errorf("%s does not start with the expected header '%s' (if the header was changed the test needs to be updated)",
				filename, expectedHeader)
		}
		genFiles[filename] = false // remove from set

		generatedPath := filepath.Join(tmpdir, filename)
		originalPath := filepath.Join(wd, filename)

		generatedData, err := os.ReadFile(generatedPath)
		if err != nil {
			t.Errorf("could not read generated file %s: %v", generatedPath, err)
			continue
		}

		// there should be a corresponding file in the ssa directory,
		originalData, err := os.ReadFile(originalPath)
		if err != nil {
			if os.IsNotExist(err) {
				t.Errorf("generated file %s was created, but does not exist in the ssa directory. It may need to be added to the repository.", filename)
			} else {
				t.Errorf("could not read original file %s: %v", originalPath, err)
			}
			continue
		}

		// and the contents of that file should match.
		if !bytes.Equal(originalData, generatedData) {
			t.Errorf("%s is out of date. Please run 'go generate'.", filename)
		}
	}

	// the generated set should be empty now.
	for file, notGenerated := range genFiles {
		if notGenerated {
			t.Errorf("%s has the header of a generated file but was not generated", file)
		}
	}
}
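
A rough standalone equivalent of the command the test assembles is sketched below; the repository-relative path and the use of "." in place of an explicit file list are assumptions made for brevity, not something this change prescribes.

	// Illustrative sketch: run the _gen generator into a scratch directory,
	// mirroring the "go run ... -outdir" invocation built by the test above.
	package main

	import (
		"fmt"
		"os"
		"os/exec"
	)

	func main() {
		tmp, err := os.MkdirTemp("", "ssa-gen-")
		if err != nil {
			panic(err)
		}
		// "-C" changes into the _gen directory before running; "." runs the whole
		// generator package rather than listing each .go file.
		cmd := exec.Command("go", "run", "-C", "src/cmd/compile/internal/ssa/_gen", ".", "-outdir", tmp)
		cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
		if err := cmd.Run(); err != nil {
			panic(err)
		}
		fmt.Println("generated files written to", tmp)
	}

The test instead lists every .go file in _gen explicitly so its log output shows exactly what was compiled.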