[dev.regabi] cmd/compile: replace *Node type with an interface Node [generated]

The plan is to introduce a Node interface that replaces the old *Node pointer-to-struct.

The previous CL defined an interface INode modeling a *Node.

This CL:
 - Changes all references outside internal/ir to use INode,
   along with many references inside internal/ir as well.
 - Renames Node to node.
 - Renames INode to Node

So now ir.Node is an interface implemented by *ir.node, which is otherwise inaccessible,
and the code outside package ir is now (clearly) using only the interface.

The usual rule is never to redefine an existing name with a new meaning,
so that old code that hasn't been updated gets an "unknown name" error
instead of more mysterious errors or silent misbehavior. That rule would
caution against replacing Node-the-struct with Node-the-interface,
as in this CL, because code that says *Node would now be using a pointer
to an interface. But this CL is being landed at the same time as another that
moves Node from gc to ir. So the net effect is to replace *gc.Node with ir.Node,
which does follow the rule: any lingering references to gc.Node will be told
it's gone, not silently start using pointers to interfaces. So the rule is followed
by the CL sequence, just not this specific CL.

Overall, the loss of inlining caused by using interfaces cuts the compiler speed
by about 6%, a not insignificant amount. However, as we convert the representation
to concrete structs that are not the giant Node over the next weeks, that speed
should come back as more of the compiler starts operating directly on concrete types
and the memory taken up by the graph of Nodes drops due to the more precise
structs. Honestly, I was expecting worse.

% benchstat bench.old bench.new
name                      old time/op       new time/op       delta
Template                        168ms ± 4%        182ms ± 2%   +8.34%  (p=0.000 n=9+9)
Unicode                        72.2ms ±10%       82.5ms ± 6%  +14.38%  (p=0.000 n=9+9)
GoTypes                         563ms ± 8%        598ms ± 2%   +6.14%  (p=0.006 n=9+9)
Compiler                        2.89s ± 4%        3.04s ± 2%   +5.37%  (p=0.000 n=10+9)
SSA                             6.45s ± 4%        7.25s ± 5%  +12.41%  (p=0.000 n=9+10)
Flate                           105ms ± 2%        115ms ± 1%   +9.66%  (p=0.000 n=10+8)
GoParser                        144ms ±10%        152ms ± 2%   +5.79%  (p=0.011 n=9+8)
Reflect                         345ms ± 9%        370ms ± 4%   +7.28%  (p=0.001 n=10+9)
Tar                             149ms ± 9%        161ms ± 5%   +8.05%  (p=0.001 n=10+9)
XML                             190ms ± 3%        209ms ± 2%   +9.54%  (p=0.000 n=9+8)
LinkCompiler                    327ms ± 2%        325ms ± 2%     ~     (p=0.382 n=8+8)
ExternalLinkCompiler            1.77s ± 4%        1.73s ± 6%     ~     (p=0.113 n=9+10)
LinkWithoutDebugCompiler        214ms ± 4%        211ms ± 2%     ~     (p=0.360 n=10+8)
StdCmd                          14.8s ± 3%        15.9s ± 1%   +6.98%  (p=0.000 n=10+9)
[Geo mean]                      480ms             510ms        +6.31%

name                      old user-time/op  new user-time/op  delta
Template                        223ms ± 3%        237ms ± 3%   +6.16%  (p=0.000 n=9+10)
Unicode                         103ms ± 6%        113ms ± 3%   +9.53%  (p=0.000 n=9+9)
GoTypes                         758ms ± 8%        800ms ± 2%   +5.55%  (p=0.003 n=10+9)
Compiler                        3.95s ± 2%        4.12s ± 2%   +4.34%  (p=0.000 n=10+9)
SSA                             9.43s ± 1%        9.74s ± 4%   +3.25%  (p=0.000 n=8+10)
Flate                           132ms ± 2%        141ms ± 2%   +6.89%  (p=0.000 n=9+9)
GoParser                        177ms ± 9%        183ms ± 4%     ~     (p=0.050 n=9+9)
Reflect                         467ms ±10%        495ms ± 7%   +6.17%  (p=0.029 n=10+10)
Tar                             183ms ± 9%        197ms ± 5%   +7.92%  (p=0.001 n=10+10)
XML                             249ms ± 5%        268ms ± 4%   +7.82%  (p=0.000 n=10+9)
LinkCompiler                    544ms ± 5%        544ms ± 6%     ~     (p=0.863 n=9+9)
ExternalLinkCompiler            1.79s ± 4%        1.75s ± 6%     ~     (p=0.075 n=10+10)
LinkWithoutDebugCompiler        248ms ± 6%        246ms ± 2%     ~     (p=0.965 n=10+8)
[Geo mean]                      483ms             504ms        +4.41%

[git-generate]
cd src/cmd/compile/internal/ir
: # We need to do the conversion in multiple steps, so we introduce
: # a temporary type alias that will start out meaning the pointer-to-struct
: # and then change to mean the interface.
rf '
	mv Node OldNode

	add node.go \
		type Node = *OldNode
'

: # It should work to do this ex in ir, but it misses test files, due to a bug in rf.
: # Run the command in gc to handle gc's tests, and then again in ssa for ssa's tests.
cd ../gc
rf '
        ex .  ../arm ../riscv64 ../arm64 ../mips64 ../ppc64 ../mips ../wasm {
                import "cmd/compile/internal/ir"
                *ir.OldNode -> ir.Node
        }
'
cd ../ssa
rf '
        ex {
                import "cmd/compile/internal/ir"
                *ir.OldNode -> ir.Node
        }
'

: # Back in ir, finish conversion clumsily with sed,
: # because type checking and circular aliases do not mix.
cd ../ir
sed -i '' '
	/type Node = \*OldNode/d
	s/\*OldNode/Node/g
	s/^func (n Node)/func (n *OldNode)/
	s/OldNode/node/g
	s/type INode interface/type Node interface/
	s/var _ INode = (Node)(nil)/var _ Node = (*node)(nil)/
' *.go
gofmt -w *.go

sed -i '' '
	s/{Func{}, 136, 248}/{Func{}, 152, 280}/
	s/{Name{}, 32, 56}/{Name{}, 44, 80}/
	s/{Param{}, 24, 48}/{Param{}, 44, 88}/
	s/{node{}, 76, 128}/{node{}, 88, 152}/
' sizeof_test.go

cd ../ssa
sed -i '' '
	s/{LocalSlot{}, 28, 40}/{LocalSlot{}, 32, 48}/
' sizeof_test.go

cd ../gc
sed -i '' 's/\*ir.Node/ir.Node/' mkbuiltin.go

cd ../../../..
go install std cmd
cd cmd/compile
go test -u || go test -u

Change-Id: I196bbe3b648e4701662e4a2bada40bf155e2a553
Reviewed-on: https://go-review.googlesource.com/c/go/+/272935
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
This commit is contained in:
Russ Cox 2020-11-25 01:11:56 -05:00
parent 4d0d9c2c5c
commit 41f3af9d04
62 changed files with 1273 additions and 1272 deletions

View file

@ -22,14 +22,7 @@ package main_test
var knownFormats = map[string]string{ var knownFormats = map[string]string{
"*bytes.Buffer %s": "", "*bytes.Buffer %s": "",
"*cmd/compile/internal/gc.EscLocation %v": "", "*cmd/compile/internal/gc.EscLocation %v": "",
"*cmd/compile/internal/ir.Node %#v": "", "*cmd/compile/internal/ir.node %v": "",
"*cmd/compile/internal/ir.Node %+S": "",
"*cmd/compile/internal/ir.Node %+v": "",
"*cmd/compile/internal/ir.Node %L": "",
"*cmd/compile/internal/ir.Node %S": "",
"*cmd/compile/internal/ir.Node %j": "",
"*cmd/compile/internal/ir.Node %p": "",
"*cmd/compile/internal/ir.Node %v": "",
"*cmd/compile/internal/ssa.Block %s": "", "*cmd/compile/internal/ssa.Block %s": "",
"*cmd/compile/internal/ssa.Block %v": "", "*cmd/compile/internal/ssa.Block %v": "",
"*cmd/compile/internal/ssa.Func %s": "", "*cmd/compile/internal/ssa.Func %s": "",
@ -83,6 +76,14 @@ var knownFormats = map[string]string{
"cmd/compile/internal/ir.Class %d": "", "cmd/compile/internal/ir.Class %d": "",
"cmd/compile/internal/ir.Class %v": "", "cmd/compile/internal/ir.Class %v": "",
"cmd/compile/internal/ir.FmtMode %d": "", "cmd/compile/internal/ir.FmtMode %d": "",
"cmd/compile/internal/ir.Node %#v": "",
"cmd/compile/internal/ir.Node %+S": "",
"cmd/compile/internal/ir.Node %+v": "",
"cmd/compile/internal/ir.Node %L": "",
"cmd/compile/internal/ir.Node %S": "",
"cmd/compile/internal/ir.Node %j": "",
"cmd/compile/internal/ir.Node %p": "",
"cmd/compile/internal/ir.Node %v": "",
"cmd/compile/internal/ir.Nodes %#v": "", "cmd/compile/internal/ir.Nodes %#v": "",
"cmd/compile/internal/ir.Nodes %+v": "", "cmd/compile/internal/ir.Nodes %+v": "",
"cmd/compile/internal/ir.Nodes %.v": "", "cmd/compile/internal/ir.Nodes %.v": "",
@ -160,9 +161,9 @@ var knownFormats = map[string]string{
"interface{} %q": "", "interface{} %q": "",
"interface{} %s": "", "interface{} %s": "",
"interface{} %v": "", "interface{} %v": "",
"map[*cmd/compile/internal/ir.Node]*cmd/compile/internal/ssa.Value %v": "", "map[cmd/compile/internal/ir.Node]*cmd/compile/internal/ssa.Value %v": "",
"map[*cmd/compile/internal/ir.Node][]*cmd/compile/internal/ir.Node %v": "", "map[cmd/compile/internal/ir.Node][]cmd/compile/internal/ir.Node %v": "",
"map[cmd/compile/internal/ssa.ID]uint32 %v": "", "map[cmd/compile/internal/ssa.ID]uint32 %v": "",
"map[int64]uint32 %v": "", "map[int64]uint32 %v": "",
"math/big.Accuracy %s": "", "math/big.Accuracy %s": "",
"reflect.Type %s": "", "reflect.Type %s": "",

View file

@ -546,7 +546,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym: case *obj.LSym:
wantreg = "SB" wantreg = "SB"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case *ir.Node: case ir.Node:
wantreg = "SP" wantreg = "SP"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case nil: case nil:

View file

@ -396,7 +396,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym: case *obj.LSym:
wantreg = "SB" wantreg = "SB"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case *ir.Node: case ir.Node:
wantreg = "SP" wantreg = "SP"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case nil: case nil:

View file

@ -404,7 +404,7 @@ func genhash(t *types.Type) *obj.LSym {
return closure return closure
} }
func hashfor(t *types.Type) *ir.Node { func hashfor(t *types.Type) ir.Node {
var sym *types.Sym var sym *types.Sym
switch a, _ := algtype1(t); a { switch a, _ := algtype1(t); a {
@ -432,10 +432,10 @@ func hashfor(t *types.Type) *ir.Node {
n := NewName(sym) n := NewName(sym)
setNodeNameFunc(n) setNodeNameFunc(n)
n.SetType(functype(nil, []*ir.Node{ n.SetType(functype(nil, []ir.Node{
anonfield(types.NewPtr(t)), anonfield(types.NewPtr(t)),
anonfield(types.Types[types.TUINTPTR]), anonfield(types.Types[types.TUINTPTR]),
}, []*ir.Node{ }, []ir.Node{
anonfield(types.Types[types.TUINTPTR]), anonfield(types.Types[types.TUINTPTR]),
})) }))
return n return n
@ -567,9 +567,9 @@ func geneq(t *types.Type) *obj.LSym {
// //
// TODO(josharian): consider doing some loop unrolling // TODO(josharian): consider doing some loop unrolling
// for larger nelem as well, processing a few elements at a time in a loop. // for larger nelem as well, processing a few elements at a time in a loop.
checkAll := func(unroll int64, last bool, eq func(pi, qi *ir.Node) *ir.Node) { checkAll := func(unroll int64, last bool, eq func(pi, qi ir.Node) ir.Node) {
// checkIdx generates a node to check for equality at index i. // checkIdx generates a node to check for equality at index i.
checkIdx := func(i *ir.Node) *ir.Node { checkIdx := func(i ir.Node) ir.Node {
// pi := p[i] // pi := p[i]
pi := ir.Nod(ir.OINDEX, np, i) pi := ir.Nod(ir.OINDEX, np, i)
pi.SetBounded(true) pi.SetBounded(true)
@ -621,24 +621,24 @@ func geneq(t *types.Type) *obj.LSym {
// Do two loops. First, check that all the lengths match (cheap). // Do two loops. First, check that all the lengths match (cheap).
// Second, check that all the contents match (expensive). // Second, check that all the contents match (expensive).
// TODO: when the array size is small, unroll the length match checks. // TODO: when the array size is small, unroll the length match checks.
checkAll(3, false, func(pi, qi *ir.Node) *ir.Node { checkAll(3, false, func(pi, qi ir.Node) ir.Node {
// Compare lengths. // Compare lengths.
eqlen, _ := eqstring(pi, qi) eqlen, _ := eqstring(pi, qi)
return eqlen return eqlen
}) })
checkAll(1, true, func(pi, qi *ir.Node) *ir.Node { checkAll(1, true, func(pi, qi ir.Node) ir.Node {
// Compare contents. // Compare contents.
_, eqmem := eqstring(pi, qi) _, eqmem := eqstring(pi, qi)
return eqmem return eqmem
}) })
case types.TFLOAT32, types.TFLOAT64: case types.TFLOAT32, types.TFLOAT64:
checkAll(2, true, func(pi, qi *ir.Node) *ir.Node { checkAll(2, true, func(pi, qi ir.Node) ir.Node {
// p[i] == q[i] // p[i] == q[i]
return ir.Nod(ir.OEQ, pi, qi) return ir.Nod(ir.OEQ, pi, qi)
}) })
// TODO: pick apart structs, do them piecemeal too // TODO: pick apart structs, do them piecemeal too
default: default:
checkAll(1, true, func(pi, qi *ir.Node) *ir.Node { checkAll(1, true, func(pi, qi ir.Node) ir.Node {
// p[i] == q[i] // p[i] == q[i]
return ir.Nod(ir.OEQ, pi, qi) return ir.Nod(ir.OEQ, pi, qi)
}) })
@ -648,9 +648,9 @@ func geneq(t *types.Type) *obj.LSym {
// Build a list of conditions to satisfy. // Build a list of conditions to satisfy.
// The conditions are a list-of-lists. Conditions are reorderable // The conditions are a list-of-lists. Conditions are reorderable
// within each inner list. The outer lists must be evaluated in order. // within each inner list. The outer lists must be evaluated in order.
var conds [][]*ir.Node var conds [][]ir.Node
conds = append(conds, []*ir.Node{}) conds = append(conds, []ir.Node{})
and := func(n *ir.Node) { and := func(n ir.Node) {
i := len(conds) - 1 i := len(conds) - 1
conds[i] = append(conds[i], n) conds[i] = append(conds[i], n)
} }
@ -670,7 +670,7 @@ func geneq(t *types.Type) *obj.LSym {
if !IsRegularMemory(f.Type) { if !IsRegularMemory(f.Type) {
if EqCanPanic(f.Type) { if EqCanPanic(f.Type) {
// Enforce ordering by starting a new set of reorderable conditions. // Enforce ordering by starting a new set of reorderable conditions.
conds = append(conds, []*ir.Node{}) conds = append(conds, []ir.Node{})
} }
p := nodSym(ir.OXDOT, np, f.Sym) p := nodSym(ir.OXDOT, np, f.Sym)
q := nodSym(ir.OXDOT, nq, f.Sym) q := nodSym(ir.OXDOT, nq, f.Sym)
@ -684,7 +684,7 @@ func geneq(t *types.Type) *obj.LSym {
} }
if EqCanPanic(f.Type) { if EqCanPanic(f.Type) {
// Also enforce ordering after something that can panic. // Also enforce ordering after something that can panic.
conds = append(conds, []*ir.Node{}) conds = append(conds, []ir.Node{})
} }
i++ i++
continue continue
@ -709,9 +709,9 @@ func geneq(t *types.Type) *obj.LSym {
// Sort conditions to put runtime calls last. // Sort conditions to put runtime calls last.
// Preserve the rest of the ordering. // Preserve the rest of the ordering.
var flatConds []*ir.Node var flatConds []ir.Node
for _, c := range conds { for _, c := range conds {
isCall := func(n *ir.Node) bool { isCall := func(n ir.Node) bool {
return n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC return n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC
} }
sort.SliceStable(c, func(i, j int) bool { sort.SliceStable(c, func(i, j int) bool {
@ -785,7 +785,7 @@ func geneq(t *types.Type) *obj.LSym {
return closure return closure
} }
func hasCall(n *ir.Node) bool { func hasCall(n ir.Node) bool {
if n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC { if n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC {
return true return true
} }
@ -820,7 +820,7 @@ func hasCall(n *ir.Node) bool {
// eqfield returns the node // eqfield returns the node
// p.field == q.field // p.field == q.field
func eqfield(p *ir.Node, q *ir.Node, field *types.Sym) *ir.Node { func eqfield(p ir.Node, q ir.Node, field *types.Sym) ir.Node {
nx := nodSym(ir.OXDOT, p, field) nx := nodSym(ir.OXDOT, p, field)
ny := nodSym(ir.OXDOT, q, field) ny := nodSym(ir.OXDOT, q, field)
ne := ir.Nod(ir.OEQ, nx, ny) ne := ir.Nod(ir.OEQ, nx, ny)
@ -833,7 +833,7 @@ func eqfield(p *ir.Node, q *ir.Node, field *types.Sym) *ir.Node {
// memequal(s.ptr, t.ptr, len(s)) // memequal(s.ptr, t.ptr, len(s))
// which can be used to construct string equality comparison. // which can be used to construct string equality comparison.
// eqlen must be evaluated before eqmem, and shortcircuiting is required. // eqlen must be evaluated before eqmem, and shortcircuiting is required.
func eqstring(s, t *ir.Node) (eqlen, eqmem *ir.Node) { func eqstring(s, t ir.Node) (eqlen, eqmem ir.Node) {
s = conv(s, types.Types[types.TSTRING]) s = conv(s, types.Types[types.TSTRING])
t = conv(t, types.Types[types.TSTRING]) t = conv(t, types.Types[types.TSTRING])
sptr := ir.Nod(ir.OSPTR, s, nil) sptr := ir.Nod(ir.OSPTR, s, nil)
@ -859,13 +859,13 @@ func eqstring(s, t *ir.Node) (eqlen, eqmem *ir.Node) {
// ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate) // ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate)
// which can be used to construct interface equality comparison. // which can be used to construct interface equality comparison.
// eqtab must be evaluated before eqdata, and shortcircuiting is required. // eqtab must be evaluated before eqdata, and shortcircuiting is required.
func eqinterface(s, t *ir.Node) (eqtab, eqdata *ir.Node) { func eqinterface(s, t ir.Node) (eqtab, eqdata ir.Node) {
if !types.Identical(s.Type(), t.Type()) { if !types.Identical(s.Type(), t.Type()) {
base.Fatalf("eqinterface %v %v", s.Type(), t.Type()) base.Fatalf("eqinterface %v %v", s.Type(), t.Type())
} }
// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool) // func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool) // func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
var fn *ir.Node var fn ir.Node
if s.Type().IsEmptyInterface() { if s.Type().IsEmptyInterface() {
fn = syslook("efaceeq") fn = syslook("efaceeq")
} else { } else {
@ -893,7 +893,7 @@ func eqinterface(s, t *ir.Node) (eqtab, eqdata *ir.Node) {
// eqmem returns the node // eqmem returns the node
// memequal(&p.field, &q.field [, size]) // memequal(&p.field, &q.field [, size])
func eqmem(p *ir.Node, q *ir.Node, field *types.Sym, size int64) *ir.Node { func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
nx := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, p, field), nil) nx := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, p, field), nil)
ny := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, q, field), nil) ny := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, q, field), nil)
nx = typecheck(nx, ctxExpr) nx = typecheck(nx, ctxExpr)
@ -910,7 +910,7 @@ func eqmem(p *ir.Node, q *ir.Node, field *types.Sym, size int64) *ir.Node {
return call return call
} }
func eqmemfunc(size int64, t *types.Type) (fn *ir.Node, needsize bool) { func eqmemfunc(size int64, t *types.Type) (fn ir.Node, needsize bool) {
switch size { switch size {
default: default:
fn = syslook("memequal") fn = syslook("memequal")

View file

@ -14,7 +14,7 @@ type exporter struct {
} }
// markObject visits a reachable object. // markObject visits a reachable object.
func (p *exporter) markObject(n *ir.Node) { func (p *exporter) markObject(n ir.Node) {
if n.Op() == ir.ONAME && n.Class() == ir.PFUNC { if n.Op() == ir.ONAME && n.Class() == ir.PFUNC {
inlFlood(n) inlFlood(n)
} }

View file

@ -9,11 +9,11 @@ import (
"cmd/internal/src" "cmd/internal/src"
) )
func npos(pos src.XPos, n *ir.Node) *ir.Node { func npos(pos src.XPos, n ir.Node) ir.Node {
n.SetPos(pos) n.SetPos(pos)
return n return n
} }
func builtinCall(op ir.Op) *ir.Node { func builtinCall(op ir.Op) ir.Node {
return ir.Nod(ir.OCALL, mkname(ir.BuiltinPkg.Lookup(ir.OpNames[op])), nil) return ir.Nod(ir.OCALL, mkname(ir.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
} }

View file

@ -210,132 +210,132 @@ func runtimeTypes() []*types.Type {
typs[1] = types.NewPtr(typs[0]) typs[1] = types.NewPtr(typs[0])
typs[2] = types.Types[types.TANY] typs[2] = types.Types[types.TANY]
typs[3] = types.NewPtr(typs[2]) typs[3] = types.NewPtr(typs[2])
typs[4] = functype(nil, []*ir.Node{anonfield(typs[1])}, []*ir.Node{anonfield(typs[3])}) typs[4] = functype(nil, []ir.Node{anonfield(typs[1])}, []ir.Node{anonfield(typs[3])})
typs[5] = types.Types[types.TUINTPTR] typs[5] = types.Types[types.TUINTPTR]
typs[6] = types.Types[types.TBOOL] typs[6] = types.Types[types.TBOOL]
typs[7] = types.Types[types.TUNSAFEPTR] typs[7] = types.Types[types.TUNSAFEPTR]
typs[8] = functype(nil, []*ir.Node{anonfield(typs[5]), anonfield(typs[1]), anonfield(typs[6])}, []*ir.Node{anonfield(typs[7])}) typs[8] = functype(nil, []ir.Node{anonfield(typs[5]), anonfield(typs[1]), anonfield(typs[6])}, []ir.Node{anonfield(typs[7])})
typs[9] = functype(nil, nil, nil) typs[9] = functype(nil, nil, nil)
typs[10] = types.Types[types.TINTER] typs[10] = types.Types[types.TINTER]
typs[11] = functype(nil, []*ir.Node{anonfield(typs[10])}, nil) typs[11] = functype(nil, []ir.Node{anonfield(typs[10])}, nil)
typs[12] = types.Types[types.TINT32] typs[12] = types.Types[types.TINT32]
typs[13] = types.NewPtr(typs[12]) typs[13] = types.NewPtr(typs[12])
typs[14] = functype(nil, []*ir.Node{anonfield(typs[13])}, []*ir.Node{anonfield(typs[10])}) typs[14] = functype(nil, []ir.Node{anonfield(typs[13])}, []ir.Node{anonfield(typs[10])})
typs[15] = types.Types[types.TINT] typs[15] = types.Types[types.TINT]
typs[16] = functype(nil, []*ir.Node{anonfield(typs[15]), anonfield(typs[15])}, nil) typs[16] = functype(nil, []ir.Node{anonfield(typs[15]), anonfield(typs[15])}, nil)
typs[17] = types.Types[types.TUINT] typs[17] = types.Types[types.TUINT]
typs[18] = functype(nil, []*ir.Node{anonfield(typs[17]), anonfield(typs[15])}, nil) typs[18] = functype(nil, []ir.Node{anonfield(typs[17]), anonfield(typs[15])}, nil)
typs[19] = functype(nil, []*ir.Node{anonfield(typs[6])}, nil) typs[19] = functype(nil, []ir.Node{anonfield(typs[6])}, nil)
typs[20] = types.Types[types.TFLOAT64] typs[20] = types.Types[types.TFLOAT64]
typs[21] = functype(nil, []*ir.Node{anonfield(typs[20])}, nil) typs[21] = functype(nil, []ir.Node{anonfield(typs[20])}, nil)
typs[22] = types.Types[types.TINT64] typs[22] = types.Types[types.TINT64]
typs[23] = functype(nil, []*ir.Node{anonfield(typs[22])}, nil) typs[23] = functype(nil, []ir.Node{anonfield(typs[22])}, nil)
typs[24] = types.Types[types.TUINT64] typs[24] = types.Types[types.TUINT64]
typs[25] = functype(nil, []*ir.Node{anonfield(typs[24])}, nil) typs[25] = functype(nil, []ir.Node{anonfield(typs[24])}, nil)
typs[26] = types.Types[types.TCOMPLEX128] typs[26] = types.Types[types.TCOMPLEX128]
typs[27] = functype(nil, []*ir.Node{anonfield(typs[26])}, nil) typs[27] = functype(nil, []ir.Node{anonfield(typs[26])}, nil)
typs[28] = types.Types[types.TSTRING] typs[28] = types.Types[types.TSTRING]
typs[29] = functype(nil, []*ir.Node{anonfield(typs[28])}, nil) typs[29] = functype(nil, []ir.Node{anonfield(typs[28])}, nil)
typs[30] = functype(nil, []*ir.Node{anonfield(typs[2])}, nil) typs[30] = functype(nil, []ir.Node{anonfield(typs[2])}, nil)
typs[31] = functype(nil, []*ir.Node{anonfield(typs[5])}, nil) typs[31] = functype(nil, []ir.Node{anonfield(typs[5])}, nil)
typs[32] = types.NewArray(typs[0], 32) typs[32] = types.NewArray(typs[0], 32)
typs[33] = types.NewPtr(typs[32]) typs[33] = types.NewPtr(typs[32])
typs[34] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])}) typs[34] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
typs[35] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])}) typs[35] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
typs[36] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])}) typs[36] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
typs[37] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])}) typs[37] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[28])})
typs[38] = types.NewSlice(typs[28]) typs[38] = types.NewSlice(typs[28])
typs[39] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[38])}, []*ir.Node{anonfield(typs[28])}) typs[39] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[38])}, []ir.Node{anonfield(typs[28])})
typs[40] = functype(nil, []*ir.Node{anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[15])}) typs[40] = functype(nil, []ir.Node{anonfield(typs[28]), anonfield(typs[28])}, []ir.Node{anonfield(typs[15])})
typs[41] = types.NewArray(typs[0], 4) typs[41] = types.NewArray(typs[0], 4)
typs[42] = types.NewPtr(typs[41]) typs[42] = types.NewPtr(typs[41])
typs[43] = functype(nil, []*ir.Node{anonfield(typs[42]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[28])}) typs[43] = functype(nil, []ir.Node{anonfield(typs[42]), anonfield(typs[22])}, []ir.Node{anonfield(typs[28])})
typs[44] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[28])}) typs[44] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []ir.Node{anonfield(typs[28])})
typs[45] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[28])}) typs[45] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []ir.Node{anonfield(typs[28])})
typs[46] = types.Runetype typs[46] = types.Runetype
typs[47] = types.NewSlice(typs[46]) typs[47] = types.NewSlice(typs[46])
typs[48] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[47])}, []*ir.Node{anonfield(typs[28])}) typs[48] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[47])}, []ir.Node{anonfield(typs[28])})
typs[49] = types.NewSlice(typs[0]) typs[49] = types.NewSlice(typs[0])
typs[50] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[49])}) typs[50] = functype(nil, []ir.Node{anonfield(typs[33]), anonfield(typs[28])}, []ir.Node{anonfield(typs[49])})
typs[51] = types.NewArray(typs[46], 32) typs[51] = types.NewArray(typs[46], 32)
typs[52] = types.NewPtr(typs[51]) typs[52] = types.NewPtr(typs[51])
typs[53] = functype(nil, []*ir.Node{anonfield(typs[52]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[47])}) typs[53] = functype(nil, []ir.Node{anonfield(typs[52]), anonfield(typs[28])}, []ir.Node{anonfield(typs[47])})
typs[54] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[15])}) typs[54] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[5])}, []ir.Node{anonfield(typs[15])})
typs[55] = functype(nil, []*ir.Node{anonfield(typs[28]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[46]), anonfield(typs[15])}) typs[55] = functype(nil, []ir.Node{anonfield(typs[28]), anonfield(typs[15])}, []ir.Node{anonfield(typs[46]), anonfield(typs[15])})
typs[56] = functype(nil, []*ir.Node{anonfield(typs[28])}, []*ir.Node{anonfield(typs[15])}) typs[56] = functype(nil, []ir.Node{anonfield(typs[28])}, []ir.Node{anonfield(typs[15])})
typs[57] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[2])}) typs[57] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []ir.Node{anonfield(typs[2])})
typs[58] = functype(nil, []*ir.Node{anonfield(typs[2])}, []*ir.Node{anonfield(typs[7])}) typs[58] = functype(nil, []ir.Node{anonfield(typs[2])}, []ir.Node{anonfield(typs[7])})
typs[59] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[2])}) typs[59] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3])}, []ir.Node{anonfield(typs[2])})
typs[60] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[2]), anonfield(typs[6])}) typs[60] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []ir.Node{anonfield(typs[2]), anonfield(typs[6])})
typs[61] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil) typs[61] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil)
typs[62] = functype(nil, []*ir.Node{anonfield(typs[1])}, nil) typs[62] = functype(nil, []ir.Node{anonfield(typs[1])}, nil)
typs[63] = types.NewPtr(typs[5]) typs[63] = types.NewPtr(typs[5])
typs[64] = functype(nil, []*ir.Node{anonfield(typs[63]), anonfield(typs[7]), anonfield(typs[7])}, []*ir.Node{anonfield(typs[6])}) typs[64] = functype(nil, []ir.Node{anonfield(typs[63]), anonfield(typs[7]), anonfield(typs[7])}, []ir.Node{anonfield(typs[6])})
typs[65] = types.Types[types.TUINT32] typs[65] = types.Types[types.TUINT32]
typs[66] = functype(nil, nil, []*ir.Node{anonfield(typs[65])}) typs[66] = functype(nil, nil, []ir.Node{anonfield(typs[65])})
typs[67] = types.NewMap(typs[2], typs[2]) typs[67] = types.NewMap(typs[2], typs[2])
typs[68] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[67])}) typs[68] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[3])}, []ir.Node{anonfield(typs[67])})
typs[69] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[67])}) typs[69] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3])}, []ir.Node{anonfield(typs[67])})
typs[70] = functype(nil, nil, []*ir.Node{anonfield(typs[67])}) typs[70] = functype(nil, nil, []ir.Node{anonfield(typs[67])})
typs[71] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[3])}) typs[71] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []ir.Node{anonfield(typs[3])})
typs[72] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[3])}) typs[72] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []ir.Node{anonfield(typs[3])})
typs[73] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*ir.Node{anonfield(typs[3])}) typs[73] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []ir.Node{anonfield(typs[3])})
typs[74] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[3]), anonfield(typs[6])}) typs[74] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []ir.Node{anonfield(typs[3]), anonfield(typs[6])})
typs[75] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[3]), anonfield(typs[6])}) typs[75] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []ir.Node{anonfield(typs[3]), anonfield(typs[6])})
typs[76] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*ir.Node{anonfield(typs[3]), anonfield(typs[6])}) typs[76] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []ir.Node{anonfield(typs[3]), anonfield(typs[6])})
typs[77] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, nil) typs[77] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, nil)
typs[78] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, nil) typs[78] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, nil)
typs[79] = functype(nil, []*ir.Node{anonfield(typs[3])}, nil) typs[79] = functype(nil, []ir.Node{anonfield(typs[3])}, nil)
typs[80] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67])}, nil) typs[80] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[67])}, nil)
typs[81] = types.NewChan(typs[2], types.Cboth) typs[81] = types.NewChan(typs[2], types.Cboth)
typs[82] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[81])}) typs[82] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[22])}, []ir.Node{anonfield(typs[81])})
typs[83] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[81])}) typs[83] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []ir.Node{anonfield(typs[81])})
typs[84] = types.NewChan(typs[2], types.Crecv) typs[84] = types.NewChan(typs[2], types.Crecv)
typs[85] = functype(nil, []*ir.Node{anonfield(typs[84]), anonfield(typs[3])}, nil) typs[85] = functype(nil, []ir.Node{anonfield(typs[84]), anonfield(typs[3])}, nil)
typs[86] = functype(nil, []*ir.Node{anonfield(typs[84]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[6])}) typs[86] = functype(nil, []ir.Node{anonfield(typs[84]), anonfield(typs[3])}, []ir.Node{anonfield(typs[6])})
typs[87] = types.NewChan(typs[2], types.Csend) typs[87] = types.NewChan(typs[2], types.Csend)
typs[88] = functype(nil, []*ir.Node{anonfield(typs[87]), anonfield(typs[3])}, nil) typs[88] = functype(nil, []ir.Node{anonfield(typs[87]), anonfield(typs[3])}, nil)
typs[89] = types.NewArray(typs[0], 3) typs[89] = types.NewArray(typs[0], 3)
typs[90] = tostruct([]*ir.Node{namedfield("enabled", typs[6]), namedfield("pad", typs[89]), namedfield("needed", typs[6]), namedfield("cgo", typs[6]), namedfield("alignme", typs[24])}) typs[90] = tostruct([]ir.Node{namedfield("enabled", typs[6]), namedfield("pad", typs[89]), namedfield("needed", typs[6]), namedfield("cgo", typs[6]), namedfield("alignme", typs[24])})
typs[91] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil) typs[91] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil)
typs[92] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3])}, nil) typs[92] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3])}, nil)
typs[93] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[15])}) typs[93] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15])}, []ir.Node{anonfield(typs[15])})
typs[94] = functype(nil, []*ir.Node{anonfield(typs[87]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[6])}) typs[94] = functype(nil, []ir.Node{anonfield(typs[87]), anonfield(typs[3])}, []ir.Node{anonfield(typs[6])})
typs[95] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[84])}, []*ir.Node{anonfield(typs[6])}) typs[95] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[84])}, []ir.Node{anonfield(typs[6])})
typs[96] = types.NewPtr(typs[6]) typs[96] = types.NewPtr(typs[6])
typs[97] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[96]), anonfield(typs[84])}, []*ir.Node{anonfield(typs[6])}) typs[97] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[96]), anonfield(typs[84])}, []ir.Node{anonfield(typs[6])})
typs[98] = functype(nil, []*ir.Node{anonfield(typs[63])}, nil) typs[98] = functype(nil, []ir.Node{anonfield(typs[63])}, nil)
typs[99] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[63]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[6])}, []*ir.Node{anonfield(typs[15]), anonfield(typs[6])}) typs[99] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[63]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[6])}, []ir.Node{anonfield(typs[15]), anonfield(typs[6])})
typs[100] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[7])}) typs[100] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []ir.Node{anonfield(typs[7])})
typs[101] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[7])}) typs[101] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[22])}, []ir.Node{anonfield(typs[7])})
typs[102] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[7])}, []*ir.Node{anonfield(typs[7])}) typs[102] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[7])}, []ir.Node{anonfield(typs[7])})
typs[103] = types.NewSlice(typs[2]) typs[103] = types.NewSlice(typs[2])
typs[104] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[103]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[103])}) typs[104] = functype(nil, []ir.Node{anonfield(typs[1]), anonfield(typs[103]), anonfield(typs[15])}, []ir.Node{anonfield(typs[103])})
typs[105] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, nil) typs[105] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, nil)
typs[106] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[5])}, nil) typs[106] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[5])}, nil)
typs[107] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[6])}) typs[107] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, []ir.Node{anonfield(typs[6])})
typs[108] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[6])}) typs[108] = functype(nil, []ir.Node{anonfield(typs[3]), anonfield(typs[3])}, []ir.Node{anonfield(typs[6])})
typs[109] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[7])}, []*ir.Node{anonfield(typs[6])}) typs[109] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[7])}, []ir.Node{anonfield(typs[6])})
typs[110] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[5]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[5])}) typs[110] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[5]), anonfield(typs[5])}, []ir.Node{anonfield(typs[5])})
typs[111] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[5])}) typs[111] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[5])}, []ir.Node{anonfield(typs[5])})
typs[112] = functype(nil, []*ir.Node{anonfield(typs[22]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[22])}) typs[112] = functype(nil, []ir.Node{anonfield(typs[22]), anonfield(typs[22])}, []ir.Node{anonfield(typs[22])})
typs[113] = functype(nil, []*ir.Node{anonfield(typs[24]), anonfield(typs[24])}, []*ir.Node{anonfield(typs[24])}) typs[113] = functype(nil, []ir.Node{anonfield(typs[24]), anonfield(typs[24])}, []ir.Node{anonfield(typs[24])})
typs[114] = functype(nil, []*ir.Node{anonfield(typs[20])}, []*ir.Node{anonfield(typs[22])}) typs[114] = functype(nil, []ir.Node{anonfield(typs[20])}, []ir.Node{anonfield(typs[22])})
typs[115] = functype(nil, []*ir.Node{anonfield(typs[20])}, []*ir.Node{anonfield(typs[24])}) typs[115] = functype(nil, []ir.Node{anonfield(typs[20])}, []ir.Node{anonfield(typs[24])})
typs[116] = functype(nil, []*ir.Node{anonfield(typs[20])}, []*ir.Node{anonfield(typs[65])}) typs[116] = functype(nil, []ir.Node{anonfield(typs[20])}, []ir.Node{anonfield(typs[65])})
typs[117] = functype(nil, []*ir.Node{anonfield(typs[22])}, []*ir.Node{anonfield(typs[20])}) typs[117] = functype(nil, []ir.Node{anonfield(typs[22])}, []ir.Node{anonfield(typs[20])})
typs[118] = functype(nil, []*ir.Node{anonfield(typs[24])}, []*ir.Node{anonfield(typs[20])}) typs[118] = functype(nil, []ir.Node{anonfield(typs[24])}, []ir.Node{anonfield(typs[20])})
typs[119] = functype(nil, []*ir.Node{anonfield(typs[65])}, []*ir.Node{anonfield(typs[20])}) typs[119] = functype(nil, []ir.Node{anonfield(typs[65])}, []ir.Node{anonfield(typs[20])})
typs[120] = functype(nil, []*ir.Node{anonfield(typs[26]), anonfield(typs[26])}, []*ir.Node{anonfield(typs[26])}) typs[120] = functype(nil, []ir.Node{anonfield(typs[26]), anonfield(typs[26])}, []ir.Node{anonfield(typs[26])})
typs[121] = functype(nil, []*ir.Node{anonfield(typs[5]), anonfield(typs[5])}, nil) typs[121] = functype(nil, []ir.Node{anonfield(typs[5]), anonfield(typs[5])}, nil)
typs[122] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[1]), anonfield(typs[5])}, nil) typs[122] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[1]), anonfield(typs[5])}, nil)
typs[123] = types.NewSlice(typs[7]) typs[123] = types.NewSlice(typs[7])
typs[124] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[123])}, nil) typs[124] = functype(nil, []ir.Node{anonfield(typs[7]), anonfield(typs[123])}, nil)
typs[125] = types.Types[types.TUINT8] typs[125] = types.Types[types.TUINT8]
typs[126] = functype(nil, []*ir.Node{anonfield(typs[125]), anonfield(typs[125])}, nil) typs[126] = functype(nil, []ir.Node{anonfield(typs[125]), anonfield(typs[125])}, nil)
typs[127] = types.Types[types.TUINT16] typs[127] = types.Types[types.TUINT16]
typs[128] = functype(nil, []*ir.Node{anonfield(typs[127]), anonfield(typs[127])}, nil) typs[128] = functype(nil, []ir.Node{anonfield(typs[127]), anonfield(typs[127])}, nil)
typs[129] = functype(nil, []*ir.Node{anonfield(typs[65]), anonfield(typs[65])}, nil) typs[129] = functype(nil, []ir.Node{anonfield(typs[65]), anonfield(typs[65])}, nil)
typs[130] = functype(nil, []*ir.Node{anonfield(typs[24]), anonfield(typs[24])}, nil) typs[130] = functype(nil, []ir.Node{anonfield(typs[24]), anonfield(typs[24])}, nil)
return typs[:] return typs[:]
} }

View file

@ -13,7 +13,7 @@ import (
"fmt" "fmt"
) )
func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node { func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
xtype := p.typeExpr(expr.Type) xtype := p.typeExpr(expr.Type)
ntype := p.typeExpr(expr.Type) ntype := p.typeExpr(expr.Type)
@ -78,7 +78,7 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node {
// function associated with the closure. // function associated with the closure.
// TODO: This creation of the named function should probably really be done in a // TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking. // separate pass from type-checking.
func typecheckclosure(clo *ir.Node, top int) { func typecheckclosure(clo ir.Node, top int) {
fn := clo.Func() fn := clo.Func()
dcl := fn.Decl dcl := fn.Decl
// Set current associated iota value, so iota can be used inside // Set current associated iota value, so iota can be used inside
@ -140,7 +140,7 @@ var globClosgen int
// closurename generates a new unique name for a closure within // closurename generates a new unique name for a closure within
// outerfunc. // outerfunc.
func closurename(outerfunc *ir.Node) *types.Sym { func closurename(outerfunc ir.Node) *types.Sym {
outer := "glob." outer := "glob."
prefix := "func" prefix := "func"
gen := &globClosgen gen := &globClosgen
@ -172,7 +172,7 @@ var capturevarscomplete bool
// by value or by reference. // by value or by reference.
// We use value capturing for values <= 128 bytes that are never reassigned // We use value capturing for values <= 128 bytes that are never reassigned
// after capturing (effectively constant). // after capturing (effectively constant).
func capturevars(dcl *ir.Node) { func capturevars(dcl ir.Node) {
lno := base.Pos lno := base.Pos
base.Pos = dcl.Pos() base.Pos = dcl.Pos()
fn := dcl.Func() fn := dcl.Func()
@ -227,7 +227,7 @@ func capturevars(dcl *ir.Node) {
// transformclosure is called in a separate phase after escape analysis. // transformclosure is called in a separate phase after escape analysis.
// It transform closure bodies to properly reference captured variables. // It transform closure bodies to properly reference captured variables.
func transformclosure(dcl *ir.Node) { func transformclosure(dcl ir.Node) {
lno := base.Pos lno := base.Pos
base.Pos = dcl.Pos() base.Pos = dcl.Pos()
fn := dcl.Func() fn := dcl.Func()
@ -253,7 +253,7 @@ func transformclosure(dcl *ir.Node) {
// We are going to insert captured variables before input args. // We are going to insert captured variables before input args.
var params []*types.Field var params []*types.Field
var decls []*ir.Node var decls []ir.Node
for _, v := range fn.ClosureVars.Slice() { for _, v := range fn.ClosureVars.Slice() {
if !v.Name().Byval() { if !v.Name().Byval() {
// If v of type T is captured by reference, // If v of type T is captured by reference,
@ -284,7 +284,7 @@ func transformclosure(dcl *ir.Node) {
dcl.SetType(f.Type()) // update type of ODCLFUNC dcl.SetType(f.Type()) // update type of ODCLFUNC
} else { } else {
// The closure is not called, so it is going to stay as closure. // The closure is not called, so it is going to stay as closure.
var body []*ir.Node var body []ir.Node
offset := int64(Widthptr) offset := int64(Widthptr)
for _, v := range fn.ClosureVars.Slice() { for _, v := range fn.ClosureVars.Slice() {
// cv refers to the field inside of closure OSTRUCTLIT. // cv refers to the field inside of closure OSTRUCTLIT.
@ -332,13 +332,13 @@ func transformclosure(dcl *ir.Node) {
// hasemptycvars reports whether closure clo has an // hasemptycvars reports whether closure clo has an
// empty list of captured vars. // empty list of captured vars.
func hasemptycvars(clo *ir.Node) bool { func hasemptycvars(clo ir.Node) bool {
return clo.Func().ClosureVars.Len() == 0 return clo.Func().ClosureVars.Len() == 0
} }
// closuredebugruntimecheck applies boilerplate checks for debug flags // closuredebugruntimecheck applies boilerplate checks for debug flags
// and compiling runtime // and compiling runtime
func closuredebugruntimecheck(clo *ir.Node) { func closuredebugruntimecheck(clo ir.Node) {
if base.Debug.Closure > 0 { if base.Debug.Closure > 0 {
if clo.Esc() == EscHeap { if clo.Esc() == EscHeap {
base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func().ClosureVars) base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func().ClosureVars)
@ -354,7 +354,7 @@ func closuredebugruntimecheck(clo *ir.Node) {
// closureType returns the struct type used to hold all the information // closureType returns the struct type used to hold all the information
// needed in the closure for clo (clo must be a OCLOSURE node). // needed in the closure for clo (clo must be a OCLOSURE node).
// The address of a variable of the returned type can be cast to a func. // The address of a variable of the returned type can be cast to a func.
func closureType(clo *ir.Node) *types.Type { func closureType(clo ir.Node) *types.Type {
// Create closure in the form of a composite literal. // Create closure in the form of a composite literal.
// supposing the closure captures an int i and a string s // supposing the closure captures an int i and a string s
// and has one float64 argument and no results, // and has one float64 argument and no results,
@ -368,7 +368,7 @@ func closureType(clo *ir.Node) *types.Type {
// The information appears in the binary in the form of type descriptors; // The information appears in the binary in the form of type descriptors;
// the struct is unnamed so that closures in multiple packages with the // the struct is unnamed so that closures in multiple packages with the
// same struct type can share the descriptor. // same struct type can share the descriptor.
fields := []*ir.Node{ fields := []ir.Node{
namedfield(".F", types.Types[types.TUINTPTR]), namedfield(".F", types.Types[types.TUINTPTR]),
} }
for _, v := range clo.Func().ClosureVars.Slice() { for _, v := range clo.Func().ClosureVars.Slice() {
@ -383,7 +383,7 @@ func closureType(clo *ir.Node) *types.Type {
return typ return typ
} }
func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node { func walkclosure(clo ir.Node, init *ir.Nodes) ir.Node {
fn := clo.Func() fn := clo.Func()
// If no closure vars, don't bother wrapping. // If no closure vars, don't bother wrapping.
@ -399,7 +399,7 @@ func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node {
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ)) clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
clos.SetEsc(clo.Esc()) clos.SetEsc(clo.Esc())
clos.PtrList().Set(append([]*ir.Node{ir.Nod(ir.OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...)) clos.PtrList().Set(append([]ir.Node{ir.Nod(ir.OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...))
clos = ir.Nod(ir.OADDR, clos, nil) clos = ir.Nod(ir.OADDR, clos, nil)
clos.SetEsc(clo.Esc()) clos.SetEsc(clo.Esc())
@ -419,7 +419,7 @@ func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node {
return walkexpr(clos, init) return walkexpr(clos, init)
} }
func typecheckpartialcall(dot *ir.Node, sym *types.Sym) { func typecheckpartialcall(dot ir.Node, sym *types.Sym) {
switch dot.Op() { switch dot.Op() {
case ir.ODOTINTER, ir.ODOTMETH: case ir.ODOTINTER, ir.ODOTMETH:
break break
@ -440,7 +440,7 @@ func typecheckpartialcall(dot *ir.Node, sym *types.Sym) {
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed // makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls. // for partial calls.
func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node { func makepartialcall(dot ir.Node, t0 *types.Type, meth *types.Sym) ir.Node {
rcvrtype := dot.Left().Type() rcvrtype := dot.Left().Type()
sym := methodSymSuffix(rcvrtype, meth, "-fm") sym := methodSymSuffix(rcvrtype, meth, "-fm")
@ -484,7 +484,7 @@ func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node {
ptr := NewName(lookup(".this")) ptr := NewName(lookup(".this"))
declare(ptr, ir.PAUTO) declare(ptr, ir.PAUTO)
ptr.Name().SetUsed(true) ptr.Name().SetUsed(true)
var body []*ir.Node var body []ir.Node
if rcvrtype.IsPtr() || rcvrtype.IsInterface() { if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
ptr.SetType(rcvrtype) ptr.SetType(rcvrtype)
body = append(body, ir.Nod(ir.OAS, ptr, cv)) body = append(body, ir.Nod(ir.OAS, ptr, cv))
@ -522,8 +522,8 @@ func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node {
// partialCallType returns the struct type used to hold all the information // partialCallType returns the struct type used to hold all the information
// needed in the closure for n (n must be a OCALLPART node). // needed in the closure for n (n must be a OCALLPART node).
// The address of a variable of the returned type can be cast to a func. // The address of a variable of the returned type can be cast to a func.
func partialCallType(n *ir.Node) *types.Type { func partialCallType(n ir.Node) *types.Type {
t := tostruct([]*ir.Node{ t := tostruct([]ir.Node{
namedfield("F", types.Types[types.TUINTPTR]), namedfield("F", types.Types[types.TUINTPTR]),
namedfield("R", n.Left().Type()), namedfield("R", n.Left().Type()),
}) })
@ -531,7 +531,7 @@ func partialCallType(n *ir.Node) *types.Type {
return t return t
} }
func walkpartialcall(n *ir.Node, init *ir.Nodes) *ir.Node { func walkpartialcall(n ir.Node, init *ir.Nodes) ir.Node {
// Create closure in the form of a composite literal. // Create closure in the form of a composite literal.
// For x.M with receiver (x) type T, the generated code looks like: // For x.M with receiver (x) type T, the generated code looks like:
// //
@ -579,7 +579,7 @@ func walkpartialcall(n *ir.Node, init *ir.Nodes) *ir.Node {
// callpartMethod returns the *types.Field representing the method // callpartMethod returns the *types.Field representing the method
// referenced by method value n. // referenced by method value n.
func callpartMethod(n *ir.Node) *types.Field { func callpartMethod(n ir.Node) *types.Field {
if n.Op() != ir.OCALLPART { if n.Op() != ir.OCALLPART {
base.Fatalf("expected OCALLPART, got %v", n) base.Fatalf("expected OCALLPART, got %v", n)
} }

View file

@ -84,8 +84,8 @@ func trunccmplxlit(v constant.Value, t *types.Type) constant.Value {
} }
// TODO(mdempsky): Replace these with better APIs. // TODO(mdempsky): Replace these with better APIs.
func convlit(n *ir.Node, t *types.Type) *ir.Node { return convlit1(n, t, false, nil) } func convlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
func defaultlit(n *ir.Node, t *types.Type) *ir.Node { return convlit1(n, t, false, nil) } func defaultlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
// convlit1 converts an untyped expression n to type t. If n already // convlit1 converts an untyped expression n to type t. If n already
// has a type, convlit1 has no effect. // has a type, convlit1 has no effect.
@ -98,7 +98,7 @@ func defaultlit(n *ir.Node, t *types.Type) *ir.Node { return convlit1(n, t, fals
// //
// If there's an error converting n to t, context is used in the error // If there's an error converting n to t, context is used in the error
// message. // message.
func convlit1(n *ir.Node, t *types.Type, explicit bool, context func() string) *ir.Node { func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir.Node {
if explicit && t == nil { if explicit && t == nil {
base.Fatalf("explicit conversion missing type") base.Fatalf("explicit conversion missing type")
} }
@ -438,7 +438,7 @@ var tokenForOp = [...]token.Token{
// If n is not a constant, evalConst returns n. // If n is not a constant, evalConst returns n.
// Otherwise, evalConst returns a new OLITERAL with the same value as n, // Otherwise, evalConst returns a new OLITERAL with the same value as n,
// and with .Orig pointing back to n. // and with .Orig pointing back to n.
func evalConst(n *ir.Node) *ir.Node { func evalConst(n ir.Node) ir.Node {
nl, nr := n.Left(), n.Right() nl, nr := n.Left(), n.Right()
// Pick off just the opcodes that can be constant evaluated. // Pick off just the opcodes that can be constant evaluated.
@ -525,7 +525,7 @@ func evalConst(n *ir.Node) *ir.Node {
} }
return origConst(n, constant.MakeString(strings.Join(strs, ""))) return origConst(n, constant.MakeString(strings.Join(strs, "")))
} }
newList := make([]*ir.Node, 0, need) newList := make([]ir.Node, 0, need)
for i := 0; i < len(s); i++ { for i := 0; i < len(s); i++ {
if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) { if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) {
// merge from i up to but not including i2 // merge from i up to but not including i2
@ -619,7 +619,7 @@ var overflowNames = [...]string{
} }
// origConst returns an OLITERAL with orig n and value v. // origConst returns an OLITERAL with orig n and value v.
func origConst(n *ir.Node, v constant.Value) *ir.Node { func origConst(n ir.Node, v constant.Value) ir.Node {
lno := setlineno(n) lno := setlineno(n)
v = convertVal(v, n.Type(), false) v = convertVal(v, n.Type(), false)
base.Pos = lno base.Pos = lno
@ -648,11 +648,11 @@ func origConst(n *ir.Node, v constant.Value) *ir.Node {
return n return n
} }
func origBoolConst(n *ir.Node, v bool) *ir.Node { func origBoolConst(n ir.Node, v bool) ir.Node {
return origConst(n, constant.MakeBool(v)) return origConst(n, constant.MakeBool(v))
} }
func origIntConst(n *ir.Node, v int64) *ir.Node { func origIntConst(n ir.Node, v int64) ir.Node {
return origConst(n, constant.MakeInt64(v)) return origConst(n, constant.MakeInt64(v))
} }
@ -662,7 +662,7 @@ func origIntConst(n *ir.Node, v int64) *ir.Node {
// force means must assign concrete (non-ideal) type. // force means must assign concrete (non-ideal) type.
// The results of defaultlit2 MUST be assigned back to l and r, e.g. // The results of defaultlit2 MUST be assigned back to l and r, e.g.
// n.Left, n.Right = defaultlit2(n.Left, n.Right, force) // n.Left, n.Right = defaultlit2(n.Left, n.Right, force)
func defaultlit2(l *ir.Node, r *ir.Node, force bool) (*ir.Node, *ir.Node) { func defaultlit2(l ir.Node, r ir.Node, force bool) (ir.Node, ir.Node) {
if l.Type() == nil || r.Type() == nil { if l.Type() == nil || r.Type() == nil {
return l, r return l, r
} }
@ -747,7 +747,7 @@ func defaultType(t *types.Type) *types.Type {
return nil return nil
} }
func smallintconst(n *ir.Node) bool { func smallintconst(n ir.Node) bool {
if n.Op() == ir.OLITERAL { if n.Op() == ir.OLITERAL {
v, ok := constant.Int64Val(n.Val()) v, ok := constant.Int64Val(n.Val())
return ok && int64(int32(v)) == v return ok && int64(int32(v)) == v
@ -760,7 +760,7 @@ func smallintconst(n *ir.Node) bool {
// If n is not a constant expression, not representable as an // If n is not a constant expression, not representable as an
// integer, or negative, it returns -1. If n is too large, it // integer, or negative, it returns -1. If n is too large, it
// returns -2. // returns -2.
func indexconst(n *ir.Node) int64 { func indexconst(n ir.Node) int64 {
if n.Op() != ir.OLITERAL { if n.Op() != ir.OLITERAL {
return -1 return -1
} }
@ -783,11 +783,11 @@ func indexconst(n *ir.Node) int64 {
// //
// Expressions derived from nil, like string([]byte(nil)), while they // Expressions derived from nil, like string([]byte(nil)), while they
// may be known at compile time, are not Go language constants. // may be known at compile time, are not Go language constants.
func isGoConst(n *ir.Node) bool { func isGoConst(n ir.Node) bool {
return n.Op() == ir.OLITERAL return n.Op() == ir.OLITERAL
} }
func hascallchan(n *ir.Node) bool { func hascallchan(n ir.Node) bool {
if n == nil { if n == nil {
return false return false
} }
@ -851,7 +851,7 @@ type constSetKey struct {
// where are used in the error message. // where are used in the error message.
// //
// n must not be an untyped constant. // n must not be an untyped constant.
func (s *constSet) add(pos src.XPos, n *ir.Node, what, where string) { func (s *constSet) add(pos src.XPos, n ir.Node, what, where string) {
if n.Op() == ir.OCONVIFACE && n.Implicit() { if n.Op() == ir.OCONVIFACE && n.Implicit() {
n = n.Left() n = n.Left()
} }
@ -908,7 +908,7 @@ func (s *constSet) add(pos src.XPos, n *ir.Node, what, where string) {
// the latter is non-obvious. // the latter is non-obvious.
// //
// TODO(mdempsky): This could probably be a fmt.go flag. // TODO(mdempsky): This could probably be a fmt.go flag.
func nodeAndVal(n *ir.Node) string { func nodeAndVal(n ir.Node) string {
show := n.String() show := n.String()
val := ir.ConstValue(n) val := ir.ConstValue(n)
if s := fmt.Sprintf("%#v", val); show != s { if s := fmt.Sprintf("%#v", val); show != s {

View file

@ -18,7 +18,7 @@ import (
// Declaration stack & operations // Declaration stack & operations
var externdcl []*ir.Node var externdcl []ir.Node
func testdclstack() { func testdclstack() {
if !types.IsDclstackValid() { if !types.IsDclstackValid() {
@ -59,7 +59,7 @@ var declare_typegen int
// declare records that Node n declares symbol n.Sym in the specified // declare records that Node n declares symbol n.Sym in the specified
// declaration context. // declaration context.
func declare(n *ir.Node, ctxt ir.Class) { func declare(n ir.Node, ctxt ir.Class) {
if ir.IsBlank(n) { if ir.IsBlank(n) {
return return
} }
@ -128,7 +128,7 @@ func declare(n *ir.Node, ctxt ir.Class) {
autoexport(n, ctxt) autoexport(n, ctxt)
} }
func addvar(n *ir.Node, t *types.Type, ctxt ir.Class) { func addvar(n ir.Node, t *types.Type, ctxt ir.Class) {
if n == nil || n.Sym() == nil || (n.Op() != ir.ONAME && n.Op() != ir.ONONAME) || t == nil { if n == nil || n.Sym() == nil || (n.Op() != ir.ONAME && n.Op() != ir.ONONAME) || t == nil {
base.Fatalf("addvar: n=%v t=%v nil", n, t) base.Fatalf("addvar: n=%v t=%v nil", n, t)
} }
@ -140,8 +140,8 @@ func addvar(n *ir.Node, t *types.Type, ctxt ir.Class) {
// declare variables from grammar // declare variables from grammar
// new_name_list (type | [type] = expr_list) // new_name_list (type | [type] = expr_list)
func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node { func variter(vl []ir.Node, t ir.Node, el []ir.Node) []ir.Node {
var init []*ir.Node var init []ir.Node
doexpr := len(el) > 0 doexpr := len(el) > 0
if len(el) == 1 && len(vl) > 1 { if len(el) == 1 && len(vl) > 1 {
@ -164,7 +164,7 @@ func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node {
nel := len(el) nel := len(el)
for _, v := range vl { for _, v := range vl {
var e *ir.Node var e ir.Node
if doexpr { if doexpr {
if len(el) == 0 { if len(el) == 0 {
base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel) base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel)
@ -197,7 +197,7 @@ func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node {
} }
// newnoname returns a new ONONAME Node associated with symbol s. // newnoname returns a new ONONAME Node associated with symbol s.
func newnoname(s *types.Sym) *ir.Node { func newnoname(s *types.Sym) ir.Node {
if s == nil { if s == nil {
base.Fatalf("newnoname nil") base.Fatalf("newnoname nil")
} }
@ -208,7 +208,7 @@ func newnoname(s *types.Sym) *ir.Node {
} }
// newfuncnamel generates a new name node for a function or method. // newfuncnamel generates a new name node for a function or method.
func newfuncnamel(pos src.XPos, s *types.Sym, fn *ir.Func) *ir.Node { func newfuncnamel(pos src.XPos, s *types.Sym, fn *ir.Func) ir.Node {
if fn.Nname != nil { if fn.Nname != nil {
base.Fatalf("newfuncnamel - already have name") base.Fatalf("newfuncnamel - already have name")
} }
@ -220,17 +220,17 @@ func newfuncnamel(pos src.XPos, s *types.Sym, fn *ir.Func) *ir.Node {
// this generates a new name node for a name // this generates a new name node for a name
// being declared. // being declared.
func dclname(s *types.Sym) *ir.Node { func dclname(s *types.Sym) ir.Node {
n := NewName(s) n := NewName(s)
n.SetOp(ir.ONONAME) // caller will correct it n.SetOp(ir.ONONAME) // caller will correct it
return n return n
} }
func typenod(t *types.Type) *ir.Node { func typenod(t *types.Type) ir.Node {
return typenodl(src.NoXPos, t) return typenodl(src.NoXPos, t)
} }
func typenodl(pos src.XPos, t *types.Type) *ir.Node { func typenodl(pos src.XPos, t *types.Type) ir.Node {
// if we copied another type with *t = *u // if we copied another type with *t = *u
// then t->nod might be out of date, so // then t->nod might be out of date, so
// check t->nod->type too // check t->nod->type too
@ -243,15 +243,15 @@ func typenodl(pos src.XPos, t *types.Type) *ir.Node {
return ir.AsNode(t.Nod) return ir.AsNode(t.Nod)
} }
func anonfield(typ *types.Type) *ir.Node { func anonfield(typ *types.Type) ir.Node {
return symfield(nil, typ) return symfield(nil, typ)
} }
func namedfield(s string, typ *types.Type) *ir.Node { func namedfield(s string, typ *types.Type) ir.Node {
return symfield(lookup(s), typ) return symfield(lookup(s), typ)
} }
func symfield(s *types.Sym, typ *types.Type) *ir.Node { func symfield(s *types.Sym, typ *types.Type) ir.Node {
n := nodSym(ir.ODCLFIELD, nil, s) n := nodSym(ir.ODCLFIELD, nil, s)
n.SetType(typ) n.SetType(typ)
return n return n
@ -261,7 +261,7 @@ func symfield(s *types.Sym, typ *types.Type) *ir.Node {
// If no such Node currently exists, an ONONAME Node is returned instead. // If no such Node currently exists, an ONONAME Node is returned instead.
// Automatically creates a new closure variable if the referenced symbol was // Automatically creates a new closure variable if the referenced symbol was
// declared in a different (containing) function. // declared in a different (containing) function.
func oldname(s *types.Sym) *ir.Node { func oldname(s *types.Sym) ir.Node {
n := ir.AsNode(s.Def) n := ir.AsNode(s.Def)
if n == nil { if n == nil {
// Maybe a top-level declaration will come along later to // Maybe a top-level declaration will come along later to
@ -302,7 +302,7 @@ func oldname(s *types.Sym) *ir.Node {
} }
// importName is like oldname, but it reports an error if sym is from another package and not exported. // importName is like oldname, but it reports an error if sym is from another package and not exported.
func importName(sym *types.Sym) *ir.Node { func importName(sym *types.Sym) ir.Node {
n := oldname(sym) n := oldname(sym)
if !types.IsExported(sym.Name) && sym.Pkg != ir.LocalPkg { if !types.IsExported(sym.Name) && sym.Pkg != ir.LocalPkg {
n.SetDiag(true) n.SetDiag(true)
@ -312,7 +312,7 @@ func importName(sym *types.Sym) *ir.Node {
} }
// := declarations // := declarations
func colasname(n *ir.Node) bool { func colasname(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.ONAME, case ir.ONAME,
ir.ONONAME, ir.ONONAME,
@ -325,7 +325,7 @@ func colasname(n *ir.Node) bool {
return false return false
} }
func colasdefn(left []*ir.Node, defn *ir.Node) { func colasdefn(left []ir.Node, defn ir.Node) {
for _, n := range left { for _, n := range left {
if n.Sym() != nil { if n.Sym() != nil {
n.Sym().SetUniq(true) n.Sym().SetUniq(true)
@ -370,7 +370,7 @@ func colasdefn(left []*ir.Node, defn *ir.Node) {
// declare the arguments in an // declare the arguments in an
// interface field declaration. // interface field declaration.
func ifacedcl(n *ir.Node) { func ifacedcl(n ir.Node) {
if n.Op() != ir.ODCLFIELD || n.Left() == nil { if n.Op() != ir.ODCLFIELD || n.Left() == nil {
base.Fatalf("ifacedcl") base.Fatalf("ifacedcl")
} }
@ -384,7 +384,7 @@ func ifacedcl(n *ir.Node) {
// and declare the arguments. // and declare the arguments.
// called in extern-declaration context // called in extern-declaration context
// returns in auto-declaration context. // returns in auto-declaration context.
func funchdr(n *ir.Node) { func funchdr(n ir.Node) {
// change the declaration context from extern to auto // change the declaration context from extern to auto
funcStack = append(funcStack, funcStackEnt{Curfn, dclcontext}) funcStack = append(funcStack, funcStackEnt{Curfn, dclcontext})
Curfn = n Curfn = n
@ -399,7 +399,7 @@ func funchdr(n *ir.Node) {
} }
} }
func funcargs(nt *ir.Node) { func funcargs(nt ir.Node) {
if nt.Op() != ir.OTFUNC { if nt.Op() != ir.OTFUNC {
base.Fatalf("funcargs %v", nt.Op()) base.Fatalf("funcargs %v", nt.Op())
} }
@ -449,7 +449,7 @@ func funcargs(nt *ir.Node) {
vargen = oldvargen vargen = oldvargen
} }
func funcarg(n *ir.Node, ctxt ir.Class) { func funcarg(n ir.Node, ctxt ir.Class) {
if n.Op() != ir.ODCLFIELD { if n.Op() != ir.ODCLFIELD {
base.Fatalf("funcarg %v", n.Op()) base.Fatalf("funcarg %v", n.Op())
} }
@ -499,7 +499,7 @@ func funcarg2(f *types.Field, ctxt ir.Class) {
var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext
type funcStackEnt struct { type funcStackEnt struct {
curfn *ir.Node curfn ir.Node
dclcontext ir.Class dclcontext ir.Class
} }
@ -535,7 +535,7 @@ func checkembeddedtype(t *types.Type) {
} }
} }
func structfield(n *ir.Node) *types.Field { func structfield(n ir.Node) *types.Field {
lno := base.Pos lno := base.Pos
base.Pos = n.Pos() base.Pos = n.Pos()
@ -582,7 +582,7 @@ func checkdupfields(what string, fss ...[]*types.Field) {
// convert a parsed id/type list into // convert a parsed id/type list into
// a type for struct/interface/arglist // a type for struct/interface/arglist
func tostruct(l []*ir.Node) *types.Type { func tostruct(l []ir.Node) *types.Type {
t := types.New(types.TSTRUCT) t := types.New(types.TSTRUCT)
fields := make([]*types.Field, len(l)) fields := make([]*types.Field, len(l))
@ -604,7 +604,7 @@ func tostruct(l []*ir.Node) *types.Type {
return t return t
} }
func tofunargs(l []*ir.Node, funarg types.Funarg) *types.Type { func tofunargs(l []ir.Node, funarg types.Funarg) *types.Type {
t := types.New(types.TSTRUCT) t := types.New(types.TSTRUCT)
t.StructType().Funarg = funarg t.StructType().Funarg = funarg
@ -632,7 +632,7 @@ func tofunargsfield(fields []*types.Field, funarg types.Funarg) *types.Type {
return t return t
} }
func interfacefield(n *ir.Node) *types.Field { func interfacefield(n ir.Node) *types.Field {
lno := base.Pos lno := base.Pos
base.Pos = n.Pos() base.Pos = n.Pos()
@ -661,7 +661,7 @@ func interfacefield(n *ir.Node) *types.Field {
return f return f
} }
func tointerface(l []*ir.Node) *types.Type { func tointerface(l []ir.Node) *types.Type {
if len(l) == 0 { if len(l) == 0 {
return types.Types[types.TINTER] return types.Types[types.TINTER]
} }
@ -678,7 +678,7 @@ func tointerface(l []*ir.Node) *types.Type {
return t return t
} }
func fakeRecv() *ir.Node { func fakeRecv() ir.Node {
return anonfield(types.FakeRecvType()) return anonfield(types.FakeRecvType())
} }
@ -694,12 +694,12 @@ func isifacemethod(f *types.Type) bool {
} }
// turn a parsed function declaration into a type // turn a parsed function declaration into a type
func functype(this *ir.Node, in, out []*ir.Node) *types.Type { func functype(this ir.Node, in, out []ir.Node) *types.Type {
t := types.New(types.TFUNC) t := types.New(types.TFUNC)
var rcvr []*ir.Node var rcvr []ir.Node
if this != nil { if this != nil {
rcvr = []*ir.Node{this} rcvr = []ir.Node{this}
} }
t.FuncType().Receiver = tofunargs(rcvr, types.FunargRcvr) t.FuncType().Receiver = tofunargs(rcvr, types.FunargRcvr)
t.FuncType().Params = tofunargs(in, types.FunargParams) t.FuncType().Params = tofunargs(in, types.FunargParams)
@ -799,7 +799,7 @@ func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sy
// - msym is the method symbol // - msym is the method symbol
// - t is function type (with receiver) // - t is function type (with receiver)
// Returns a pointer to the existing or added Field; or nil if there's an error. // Returns a pointer to the existing or added Field; or nil if there's an error.
func addmethod(n *ir.Node, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field { func addmethod(n ir.Node, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
if msym == nil { if msym == nil {
base.Fatalf("no method symbol") base.Fatalf("no method symbol")
} }
@ -935,7 +935,7 @@ func makefuncsym(s *types.Sym) {
} }
// setNodeNameFunc marks a node as a function. // setNodeNameFunc marks a node as a function.
func setNodeNameFunc(n *ir.Node) { func setNodeNameFunc(n ir.Node) {
if n.Op() != ir.ONAME || n.Class() != ir.Pxxx { if n.Op() != ir.ONAME || n.Class() != ir.Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n) base.Fatalf("expected ONAME/Pxxx node, got %v", n)
} }
@ -944,7 +944,7 @@ func setNodeNameFunc(n *ir.Node) {
n.Sym().SetFunc(true) n.Sym().SetFunc(true)
} }
func dclfunc(sym *types.Sym, tfn *ir.Node) *ir.Node { func dclfunc(sym *types.Sym, tfn ir.Node) ir.Node {
if tfn.Op() != ir.OTFUNC { if tfn.Op() != ir.OTFUNC {
base.Fatalf("expected OTFUNC node, got %v", tfn) base.Fatalf("expected OTFUNC node, got %v", tfn)
} }
@ -963,14 +963,14 @@ type nowritebarrierrecChecker struct {
// extraCalls contains extra function calls that may not be // extraCalls contains extra function calls that may not be
// visible during later analysis. It maps from the ODCLFUNC of // visible during later analysis. It maps from the ODCLFUNC of
// the caller to a list of callees. // the caller to a list of callees.
extraCalls map[*ir.Node][]nowritebarrierrecCall extraCalls map[ir.Node][]nowritebarrierrecCall
// curfn is the current function during AST walks. // curfn is the current function during AST walks.
curfn *ir.Node curfn ir.Node
} }
type nowritebarrierrecCall struct { type nowritebarrierrecCall struct {
target *ir.Node // ODCLFUNC of caller or callee target ir.Node // ODCLFUNC of caller or callee
lineno src.XPos // line of call lineno src.XPos // line of call
} }
@ -978,7 +978,7 @@ type nowritebarrierrecCall struct {
// must be called before transformclosure and walk. // must be called before transformclosure and walk.
func newNowritebarrierrecChecker() *nowritebarrierrecChecker { func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
c := &nowritebarrierrecChecker{ c := &nowritebarrierrecChecker{
extraCalls: make(map[*ir.Node][]nowritebarrierrecCall), extraCalls: make(map[ir.Node][]nowritebarrierrecCall),
} }
// Find all systemstack calls and record their targets. In // Find all systemstack calls and record their targets. In
@ -997,7 +997,7 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
return c return c
} }
func (c *nowritebarrierrecChecker) findExtraCalls(n *ir.Node) bool { func (c *nowritebarrierrecChecker) findExtraCalls(n ir.Node) bool {
if n.Op() != ir.OCALLFUNC { if n.Op() != ir.OCALLFUNC {
return true return true
} }
@ -1009,7 +1009,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(n *ir.Node) bool {
return true return true
} }
var callee *ir.Node var callee ir.Node
arg := n.List().First() arg := n.List().First()
switch arg.Op() { switch arg.Op() {
case ir.ONAME: case ir.ONAME:
@ -1034,7 +1034,7 @@ func (c *nowritebarrierrecChecker) findExtraCalls(n *ir.Node) bool {
// because that's all we know after we start SSA. // because that's all we know after we start SSA.
// //
// This can be called concurrently for different from Nodes. // This can be called concurrently for different from Nodes.
func (c *nowritebarrierrecChecker) recordCall(from *ir.Node, to *obj.LSym, pos src.XPos) { func (c *nowritebarrierrecChecker) recordCall(from ir.Node, to *obj.LSym, pos src.XPos) {
if from.Op() != ir.ODCLFUNC { if from.Op() != ir.ODCLFUNC {
base.Fatalf("expected ODCLFUNC, got %v", from) base.Fatalf("expected ODCLFUNC, got %v", from)
} }
@ -1052,14 +1052,14 @@ func (c *nowritebarrierrecChecker) check() {
// capture all calls created by lowering, but this means we // capture all calls created by lowering, but this means we
// only get to see the obj.LSyms of calls. symToFunc lets us // only get to see the obj.LSyms of calls. symToFunc lets us
// get back to the ODCLFUNCs. // get back to the ODCLFUNCs.
symToFunc := make(map[*obj.LSym]*ir.Node) symToFunc := make(map[*obj.LSym]ir.Node)
// funcs records the back-edges of the BFS call graph walk. It // funcs records the back-edges of the BFS call graph walk. It
// maps from the ODCLFUNC of each function that must not have // maps from the ODCLFUNC of each function that must not have
// write barriers to the call that inhibits them. Functions // write barriers to the call that inhibits them. Functions
// that are directly marked go:nowritebarrierrec are in this // that are directly marked go:nowritebarrierrec are in this
// map with a zero-valued nowritebarrierrecCall. This also // map with a zero-valued nowritebarrierrecCall. This also
// acts as the set of marks for the BFS of the call graph. // acts as the set of marks for the BFS of the call graph.
funcs := make(map[*ir.Node]nowritebarrierrecCall) funcs := make(map[ir.Node]nowritebarrierrecCall)
// q is the queue of ODCLFUNC Nodes to visit in BFS order. // q is the queue of ODCLFUNC Nodes to visit in BFS order.
var q ir.NodeQueue var q ir.NodeQueue
@ -1083,7 +1083,7 @@ func (c *nowritebarrierrecChecker) check() {
// Perform a BFS of the call graph from all // Perform a BFS of the call graph from all
// go:nowritebarrierrec functions. // go:nowritebarrierrec functions.
enqueue := func(src, target *ir.Node, pos src.XPos) { enqueue := func(src, target ir.Node, pos src.XPos) {
if target.Func().Pragma&ir.Yeswritebarrierrec != 0 { if target.Func().Pragma&ir.Yeswritebarrierrec != 0 {
// Don't flow into this function. // Don't flow into this function.
return return

View file

@ -17,7 +17,7 @@ import (
"strings" "strings"
) )
var embedlist []*ir.Node var embedlist []ir.Node
const ( const (
embedUnknown = iota embedUnknown = iota
@ -28,7 +28,7 @@ const (
var numLocalEmbed int var numLocalEmbed int
func varEmbed(p *noder, names []*ir.Node, typ *ir.Node, exprs []*ir.Node, embeds []PragmaEmbed) (newExprs []*ir.Node) { func varEmbed(p *noder, names []ir.Node, typ ir.Node, exprs []ir.Node, embeds []PragmaEmbed) (newExprs []ir.Node) {
haveEmbed := false haveEmbed := false
for _, decl := range p.file.DeclList { for _, decl := range p.file.DeclList {
imp, ok := decl.(*syntax.ImportDecl) imp, ok := decl.(*syntax.ImportDecl)
@ -118,7 +118,7 @@ func varEmbed(p *noder, names []*ir.Node, typ *ir.Node, exprs []*ir.Node, embeds
v.Name().Param.Ntype = typ v.Name().Param.Ntype = typ
v.SetClass(ir.PEXTERN) v.SetClass(ir.PEXTERN)
externdcl = append(externdcl, v) externdcl = append(externdcl, v)
exprs = []*ir.Node{v} exprs = []ir.Node{v}
} }
v.Name().Param.SetEmbedFiles(list) v.Name().Param.SetEmbedFiles(list)
@ -130,7 +130,7 @@ func varEmbed(p *noder, names []*ir.Node, typ *ir.Node, exprs []*ir.Node, embeds
// The match is approximate because we haven't done scope resolution yet and // The match is approximate because we haven't done scope resolution yet and
// can't tell whether "string" and "byte" really mean "string" and "byte". // can't tell whether "string" and "byte" really mean "string" and "byte".
// The result must be confirmed later, after type checking, using embedKind. // The result must be confirmed later, after type checking, using embedKind.
func embedKindApprox(typ *ir.Node) int { func embedKindApprox(typ ir.Node) int {
if typ.Sym() != nil && typ.Sym().Name == "FS" && (typ.Sym().Pkg.Path == "embed" || (typ.Sym().Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) { if typ.Sym() != nil && typ.Sym().Name == "FS" && (typ.Sym().Pkg.Path == "embed" || (typ.Sym().Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles return embedFiles
} }
@ -192,7 +192,7 @@ func dumpembeds() {
// initEmbed emits the init data for a //go:embed variable, // initEmbed emits the init data for a //go:embed variable,
// which is either a string, a []byte, or an embed.FS. // which is either a string, a []byte, or an embed.FS.
func initEmbed(v *ir.Node) { func initEmbed(v ir.Node) {
files := v.Name().Param.EmbedFiles() files := v.Name().Param.EmbedFiles()
switch kind := embedKind(v.Type()); kind { switch kind := embedKind(v.Type()); kind {
case embedUnknown: case embedUnknown:

View file

@ -86,7 +86,7 @@ import (
type Escape struct { type Escape struct {
allLocs []*EscLocation allLocs []*EscLocation
curfn *ir.Node curfn ir.Node
// loopDepth counts the current loop nesting depth within // loopDepth counts the current loop nesting depth within
// curfn. It increments within each "for" loop and at each // curfn. It increments within each "for" loop and at each
@ -101,8 +101,8 @@ type Escape struct {
// An EscLocation represents an abstract location that stores a Go // An EscLocation represents an abstract location that stores a Go
// variable. // variable.
type EscLocation struct { type EscLocation struct {
n *ir.Node // represented variable or expression, if any n ir.Node // represented variable or expression, if any
curfn *ir.Node // enclosing function curfn ir.Node // enclosing function
edges []EscEdge // incoming edges edges []EscEdge // incoming edges
loopDepth int // loopDepth at declaration loopDepth int // loopDepth at declaration
@ -147,7 +147,7 @@ func init() {
} }
// escFmt is called from node printing to print information about escape analysis results. // escFmt is called from node printing to print information about escape analysis results.
func escFmt(n *ir.Node, short bool) string { func escFmt(n ir.Node, short bool) string {
text := "" text := ""
switch n.Esc() { switch n.Esc() {
case EscUnknown: case EscUnknown:
@ -179,7 +179,7 @@ func escFmt(n *ir.Node, short bool) string {
// escapeFuncs performs escape analysis on a minimal batch of // escapeFuncs performs escape analysis on a minimal batch of
// functions. // functions.
func escapeFuncs(fns []*ir.Node, recursive bool) { func escapeFuncs(fns []ir.Node, recursive bool) {
for _, fn := range fns { for _, fn := range fns {
if fn.Op() != ir.ODCLFUNC { if fn.Op() != ir.ODCLFUNC {
base.Fatalf("unexpected node: %v", fn) base.Fatalf("unexpected node: %v", fn)
@ -202,7 +202,7 @@ func escapeFuncs(fns []*ir.Node, recursive bool) {
e.finish(fns) e.finish(fns)
} }
func (e *Escape) initFunc(fn *ir.Node) { func (e *Escape) initFunc(fn ir.Node) {
if fn.Op() != ir.ODCLFUNC || fn.Esc() != EscFuncUnknown { if fn.Op() != ir.ODCLFUNC || fn.Esc() != EscFuncUnknown {
base.Fatalf("unexpected node: %v", fn) base.Fatalf("unexpected node: %v", fn)
} }
@ -222,11 +222,11 @@ func (e *Escape) initFunc(fn *ir.Node) {
} }
} }
func (e *Escape) walkFunc(fn *ir.Node) { func (e *Escape) walkFunc(fn ir.Node) {
fn.SetEsc(EscFuncStarted) fn.SetEsc(EscFuncStarted)
// Identify labels that mark the head of an unstructured loop. // Identify labels that mark the head of an unstructured loop.
ir.InspectList(fn.Body(), func(n *ir.Node) bool { ir.InspectList(fn.Body(), func(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.OLABEL: case ir.OLABEL:
n.Sym().Label = nonlooping n.Sym().Label = nonlooping
@ -274,7 +274,7 @@ func (e *Escape) walkFunc(fn *ir.Node) {
// } // }
// stmt evaluates a single Go statement. // stmt evaluates a single Go statement.
func (e *Escape) stmt(n *ir.Node) { func (e *Escape) stmt(n ir.Node) {
if n == nil { if n == nil {
return return
} }
@ -447,7 +447,7 @@ func (e *Escape) block(l ir.Nodes) {
// expr models evaluating an expression n and flowing the result into // expr models evaluating an expression n and flowing the result into
// hole k. // hole k.
func (e *Escape) expr(k EscHole, n *ir.Node) { func (e *Escape) expr(k EscHole, n ir.Node) {
if n == nil { if n == nil {
return return
} }
@ -455,7 +455,7 @@ func (e *Escape) expr(k EscHole, n *ir.Node) {
e.exprSkipInit(k, n) e.exprSkipInit(k, n)
} }
func (e *Escape) exprSkipInit(k EscHole, n *ir.Node) { func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
if n == nil { if n == nil {
return return
} }
@ -653,7 +653,7 @@ func (e *Escape) exprSkipInit(k EscHole, n *ir.Node) {
// unsafeValue evaluates a uintptr-typed arithmetic expression looking // unsafeValue evaluates a uintptr-typed arithmetic expression looking
// for conversions from an unsafe.Pointer. // for conversions from an unsafe.Pointer.
func (e *Escape) unsafeValue(k EscHole, n *ir.Node) { func (e *Escape) unsafeValue(k EscHole, n ir.Node) {
if n.Type().Etype != types.TUINTPTR { if n.Type().Etype != types.TUINTPTR {
base.Fatalf("unexpected type %v for %v", n.Type(), n) base.Fatalf("unexpected type %v for %v", n.Type(), n)
} }
@ -690,7 +690,7 @@ func (e *Escape) unsafeValue(k EscHole, n *ir.Node) {
// discard evaluates an expression n for side-effects, but discards // discard evaluates an expression n for side-effects, but discards
// its value. // its value.
func (e *Escape) discard(n *ir.Node) { func (e *Escape) discard(n ir.Node) {
e.expr(e.discardHole(), n) e.expr(e.discardHole(), n)
} }
@ -702,7 +702,7 @@ func (e *Escape) discards(l ir.Nodes) {
// addr evaluates an addressable expression n and returns an EscHole // addr evaluates an addressable expression n and returns an EscHole
// that represents storing into the represented location. // that represents storing into the represented location.
func (e *Escape) addr(n *ir.Node) EscHole { func (e *Escape) addr(n ir.Node) EscHole {
if n == nil || ir.IsBlank(n) { if n == nil || ir.IsBlank(n) {
// Can happen at least in OSELRECV. // Can happen at least in OSELRECV.
// TODO(mdempsky): Anywhere else? // TODO(mdempsky): Anywhere else?
@ -751,7 +751,7 @@ func (e *Escape) addrs(l ir.Nodes) []EscHole {
} }
// assign evaluates the assignment dst = src. // assign evaluates the assignment dst = src.
func (e *Escape) assign(dst, src *ir.Node, why string, where *ir.Node) { func (e *Escape) assign(dst, src ir.Node, why string, where ir.Node) {
// Filter out some no-op assignments for escape analysis. // Filter out some no-op assignments for escape analysis.
ignore := dst != nil && src != nil && isSelfAssign(dst, src) ignore := dst != nil && src != nil && isSelfAssign(dst, src)
if ignore && base.Flag.LowerM != 0 { if ignore && base.Flag.LowerM != 0 {
@ -769,14 +769,14 @@ func (e *Escape) assign(dst, src *ir.Node, why string, where *ir.Node) {
} }
} }
func (e *Escape) assignHeap(src *ir.Node, why string, where *ir.Node) { func (e *Escape) assignHeap(src ir.Node, why string, where ir.Node) {
e.expr(e.heapHole().note(where, why), src) e.expr(e.heapHole().note(where, why), src)
} }
// call evaluates a call expressions, including builtin calls. ks // call evaluates a call expressions, including builtin calls. ks
// should contain the holes representing where the function callee's // should contain the holes representing where the function callee's
// results flows; where is the OGO/ODEFER context of the call, if any. // results flows; where is the OGO/ODEFER context of the call, if any.
func (e *Escape) call(ks []EscHole, call, where *ir.Node) { func (e *Escape) call(ks []EscHole, call, where ir.Node) {
topLevelDefer := where != nil && where.Op() == ir.ODEFER && e.loopDepth == 1 topLevelDefer := where != nil && where.Op() == ir.ODEFER && e.loopDepth == 1
if topLevelDefer { if topLevelDefer {
// force stack allocation of defer record, unless // force stack allocation of defer record, unless
@ -784,7 +784,7 @@ func (e *Escape) call(ks []EscHole, call, where *ir.Node) {
where.SetEsc(EscNever) where.SetEsc(EscNever)
} }
argument := func(k EscHole, arg *ir.Node) { argument := func(k EscHole, arg ir.Node) {
if topLevelDefer { if topLevelDefer {
// Top level defers arguments don't escape to // Top level defers arguments don't escape to
// heap, but they do need to last until end of // heap, but they do need to last until end of
@ -805,7 +805,7 @@ func (e *Escape) call(ks []EscHole, call, where *ir.Node) {
fixVariadicCall(call) fixVariadicCall(call)
// Pick out the function callee, if statically known. // Pick out the function callee, if statically known.
var fn *ir.Node var fn ir.Node
switch call.Op() { switch call.Op() {
case ir.OCALLFUNC: case ir.OCALLFUNC:
switch v := staticValue(call.Left()); { switch v := staticValue(call.Left()); {
@ -894,7 +894,7 @@ func (e *Escape) call(ks []EscHole, call, where *ir.Node) {
// ks should contain the holes representing where the function // ks should contain the holes representing where the function
// callee's results flows. fn is the statically-known callee function, // callee's results flows. fn is the statically-known callee function,
// if any. // if any.
func (e *Escape) tagHole(ks []EscHole, fn *ir.Node, param *types.Field) EscHole { func (e *Escape) tagHole(ks []EscHole, fn ir.Node, param *types.Field) EscHole {
// If this is a dynamic call, we can't rely on param.Note. // If this is a dynamic call, we can't rely on param.Note.
if fn == nil { if fn == nil {
return e.heapHole() return e.heapHole()
@ -935,7 +935,7 @@ func (e *Escape) tagHole(ks []EscHole, fn *ir.Node, param *types.Field) EscHole
// fn has not yet been analyzed, so its parameters and results // fn has not yet been analyzed, so its parameters and results
// should be incorporated directly into the flow graph instead of // should be incorporated directly into the flow graph instead of
// relying on its escape analysis tagging. // relying on its escape analysis tagging.
func (e *Escape) inMutualBatch(fn *ir.Node) bool { func (e *Escape) inMutualBatch(fn ir.Node) bool {
if fn.Name().Defn != nil && fn.Name().Defn.Esc() < EscFuncTagged { if fn.Name().Defn != nil && fn.Name().Defn.Esc() < EscFuncTagged {
if fn.Name().Defn.Esc() == EscFuncUnknown { if fn.Name().Defn.Esc() == EscFuncUnknown {
base.Fatalf("graph inconsistency") base.Fatalf("graph inconsistency")
@ -960,11 +960,11 @@ type EscHole struct {
type EscNote struct { type EscNote struct {
next *EscNote next *EscNote
where *ir.Node where ir.Node
why string why string
} }
func (k EscHole) note(where *ir.Node, why string) EscHole { func (k EscHole) note(where ir.Node, why string) EscHole {
if where == nil || why == "" { if where == nil || why == "" {
base.Fatalf("note: missing where/why") base.Fatalf("note: missing where/why")
} }
@ -986,10 +986,10 @@ func (k EscHole) shift(delta int) EscHole {
return k return k
} }
func (k EscHole) deref(where *ir.Node, why string) EscHole { return k.shift(1).note(where, why) } func (k EscHole) deref(where ir.Node, why string) EscHole { return k.shift(1).note(where, why) }
func (k EscHole) addr(where *ir.Node, why string) EscHole { return k.shift(-1).note(where, why) } func (k EscHole) addr(where ir.Node, why string) EscHole { return k.shift(-1).note(where, why) }
func (k EscHole) dotType(t *types.Type, where *ir.Node, why string) EscHole { func (k EscHole) dotType(t *types.Type, where ir.Node, why string) EscHole {
if !t.IsInterface() && !isdirectiface(t) { if !t.IsInterface() && !isdirectiface(t) {
k = k.shift(1) k = k.shift(1)
} }
@ -1026,7 +1026,7 @@ func (e *Escape) teeHole(ks ...EscHole) EscHole {
return loc.asHole() return loc.asHole()
} }
func (e *Escape) dcl(n *ir.Node) EscHole { func (e *Escape) dcl(n ir.Node) EscHole {
loc := e.oldLoc(n) loc := e.oldLoc(n)
loc.loopDepth = e.loopDepth loc.loopDepth = e.loopDepth
return loc.asHole() return loc.asHole()
@ -1035,7 +1035,7 @@ func (e *Escape) dcl(n *ir.Node) EscHole {
// spill allocates a new location associated with expression n, flows // spill allocates a new location associated with expression n, flows
// its address to k, and returns a hole that flows values to it. It's // its address to k, and returns a hole that flows values to it. It's
// intended for use with most expressions that allocate storage. // intended for use with most expressions that allocate storage.
func (e *Escape) spill(k EscHole, n *ir.Node) EscHole { func (e *Escape) spill(k EscHole, n ir.Node) EscHole {
loc := e.newLoc(n, true) loc := e.newLoc(n, true)
e.flow(k.addr(n, "spill"), loc) e.flow(k.addr(n, "spill"), loc)
return loc.asHole() return loc.asHole()
@ -1052,7 +1052,7 @@ func (e *Escape) later(k EscHole) EscHole {
// canonicalNode returns the canonical *Node that n logically // canonicalNode returns the canonical *Node that n logically
// represents. // represents.
func canonicalNode(n *ir.Node) *ir.Node { func canonicalNode(n ir.Node) ir.Node {
if n != nil && n.Op() == ir.ONAME && n.Name().IsClosureVar() { if n != nil && n.Op() == ir.ONAME && n.Name().IsClosureVar() {
n = n.Name().Defn n = n.Name().Defn
if n.Name().IsClosureVar() { if n.Name().IsClosureVar() {
@ -1063,7 +1063,7 @@ func canonicalNode(n *ir.Node) *ir.Node {
return n return n
} }
func (e *Escape) newLoc(n *ir.Node, transient bool) *EscLocation { func (e *Escape) newLoc(n ir.Node, transient bool) *EscLocation {
if e.curfn == nil { if e.curfn == nil {
base.Fatalf("e.curfn isn't set") base.Fatalf("e.curfn isn't set")
} }
@ -1096,7 +1096,7 @@ func (e *Escape) newLoc(n *ir.Node, transient bool) *EscLocation {
return loc return loc
} }
func (e *Escape) oldLoc(n *ir.Node) *EscLocation { func (e *Escape) oldLoc(n ir.Node) *EscLocation {
n = canonicalNode(n) n = canonicalNode(n)
return n.Opt().(*EscLocation) return n.Opt().(*EscLocation)
} }
@ -1394,7 +1394,7 @@ func (e *Escape) outlives(l, other *EscLocation) bool {
} }
// containsClosure reports whether c is a closure contained within f. // containsClosure reports whether c is a closure contained within f.
func containsClosure(f, c *ir.Node) bool { func containsClosure(f, c ir.Node) bool {
if f.Op() != ir.ODCLFUNC || c.Op() != ir.ODCLFUNC { if f.Op() != ir.ODCLFUNC || c.Op() != ir.ODCLFUNC {
base.Fatalf("bad containsClosure: %v, %v", f, c) base.Fatalf("bad containsClosure: %v, %v", f, c)
} }
@ -1429,7 +1429,7 @@ func (l *EscLocation) leakTo(sink *EscLocation, derefs int) {
l.paramEsc.AddHeap(derefs) l.paramEsc.AddHeap(derefs)
} }
func (e *Escape) finish(fns []*ir.Node) { func (e *Escape) finish(fns []ir.Node) {
// Record parameter tags for package export data. // Record parameter tags for package export data.
for _, fn := range fns { for _, fn := range fns {
fn.SetEsc(EscFuncTagged) fn.SetEsc(EscFuncTagged)
@ -1574,7 +1574,7 @@ func ParseLeaks(s string) EscLeaks {
return l return l
} }
func escapes(all []*ir.Node) { func escapes(all []ir.Node) {
visitBottomUp(all, escapeFuncs) visitBottomUp(all, escapeFuncs)
} }
@ -1607,7 +1607,7 @@ const (
) )
// funcSym returns fn.Func.Nname.Sym if no nils are encountered along the way. // funcSym returns fn.Func.Nname.Sym if no nils are encountered along the way.
func funcSym(fn *ir.Node) *types.Sym { func funcSym(fn ir.Node) *types.Sym {
if fn == nil || fn.Func().Nname == nil { if fn == nil || fn.Func().Nname == nil {
return nil return nil
} }
@ -1622,7 +1622,7 @@ var (
nonlooping = ir.Nod(ir.OXXX, nil, nil) nonlooping = ir.Nod(ir.OXXX, nil, nil)
) )
func isSliceSelfAssign(dst, src *ir.Node) bool { func isSliceSelfAssign(dst, src ir.Node) bool {
// Detect the following special case. // Detect the following special case.
// //
// func (b *Buffer) Foo() { // func (b *Buffer) Foo() {
@ -1672,7 +1672,7 @@ func isSliceSelfAssign(dst, src *ir.Node) bool {
// isSelfAssign reports whether assignment from src to dst can // isSelfAssign reports whether assignment from src to dst can
// be ignored by the escape analysis as it's effectively a self-assignment. // be ignored by the escape analysis as it's effectively a self-assignment.
func isSelfAssign(dst, src *ir.Node) bool { func isSelfAssign(dst, src ir.Node) bool {
if isSliceSelfAssign(dst, src) { if isSliceSelfAssign(dst, src) {
return true return true
} }
@ -1709,7 +1709,7 @@ func isSelfAssign(dst, src *ir.Node) bool {
// mayAffectMemory reports whether evaluation of n may affect the program's // mayAffectMemory reports whether evaluation of n may affect the program's
// memory state. If the expression can't affect memory state, then it can be // memory state. If the expression can't affect memory state, then it can be
// safely ignored by the escape analysis. // safely ignored by the escape analysis.
func mayAffectMemory(n *ir.Node) bool { func mayAffectMemory(n ir.Node) bool {
// We may want to use a list of "memory safe" ops instead of generally // We may want to use a list of "memory safe" ops instead of generally
// "side-effect free", which would include all calls and other ops that can // "side-effect free", which would include all calls and other ops that can
// allocate or change global state. For now, it's safer to start with the latter. // allocate or change global state. For now, it's safer to start with the latter.
@ -1736,7 +1736,7 @@ func mayAffectMemory(n *ir.Node) bool {
// heapAllocReason returns the reason the given Node must be heap // heapAllocReason returns the reason the given Node must be heap
// allocated, or the empty string if it doesn't. // allocated, or the empty string if it doesn't.
func heapAllocReason(n *ir.Node) string { func heapAllocReason(n ir.Node) string {
if n.Type() == nil { if n.Type() == nil {
return "" return ""
} }
@ -1781,7 +1781,7 @@ func heapAllocReason(n *ir.Node) string {
// by "increasing" the "value" of n.Esc to EscHeap. // by "increasing" the "value" of n.Esc to EscHeap.
// Storage is allocated as necessary to allow the address // Storage is allocated as necessary to allow the address
// to be taken. // to be taken.
func addrescapes(n *ir.Node) { func addrescapes(n ir.Node) {
switch n.Op() { switch n.Op() {
default: default:
// Unexpected Op, probably due to a previous type error. Ignore. // Unexpected Op, probably due to a previous type error. Ignore.
@ -1847,7 +1847,7 @@ func addrescapes(n *ir.Node) {
} }
// moveToHeap records the parameter or local variable n as moved to the heap. // moveToHeap records the parameter or local variable n as moved to the heap.
func moveToHeap(n *ir.Node) { func moveToHeap(n ir.Node) {
if base.Flag.LowerR != 0 { if base.Flag.LowerR != 0 {
ir.Dump("MOVE", n) ir.Dump("MOVE", n)
} }
@ -1939,7 +1939,7 @@ const unsafeUintptrTag = "unsafe-uintptr"
// marked go:uintptrescapes. // marked go:uintptrescapes.
const uintptrEscapesTag = "uintptr-escapes" const uintptrEscapesTag = "uintptr-escapes"
func (e *Escape) paramTag(fn *ir.Node, narg int, f *types.Field) string { func (e *Escape) paramTag(fn ir.Node, narg int, f *types.Field) string {
name := func() string { name := func() string {
if f.Sym != nil { if f.Sym != nil {
return f.Sym.Name return f.Sym.Name

View file

@ -21,10 +21,10 @@ func exportf(bout *bio.Writer, format string, args ...interface{}) {
} }
} }
var asmlist []*ir.Node var asmlist []ir.Node
// exportsym marks n for export (or reexport). // exportsym marks n for export (or reexport).
func exportsym(n *ir.Node) { func exportsym(n ir.Node) {
if n.Sym().OnExportList() { if n.Sym().OnExportList() {
return return
} }
@ -41,7 +41,7 @@ func initname(s string) bool {
return s == "init" return s == "init"
} }
func autoexport(n *ir.Node, ctxt ir.Class) { func autoexport(n ir.Node, ctxt ir.Class) {
if n.Sym().Pkg != ir.LocalPkg { if n.Sym().Pkg != ir.LocalPkg {
return return
} }
@ -74,7 +74,7 @@ func dumpexport(bout *bio.Writer) {
} }
} }
func importsym(ipkg *types.Pkg, s *types.Sym, op ir.Op) *ir.Node { func importsym(ipkg *types.Pkg, s *types.Sym, op ir.Op) ir.Node {
n := ir.AsNode(s.PkgDef()) n := ir.AsNode(s.PkgDef())
if n == nil { if n == nil {
// iimport should have created a stub ONONAME // iimport should have created a stub ONONAME
@ -120,7 +120,7 @@ func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
// importobj declares symbol s as an imported object representable by op. // importobj declares symbol s as an imported object representable by op.
// ipkg is the package being imported // ipkg is the package being imported
func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Node { func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) ir.Node {
n := importsym(ipkg, s, op) n := importsym(ipkg, s, op)
if n.Op() != ir.ONONAME { if n.Op() != ir.ONONAME {
if n.Op() == op && (n.Class() != ctxt || !types.Identical(n.Type(), t)) { if n.Op() == op && (n.Class() != ctxt || !types.Identical(n.Type(), t)) {

View file

@ -30,13 +30,13 @@ func sysvar(name string) *obj.LSym {
// isParamStackCopy reports whether this is the on-stack copy of a // isParamStackCopy reports whether this is the on-stack copy of a
// function parameter that moved to the heap. // function parameter that moved to the heap.
func isParamStackCopy(n *ir.Node) bool { func isParamStackCopy(n ir.Node) bool {
return n.Op() == ir.ONAME && (n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Name().Param.Heapaddr != nil return n.Op() == ir.ONAME && (n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Name().Param.Heapaddr != nil
} }
// isParamHeapCopy reports whether this is the on-heap copy of // isParamHeapCopy reports whether this is the on-heap copy of
// a function parameter that moved to the heap. // a function parameter that moved to the heap.
func isParamHeapCopy(n *ir.Node) bool { func isParamHeapCopy(n ir.Node) bool {
return n.Op() == ir.ONAME && n.Class() == ir.PAUTOHEAP && n.Name().Param.Stackcopy != nil return n.Op() == ir.ONAME && n.Class() == ir.PAUTOHEAP && n.Name().Param.Stackcopy != nil
} }
@ -52,7 +52,7 @@ func autotmpname(n int) string {
} }
// make a new Node off the books // make a new Node off the books
func tempAt(pos src.XPos, curfn *ir.Node, t *types.Type) *ir.Node { func tempAt(pos src.XPos, curfn ir.Node, t *types.Type) ir.Node {
if curfn == nil { if curfn == nil {
base.Fatalf("no curfn for tempAt") base.Fatalf("no curfn for tempAt")
} }
@ -83,6 +83,6 @@ func tempAt(pos src.XPos, curfn *ir.Node, t *types.Type) *ir.Node {
return n.Orig() return n.Orig()
} }
func temp(t *types.Type) *ir.Node { func temp(t *types.Type) ir.Node {
return tempAt(base.Pos, Curfn, t) return tempAt(base.Pos, Curfn, t)
} }

View file

@ -128,11 +128,11 @@ var (
iscmp [ir.OEND]bool iscmp [ir.OEND]bool
) )
var xtop []*ir.Node var xtop []ir.Node
var exportlist []*ir.Node var exportlist []ir.Node
var importlist []*ir.Node // imported functions and methods with inlinable bodies var importlist []ir.Node // imported functions and methods with inlinable bodies
var ( var (
funcsymsmu sync.Mutex // protects funcsyms and associated package lookups (see func funcsym) funcsymsmu sync.Mutex // protects funcsyms and associated package lookups (see func funcsym)
@ -141,7 +141,7 @@ var (
var dclcontext ir.Class // PEXTERN/PAUTO var dclcontext ir.Class // PEXTERN/PAUTO
var Curfn *ir.Node var Curfn ir.Node
var Widthptr int var Widthptr int
@ -156,7 +156,7 @@ var instrumenting bool
// Whether we are tracking lexical scopes for DWARF. // Whether we are tracking lexical scopes for DWARF.
var trackScopes bool var trackScopes bool
var nodfp *ir.Node var nodfp ir.Node
var autogeneratedPos src.XPos var autogeneratedPos src.XPos
@ -193,7 +193,7 @@ var thearch Arch
var ( var (
staticuint64s, staticuint64s,
zerobase *ir.Node zerobase ir.Node
assertE2I, assertE2I,
assertE2I2, assertE2I2,

View file

@ -47,7 +47,7 @@ type Progs struct {
next *obj.Prog // next Prog next *obj.Prog // next Prog
pc int64 // virtual PC; count of Progs pc int64 // virtual PC; count of Progs
pos src.XPos // position to use for new Progs pos src.XPos // position to use for new Progs
curfn *ir.Node // fn these Progs are for curfn ir.Node // fn these Progs are for
progcache []obj.Prog // local progcache progcache []obj.Prog // local progcache
cacheidx int // first free element of progcache cacheidx int // first free element of progcache
@ -57,7 +57,7 @@ type Progs struct {
// newProgs returns a new Progs for fn. // newProgs returns a new Progs for fn.
// worker indicates which of the backend workers will use the Progs. // worker indicates which of the backend workers will use the Progs.
func newProgs(fn *ir.Node, worker int) *Progs { func newProgs(fn ir.Node, worker int) *Progs {
pp := new(Progs) pp := new(Progs)
if base.Ctxt.CanReuseProgs() { if base.Ctxt.CanReuseProgs() {
sz := len(sharedProgArray) / base.Flag.LowerC sz := len(sharedProgArray) / base.Flag.LowerC
@ -174,7 +174,7 @@ func (pp *Progs) Appendpp(p *obj.Prog, as obj.As, ftype obj.AddrType, freg int16
return q return q
} }
func (pp *Progs) settext(fn *ir.Node) { func (pp *Progs) settext(fn ir.Node) {
if pp.Text != nil { if pp.Text != nil {
base.Fatalf("Progs.settext called twice") base.Fatalf("Progs.settext called twice")
} }
@ -290,7 +290,7 @@ func initLSym(f *ir.Func, hasBody bool) {
base.Ctxt.InitTextSym(f.LSym, flag) base.Ctxt.InitTextSym(f.LSym, flag)
} }
func ggloblnod(nam *ir.Node) { func ggloblnod(nam ir.Node) {
s := nam.Sym().Linksym() s := nam.Sym().Linksym()
s.Gotype = ngotype(nam).Linksym() s.Gotype = ngotype(nam).Linksym()
flags := 0 flags := 0

View file

@ -259,8 +259,8 @@ func iexport(out *bufio.Writer) {
p := iexporter{ p := iexporter{
allPkgs: map[*types.Pkg]bool{}, allPkgs: map[*types.Pkg]bool{},
stringIndex: map[string]uint64{}, stringIndex: map[string]uint64{},
declIndex: map[*ir.Node]uint64{}, declIndex: map[ir.Node]uint64{},
inlineIndex: map[*ir.Node]uint64{}, inlineIndex: map[ir.Node]uint64{},
typIndex: map[*types.Type]uint64{}, typIndex: map[*types.Type]uint64{},
} }
@ -314,9 +314,9 @@ func iexport(out *bufio.Writer) {
// we're writing out the main index, which is also read by // we're writing out the main index, which is also read by
// non-compiler tools and includes a complete package description // non-compiler tools and includes a complete package description
// (i.e., name and height). // (i.e., name and height).
func (w *exportWriter) writeIndex(index map[*ir.Node]uint64, mainIndex bool) { func (w *exportWriter) writeIndex(index map[ir.Node]uint64, mainIndex bool) {
// Build a map from packages to objects from that package. // Build a map from packages to objects from that package.
pkgObjs := map[*types.Pkg][]*ir.Node{} pkgObjs := map[*types.Pkg][]ir.Node{}
// For the main index, make sure to include every package that // For the main index, make sure to include every package that
// we reference, even if we're not exporting (or reexporting) // we reference, even if we're not exporting (or reexporting)
@ -374,8 +374,8 @@ type iexporter struct {
stringIndex map[string]uint64 stringIndex map[string]uint64
data0 intWriter data0 intWriter
declIndex map[*ir.Node]uint64 declIndex map[ir.Node]uint64
inlineIndex map[*ir.Node]uint64 inlineIndex map[ir.Node]uint64
typIndex map[*types.Type]uint64 typIndex map[*types.Type]uint64
} }
@ -394,7 +394,7 @@ func (p *iexporter) stringOff(s string) uint64 {
} }
// pushDecl adds n to the declaration work queue, if not already present. // pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(n *ir.Node) { func (p *iexporter) pushDecl(n ir.Node) {
if n.Sym() == nil || ir.AsNode(n.Sym().Def) != n && n.Op() != ir.OTYPE { if n.Sym() == nil || ir.AsNode(n.Sym().Def) != n && n.Op() != ir.OTYPE {
base.Fatalf("weird Sym: %v, %v", n, n.Sym()) base.Fatalf("weird Sym: %v, %v", n, n.Sym())
} }
@ -423,7 +423,7 @@ type exportWriter struct {
prevColumn int64 prevColumn int64
} }
func (p *iexporter) doDecl(n *ir.Node) { func (p *iexporter) doDecl(n ir.Node) {
w := p.newWriter() w := p.newWriter()
w.setPkg(n.Sym().Pkg, false) w.setPkg(n.Sym().Pkg, false)
@ -515,7 +515,7 @@ func (w *exportWriter) tag(tag byte) {
w.data.WriteByte(tag) w.data.WriteByte(tag)
} }
func (p *iexporter) doInline(f *ir.Node) { func (p *iexporter) doInline(f ir.Node) {
w := p.newWriter() w := p.newWriter()
w.setPkg(fnpkg(f), false) w.setPkg(fnpkg(f), false)
@ -570,7 +570,7 @@ func (w *exportWriter) pkg(pkg *types.Pkg) {
w.string(pkg.Path) w.string(pkg.Path)
} }
func (w *exportWriter) qualifiedIdent(n *ir.Node) { func (w *exportWriter) qualifiedIdent(n ir.Node) {
// Ensure any referenced declarations are written out too. // Ensure any referenced declarations are written out too.
w.p.pushDecl(n) w.p.pushDecl(n)
@ -955,12 +955,12 @@ func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
// Compiler-specific extensions. // Compiler-specific extensions.
func (w *exportWriter) varExt(n *ir.Node) { func (w *exportWriter) varExt(n ir.Node) {
w.linkname(n.Sym()) w.linkname(n.Sym())
w.symIdx(n.Sym()) w.symIdx(n.Sym())
} }
func (w *exportWriter) funcExt(n *ir.Node) { func (w *exportWriter) funcExt(n ir.Node) {
w.linkname(n.Sym()) w.linkname(n.Sym())
w.symIdx(n.Sym()) w.symIdx(n.Sym())
@ -1037,7 +1037,7 @@ func (w *exportWriter) stmtList(list ir.Nodes) {
w.op(ir.OEND) w.op(ir.OEND)
} }
func (w *exportWriter) node(n *ir.Node) { func (w *exportWriter) node(n ir.Node) {
if ir.OpPrec[n.Op()] < 0 { if ir.OpPrec[n.Op()] < 0 {
w.stmt(n) w.stmt(n)
} else { } else {
@ -1047,7 +1047,7 @@ func (w *exportWriter) node(n *ir.Node) {
// Caution: stmt will emit more than one node for statement nodes n that have a non-empty // Caution: stmt will emit more than one node for statement nodes n that have a non-empty
// n.Ninit and where n cannot have a natural init section (such as in "if", "for", etc.). // n.Ninit and where n cannot have a natural init section (such as in "if", "for", etc.).
func (w *exportWriter) stmt(n *ir.Node) { func (w *exportWriter) stmt(n ir.Node) {
if n.Init().Len() > 0 && !ir.StmtWithInit(n.Op()) { if n.Init().Len() > 0 && !ir.StmtWithInit(n.Op()) {
// can't use stmtList here since we don't want the final OEND // can't use stmtList here since we don't want the final OEND
for _, n := range n.Init().Slice() { for _, n := range n.Init().Slice() {
@ -1095,7 +1095,7 @@ func (w *exportWriter) stmt(n *ir.Node) {
w.op(ir.OAS2) w.op(ir.OAS2)
w.pos(n.Pos()) w.pos(n.Pos())
w.exprList(n.List()) w.exprList(n.List())
w.exprList(ir.AsNodes([]*ir.Node{n.Right()})) w.exprList(ir.AsNodes([]ir.Node{n.Right()}))
case ir.ORETURN: case ir.ORETURN:
w.op(ir.ORETURN) w.op(ir.ORETURN)
@ -1164,7 +1164,7 @@ func (w *exportWriter) stmt(n *ir.Node) {
} }
} }
func (w *exportWriter) caseList(sw *ir.Node) { func (w *exportWriter) caseList(sw ir.Node) {
namedTypeSwitch := sw.Op() == ir.OSWITCH && sw.Left() != nil && sw.Left().Op() == ir.OTYPESW && sw.Left().Left() != nil namedTypeSwitch := sw.Op() == ir.OSWITCH && sw.Left() != nil && sw.Left().Op() == ir.OTYPESW && sw.Left().Left() != nil
cases := sw.List().Slice() cases := sw.List().Slice()
@ -1189,7 +1189,7 @@ func (w *exportWriter) exprList(list ir.Nodes) {
w.op(ir.OEND) w.op(ir.OEND)
} }
func (w *exportWriter) expr(n *ir.Node) { func (w *exportWriter) expr(n ir.Node) {
// from nodefmt (fmt.go) // from nodefmt (fmt.go)
// //
// nodefmt reverts nodes back to their original - we don't need to do // nodefmt reverts nodes back to their original - we don't need to do
@ -1430,7 +1430,7 @@ func (w *exportWriter) op(op ir.Op) {
w.uint64(uint64(op)) w.uint64(uint64(op))
} }
func (w *exportWriter) exprsOrNil(a, b *ir.Node) { func (w *exportWriter) exprsOrNil(a, b ir.Node) {
ab := 0 ab := 0
if a != nil { if a != nil {
ab |= 1 ab |= 1
@ -1455,7 +1455,7 @@ func (w *exportWriter) elemList(list ir.Nodes) {
} }
} }
func (w *exportWriter) localName(n *ir.Node) { func (w *exportWriter) localName(n ir.Node) {
// Escape analysis happens after inline bodies are saved, but // Escape analysis happens after inline bodies are saved, but
// we're using the same ONAME nodes, so we might still see // we're using the same ONAME nodes, so we might still see
// PAUTOHEAP here. // PAUTOHEAP here.

View file

@ -41,7 +41,7 @@ var (
inlineImporter = map[*types.Sym]iimporterAndOffset{} inlineImporter = map[*types.Sym]iimporterAndOffset{}
) )
func expandDecl(n *ir.Node) { func expandDecl(n ir.Node) {
if n.Op() != ir.ONONAME { if n.Op() != ir.ONONAME {
return return
} }
@ -55,7 +55,7 @@ func expandDecl(n *ir.Node) {
r.doDecl(n) r.doDecl(n)
} }
func expandInline(fn *ir.Node) { func expandInline(fn ir.Node) {
if fn.Func().Inl.Body != nil { if fn.Func().Inl.Body != nil {
return return
} }
@ -68,7 +68,7 @@ func expandInline(fn *ir.Node) {
r.doInline(fn) r.doInline(fn)
} }
func importReaderFor(n *ir.Node, importers map[*types.Sym]iimporterAndOffset) *importReader { func importReaderFor(n ir.Node, importers map[*types.Sym]iimporterAndOffset) *importReader {
x, ok := importers[n.Sym()] x, ok := importers[n.Sym()]
if !ok { if !ok {
return nil return nil
@ -281,7 +281,7 @@ func (r *importReader) setPkg() {
r.currPkg = r.pkg() r.currPkg = r.pkg()
} }
func (r *importReader) doDecl(n *ir.Node) { func (r *importReader) doDecl(n ir.Node) {
if n.Op() != ir.ONONAME { if n.Op() != ir.ONONAME {
base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym(), n.Op()) base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym(), n.Op())
} }
@ -635,12 +635,12 @@ func (r *importReader) byte() byte {
// Compiler-specific extensions. // Compiler-specific extensions.
func (r *importReader) varExt(n *ir.Node) { func (r *importReader) varExt(n ir.Node) {
r.linkname(n.Sym()) r.linkname(n.Sym())
r.symIdx(n.Sym()) r.symIdx(n.Sym())
} }
func (r *importReader) funcExt(n *ir.Node) { func (r *importReader) funcExt(n ir.Node) {
r.linkname(n.Sym()) r.linkname(n.Sym())
r.symIdx(n.Sym()) r.symIdx(n.Sym())
@ -695,7 +695,7 @@ func (r *importReader) typeExt(t *types.Type) {
// so we can use index to reference the symbol. // so we can use index to reference the symbol.
var typeSymIdx = make(map[*types.Type][2]int64) var typeSymIdx = make(map[*types.Type][2]int64)
func (r *importReader) doInline(n *ir.Node) { func (r *importReader) doInline(n ir.Node) {
if len(n.Func().Inl.Body) != 0 { if len(n.Func().Inl.Body) != 0 {
base.Fatalf("%v already has inline body", n) base.Fatalf("%v already has inline body", n)
} }
@ -710,7 +710,7 @@ func (r *importReader) doInline(n *ir.Node) {
// (not doing so can cause significant performance // (not doing so can cause significant performance
// degradation due to unnecessary calls to empty // degradation due to unnecessary calls to empty
// functions). // functions).
body = []*ir.Node{} body = []ir.Node{}
} }
n.Func().Inl.Body = body n.Func().Inl.Body = body
@ -740,8 +740,8 @@ func (r *importReader) doInline(n *ir.Node) {
// unrefined nodes (since this is what the importer uses). The respective case // unrefined nodes (since this is what the importer uses). The respective case
// entries are unreachable in the importer. // entries are unreachable in the importer.
func (r *importReader) stmtList() []*ir.Node { func (r *importReader) stmtList() []ir.Node {
var list []*ir.Node var list []ir.Node
for { for {
n := r.node() n := r.node()
if n == nil { if n == nil {
@ -758,10 +758,10 @@ func (r *importReader) stmtList() []*ir.Node {
return list return list
} }
func (r *importReader) caseList(sw *ir.Node) []*ir.Node { func (r *importReader) caseList(sw ir.Node) []ir.Node {
namedTypeSwitch := sw.Op() == ir.OSWITCH && sw.Left() != nil && sw.Left().Op() == ir.OTYPESW && sw.Left().Left() != nil namedTypeSwitch := sw.Op() == ir.OSWITCH && sw.Left() != nil && sw.Left().Op() == ir.OTYPESW && sw.Left().Left() != nil
cases := make([]*ir.Node, r.uint64()) cases := make([]ir.Node, r.uint64())
for i := range cases { for i := range cases {
cas := ir.NodAt(r.pos(), ir.OCASE, nil, nil) cas := ir.NodAt(r.pos(), ir.OCASE, nil, nil)
cas.PtrList().Set(r.stmtList()) cas.PtrList().Set(r.stmtList())
@ -780,8 +780,8 @@ func (r *importReader) caseList(sw *ir.Node) []*ir.Node {
return cases return cases
} }
func (r *importReader) exprList() []*ir.Node { func (r *importReader) exprList() []ir.Node {
var list []*ir.Node var list []ir.Node
for { for {
n := r.expr() n := r.expr()
if n == nil { if n == nil {
@ -792,7 +792,7 @@ func (r *importReader) exprList() []*ir.Node {
return list return list
} }
func (r *importReader) expr() *ir.Node { func (r *importReader) expr() ir.Node {
n := r.node() n := r.node()
if n != nil && n.Op() == ir.OBLOCK { if n != nil && n.Op() == ir.OBLOCK {
base.Fatalf("unexpected block node: %v", n) base.Fatalf("unexpected block node: %v", n)
@ -801,7 +801,7 @@ func (r *importReader) expr() *ir.Node {
} }
// TODO(gri) split into expr and stmt // TODO(gri) split into expr and stmt
func (r *importReader) node() *ir.Node { func (r *importReader) node() ir.Node {
switch op := r.op(); op { switch op := r.op(); op {
// expressions // expressions
// case OPAREN: // case OPAREN:
@ -814,7 +814,7 @@ func (r *importReader) node() *ir.Node {
pos := r.pos() pos := r.pos()
typ := r.typ() typ := r.typ()
var n *ir.Node var n ir.Node
if typ.HasNil() { if typ.HasNil() {
n = nodnil() n = nodnil()
} else { } else {
@ -906,7 +906,7 @@ func (r *importReader) node() *ir.Node {
case ir.OSLICE, ir.OSLICE3: case ir.OSLICE, ir.OSLICE3:
n := ir.NodAt(r.pos(), op, r.expr(), nil) n := ir.NodAt(r.pos(), op, r.expr(), nil)
low, high := r.exprsOrNil() low, high := r.exprsOrNil()
var max *ir.Node var max ir.Node
if n.Op().IsSlice3() { if n.Op().IsSlice3() {
max = r.expr() max = r.expr()
} }
@ -970,7 +970,7 @@ func (r *importReader) node() *ir.Node {
pos := r.pos() pos := r.pos()
lhs := npos(pos, dclname(r.ident())) lhs := npos(pos, dclname(r.ident()))
typ := typenod(r.typ()) typ := typenod(r.typ())
return npos(pos, liststmt(variter([]*ir.Node{lhs}, typ, nil))) // TODO(gri) avoid list creation return npos(pos, liststmt(variter([]ir.Node{lhs}, typ, nil))) // TODO(gri) avoid list creation
// case ODCLFIELD: // case ODCLFIELD:
// unimplemented // unimplemented
@ -1082,9 +1082,9 @@ func (r *importReader) op() ir.Op {
return ir.Op(r.uint64()) return ir.Op(r.uint64())
} }
func (r *importReader) elemList() []*ir.Node { func (r *importReader) elemList() []ir.Node {
c := r.uint64() c := r.uint64()
list := make([]*ir.Node, c) list := make([]ir.Node, c)
for i := range list { for i := range list {
s := r.ident() s := r.ident()
list[i] = nodSym(ir.OSTRUCTKEY, r.expr(), s) list[i] = nodSym(ir.OSTRUCTKEY, r.expr(), s)
@ -1092,7 +1092,7 @@ func (r *importReader) elemList() []*ir.Node {
return list return list
} }
func (r *importReader) exprsOrNil() (a, b *ir.Node) { func (r *importReader) exprsOrNil() (a, b ir.Node) {
ab := r.uint64() ab := r.uint64()
if ab&1 != 0 { if ab&1 != 0 {
a = r.expr() a = r.expr()

View file

@ -33,7 +33,7 @@ func renameinit() *types.Sym {
// 1) Initialize all of the packages the current package depends on. // 1) Initialize all of the packages the current package depends on.
// 2) Initialize all the variables that have initializers. // 2) Initialize all the variables that have initializers.
// 3) Run any init functions. // 3) Run any init functions.
func fninit(n []*ir.Node) { func fninit(n []ir.Node) {
nf := initOrder(n) nf := initOrder(n)
var deps []*obj.LSym // initTask records for packages the current package depends on var deps []*obj.LSym // initTask records for packages the current package depends on

View file

@ -64,7 +64,7 @@ const (
type InitOrder struct { type InitOrder struct {
// blocking maps initialization assignments to the assignments // blocking maps initialization assignments to the assignments
// that depend on it. // that depend on it.
blocking map[*ir.Node][]*ir.Node blocking map[ir.Node][]ir.Node
// ready is the queue of Pending initialization assignments // ready is the queue of Pending initialization assignments
// that are ready for initialization. // that are ready for initialization.
@ -75,13 +75,13 @@ type InitOrder struct {
// package-level declarations (in declaration order) and outputs the // package-level declarations (in declaration order) and outputs the
// corresponding list of statements to include in the init() function // corresponding list of statements to include in the init() function
// body. // body.
func initOrder(l []*ir.Node) []*ir.Node { func initOrder(l []ir.Node) []ir.Node {
s := InitSchedule{ s := InitSchedule{
initplans: make(map[*ir.Node]*InitPlan), initplans: make(map[ir.Node]*InitPlan),
inittemps: make(map[*ir.Node]*ir.Node), inittemps: make(map[ir.Node]ir.Node),
} }
o := InitOrder{ o := InitOrder{
blocking: make(map[*ir.Node][]*ir.Node), blocking: make(map[ir.Node][]ir.Node),
} }
// Process all package-level assignment in declaration order. // Process all package-level assignment in declaration order.
@ -110,7 +110,7 @@ func initOrder(l []*ir.Node) []*ir.Node {
// first. // first.
base.ExitIfErrors() base.ExitIfErrors()
findInitLoopAndExit(firstLHS(n), new([]*ir.Node)) findInitLoopAndExit(firstLHS(n), new([]ir.Node))
base.Fatalf("initialization unfinished, but failed to identify loop") base.Fatalf("initialization unfinished, but failed to identify loop")
} }
} }
@ -125,7 +125,7 @@ func initOrder(l []*ir.Node) []*ir.Node {
return s.out return s.out
} }
func (o *InitOrder) processAssign(n *ir.Node) { func (o *InitOrder) processAssign(n ir.Node) {
if n.Initorder() != InitNotStarted || n.Offset() != types.BADWIDTH { if n.Initorder() != InitNotStarted || n.Offset() != types.BADWIDTH {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset()) base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset())
} }
@ -154,9 +154,9 @@ func (o *InitOrder) processAssign(n *ir.Node) {
// flushReady repeatedly applies initialize to the earliest (in // flushReady repeatedly applies initialize to the earliest (in
// declaration order) assignment ready for initialization and updates // declaration order) assignment ready for initialization and updates
// the inverse dependency ("blocking") graph. // the inverse dependency ("blocking") graph.
func (o *InitOrder) flushReady(initialize func(*ir.Node)) { func (o *InitOrder) flushReady(initialize func(ir.Node)) {
for o.ready.Len() != 0 { for o.ready.Len() != 0 {
n := heap.Pop(&o.ready).(*ir.Node) n := heap.Pop(&o.ready).(ir.Node)
if n.Initorder() != InitPending || n.Offset() != 0 { if n.Initorder() != InitPending || n.Offset() != 0 {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset()) base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset())
} }
@ -183,7 +183,7 @@ func (o *InitOrder) flushReady(initialize func(*ir.Node)) {
// path points to a slice used for tracking the sequence of // path points to a slice used for tracking the sequence of
// variables/functions visited. Using a pointer to a slice allows the // variables/functions visited. Using a pointer to a slice allows the
// slice capacity to grow and limit reallocations. // slice capacity to grow and limit reallocations.
func findInitLoopAndExit(n *ir.Node, path *[]*ir.Node) { func findInitLoopAndExit(n ir.Node, path *[]ir.Node) {
// We implement a simple DFS loop-finding algorithm. This // We implement a simple DFS loop-finding algorithm. This
// could be faster, but initialization cycles are rare. // could be faster, but initialization cycles are rare.
@ -196,7 +196,7 @@ func findInitLoopAndExit(n *ir.Node, path *[]*ir.Node) {
// There might be multiple loops involving n; by sorting // There might be multiple loops involving n; by sorting
// references, we deterministically pick the one reported. // references, we deterministically pick the one reported.
refers := collectDeps(n.Name().Defn, false).Sorted(func(ni, nj *ir.Node) bool { refers := collectDeps(n.Name().Defn, false).Sorted(func(ni, nj ir.Node) bool {
return ni.Pos().Before(nj.Pos()) return ni.Pos().Before(nj.Pos())
}) })
@ -215,7 +215,7 @@ func findInitLoopAndExit(n *ir.Node, path *[]*ir.Node) {
// reportInitLoopAndExit reports and initialization loop as an error // reportInitLoopAndExit reports and initialization loop as an error
// and exits. However, if l is not actually an initialization loop, it // and exits. However, if l is not actually an initialization loop, it
// simply returns instead. // simply returns instead.
func reportInitLoopAndExit(l []*ir.Node) { func reportInitLoopAndExit(l []ir.Node) {
// Rotate loop so that the earliest variable declaration is at // Rotate loop so that the earliest variable declaration is at
// the start. // the start.
i := -1 i := -1
@ -250,7 +250,7 @@ func reportInitLoopAndExit(l []*ir.Node) {
// variables that declaration n depends on. If transitive is true, // variables that declaration n depends on. If transitive is true,
// then it also includes the transitive dependencies of any depended // then it also includes the transitive dependencies of any depended
// upon functions (but not variables). // upon functions (but not variables).
func collectDeps(n *ir.Node, transitive bool) ir.NodeSet { func collectDeps(n ir.Node, transitive bool) ir.NodeSet {
d := initDeps{transitive: transitive} d := initDeps{transitive: transitive}
switch n.Op() { switch n.Op() {
case ir.OAS: case ir.OAS:
@ -270,12 +270,12 @@ type initDeps struct {
seen ir.NodeSet seen ir.NodeSet
} }
func (d *initDeps) inspect(n *ir.Node) { ir.Inspect(n, d.visit) } func (d *initDeps) inspect(n ir.Node) { ir.Inspect(n, d.visit) }
func (d *initDeps) inspectList(l ir.Nodes) { ir.InspectList(l, d.visit) } func (d *initDeps) inspectList(l ir.Nodes) { ir.InspectList(l, d.visit) }
// visit calls foundDep on any package-level functions or variables // visit calls foundDep on any package-level functions or variables
// referenced by n, if any. // referenced by n, if any.
func (d *initDeps) visit(n *ir.Node) bool { func (d *initDeps) visit(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.OMETHEXPR: case ir.OMETHEXPR:
d.foundDep(methodExprName(n)) d.foundDep(methodExprName(n))
@ -299,7 +299,7 @@ func (d *initDeps) visit(n *ir.Node) bool {
// foundDep records that we've found a dependency on n by adding it to // foundDep records that we've found a dependency on n by adding it to
// seen. // seen.
func (d *initDeps) foundDep(n *ir.Node) { func (d *initDeps) foundDep(n ir.Node) {
// Can happen with method expressions involving interface // Can happen with method expressions involving interface
// types; e.g., fixedbugs/issue4495.go. // types; e.g., fixedbugs/issue4495.go.
if n == nil { if n == nil {
@ -328,7 +328,7 @@ func (d *initDeps) foundDep(n *ir.Node) {
// an OAS node's Pos may not be unique. For example, given the // an OAS node's Pos may not be unique. For example, given the
// declaration "var a, b = f(), g()", "a" must be ordered before "b", // declaration "var a, b = f(), g()", "a" must be ordered before "b",
// but both OAS nodes use the "=" token's position as their Pos. // but both OAS nodes use the "=" token's position as their Pos.
type declOrder []*ir.Node type declOrder []ir.Node
func (s declOrder) Len() int { return len(s) } func (s declOrder) Len() int { return len(s) }
func (s declOrder) Less(i, j int) bool { func (s declOrder) Less(i, j int) bool {
@ -336,7 +336,7 @@ func (s declOrder) Less(i, j int) bool {
} }
func (s declOrder) Swap(i, j int) { s[i], s[j] = s[j], s[i] } func (s declOrder) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(*ir.Node)) } func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(ir.Node)) }
func (s *declOrder) Pop() interface{} { func (s *declOrder) Pop() interface{} {
n := (*s)[len(*s)-1] n := (*s)[len(*s)-1]
*s = (*s)[:len(*s)-1] *s = (*s)[:len(*s)-1]
@ -345,7 +345,7 @@ func (s *declOrder) Pop() interface{} {
// firstLHS returns the first expression on the left-hand side of // firstLHS returns the first expression on the left-hand side of
// assignment n. // assignment n.
func firstLHS(n *ir.Node) *ir.Node { func firstLHS(n ir.Node) ir.Node {
switch n.Op() { switch n.Op() {
case ir.OAS: case ir.OAS:
return n.Left() return n.Left()

View file

@ -53,7 +53,7 @@ const (
// Get the function's package. For ordinary functions it's on the ->sym, but for imported methods // Get the function's package. For ordinary functions it's on the ->sym, but for imported methods
// the ->sym can be re-used in the local package, so peel it off the receiver's type. // the ->sym can be re-used in the local package, so peel it off the receiver's type.
func fnpkg(fn *ir.Node) *types.Pkg { func fnpkg(fn ir.Node) *types.Pkg {
if ir.IsMethod(fn) { if ir.IsMethod(fn) {
// method // method
rcvr := fn.Type().Recv().Type rcvr := fn.Type().Recv().Type
@ -73,7 +73,7 @@ func fnpkg(fn *ir.Node) *types.Pkg {
// Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck // Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck
// because they're a copy of an already checked body. // because they're a copy of an already checked body.
func typecheckinl(fn *ir.Node) { func typecheckinl(fn ir.Node) {
lno := setlineno(fn) lno := setlineno(fn)
expandInline(fn) expandInline(fn)
@ -111,7 +111,7 @@ func typecheckinl(fn *ir.Node) {
// Caninl determines whether fn is inlineable. // Caninl determines whether fn is inlineable.
// If so, caninl saves fn->nbody in fn->inl and substitutes it with a copy. // If so, caninl saves fn->nbody in fn->inl and substitutes it with a copy.
// fn and ->nbody will already have been typechecked. // fn and ->nbody will already have been typechecked.
func caninl(fn *ir.Node) { func caninl(fn ir.Node) {
if fn.Op() != ir.ODCLFUNC { if fn.Op() != ir.ODCLFUNC {
base.Fatalf("caninl %v", fn) base.Fatalf("caninl %v", fn)
} }
@ -207,7 +207,7 @@ func caninl(fn *ir.Node) {
visitor := hairyVisitor{ visitor := hairyVisitor{
budget: inlineMaxBudget, budget: inlineMaxBudget,
extraCallCost: cc, extraCallCost: cc,
usedLocals: make(map[*ir.Node]bool), usedLocals: make(map[ir.Node]bool),
} }
if visitor.visitList(fn.Body()) { if visitor.visitList(fn.Body()) {
reason = visitor.reason reason = visitor.reason
@ -236,7 +236,7 @@ func caninl(fn *ir.Node) {
// inlFlood marks n's inline body for export and recursively ensures // inlFlood marks n's inline body for export and recursively ensures
// all called functions are marked too. // all called functions are marked too.
func inlFlood(n *ir.Node) { func inlFlood(n ir.Node) {
if n == nil { if n == nil {
return return
} }
@ -260,7 +260,7 @@ func inlFlood(n *ir.Node) {
// Recursively identify all referenced functions for // Recursively identify all referenced functions for
// reexport. We want to include even non-called functions, // reexport. We want to include even non-called functions,
// because after inlining they might be callable. // because after inlining they might be callable.
ir.InspectList(ir.AsNodes(n.Func().Inl.Body), func(n *ir.Node) bool { ir.InspectList(ir.AsNodes(n.Func().Inl.Body), func(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.OMETHEXPR: case ir.OMETHEXPR:
inlFlood(methodExprName(n)) inlFlood(methodExprName(n))
@ -300,7 +300,7 @@ type hairyVisitor struct {
budget int32 budget int32
reason string reason string
extraCallCost int32 extraCallCost int32
usedLocals map[*ir.Node]bool usedLocals map[ir.Node]bool
} }
// Look for anything we want to punt on. // Look for anything we want to punt on.
@ -313,7 +313,7 @@ func (v *hairyVisitor) visitList(ll ir.Nodes) bool {
return false return false
} }
func (v *hairyVisitor) visit(n *ir.Node) bool { func (v *hairyVisitor) visit(n ir.Node) bool {
if n == nil { if n == nil {
return false return false
} }
@ -447,15 +447,15 @@ func (v *hairyVisitor) visit(n *ir.Node) bool {
// inlcopylist (together with inlcopy) recursively copies a list of nodes, except // inlcopylist (together with inlcopy) recursively copies a list of nodes, except
// that it keeps the same ONAME, OTYPE, and OLITERAL nodes. It is used for copying // that it keeps the same ONAME, OTYPE, and OLITERAL nodes. It is used for copying
// the body and dcls of an inlineable function. // the body and dcls of an inlineable function.
func inlcopylist(ll []*ir.Node) []*ir.Node { func inlcopylist(ll []ir.Node) []ir.Node {
s := make([]*ir.Node, 0, len(ll)) s := make([]ir.Node, 0, len(ll))
for _, n := range ll { for _, n := range ll {
s = append(s, inlcopy(n)) s = append(s, inlcopy(n))
} }
return s return s
} }
func inlcopy(n *ir.Node) *ir.Node { func inlcopy(n ir.Node) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -479,7 +479,7 @@ func inlcopy(n *ir.Node) *ir.Node {
return m return m
} }
func countNodes(n *ir.Node) int { func countNodes(n ir.Node) int {
if n == nil { if n == nil {
return 0 return 0
} }
@ -503,7 +503,7 @@ func countNodes(n *ir.Node) int {
// Inlcalls/nodelist/node walks fn's statements and expressions and substitutes any // Inlcalls/nodelist/node walks fn's statements and expressions and substitutes any
// calls made to inlineable functions. This is the external entry point. // calls made to inlineable functions. This is the external entry point.
func inlcalls(fn *ir.Node) { func inlcalls(fn ir.Node) {
savefn := Curfn savefn := Curfn
Curfn = fn Curfn = fn
maxCost := int32(inlineMaxBudget) maxCost := int32(inlineMaxBudget)
@ -516,7 +516,7 @@ func inlcalls(fn *ir.Node) {
// but allow inlining if there is a recursion cycle of many functions. // but allow inlining if there is a recursion cycle of many functions.
// Most likely, the inlining will stop before we even hit the beginning of // Most likely, the inlining will stop before we even hit the beginning of
// the cycle again, but the map catches the unusual case. // the cycle again, but the map catches the unusual case.
inlMap := make(map[*ir.Node]bool) inlMap := make(map[ir.Node]bool)
fn = inlnode(fn, maxCost, inlMap) fn = inlnode(fn, maxCost, inlMap)
if fn != Curfn { if fn != Curfn {
base.Fatalf("inlnode replaced curfn") base.Fatalf("inlnode replaced curfn")
@ -525,7 +525,7 @@ func inlcalls(fn *ir.Node) {
} }
// Turn an OINLCALL into a statement. // Turn an OINLCALL into a statement.
func inlconv2stmt(n *ir.Node) { func inlconv2stmt(n ir.Node) {
n.SetOp(ir.OBLOCK) n.SetOp(ir.OBLOCK)
// n->ninit stays // n->ninit stays
@ -538,7 +538,7 @@ func inlconv2stmt(n *ir.Node) {
// Turn an OINLCALL into a single valued expression. // Turn an OINLCALL into a single valued expression.
// The result of inlconv2expr MUST be assigned back to n, e.g. // The result of inlconv2expr MUST be assigned back to n, e.g.
// n.Left = inlconv2expr(n.Left) // n.Left = inlconv2expr(n.Left)
func inlconv2expr(n *ir.Node) *ir.Node { func inlconv2expr(n ir.Node) ir.Node {
r := n.Rlist().First() r := n.Rlist().First()
return addinit(r, append(n.Init().Slice(), n.Body().Slice()...)) return addinit(r, append(n.Init().Slice(), n.Body().Slice()...))
} }
@ -548,7 +548,7 @@ func inlconv2expr(n *ir.Node) *ir.Node {
// containing the inlined statements on the first list element so // containing the inlined statements on the first list element so
// order will be preserved Used in return, oas2func and call // order will be preserved Used in return, oas2func and call
// statements. // statements.
func inlconv2list(n *ir.Node) []*ir.Node { func inlconv2list(n ir.Node) []ir.Node {
if n.Op() != ir.OINLCALL || n.Rlist().Len() == 0 { if n.Op() != ir.OINLCALL || n.Rlist().Len() == 0 {
base.Fatalf("inlconv2list %+v\n", n) base.Fatalf("inlconv2list %+v\n", n)
} }
@ -558,7 +558,7 @@ func inlconv2list(n *ir.Node) []*ir.Node {
return s return s
} }
func inlnodelist(l ir.Nodes, maxCost int32, inlMap map[*ir.Node]bool) { func inlnodelist(l ir.Nodes, maxCost int32, inlMap map[ir.Node]bool) {
s := l.Slice() s := l.Slice()
for i := range s { for i := range s {
s[i] = inlnode(s[i], maxCost, inlMap) s[i] = inlnode(s[i], maxCost, inlMap)
@ -578,7 +578,7 @@ func inlnodelist(l ir.Nodes, maxCost int32, inlMap map[*ir.Node]bool) {
// shorter and less complicated. // shorter and less complicated.
// The result of inlnode MUST be assigned back to n, e.g. // The result of inlnode MUST be assigned back to n, e.g.
// n.Left = inlnode(n.Left) // n.Left = inlnode(n.Left)
func inlnode(n *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node { func inlnode(n ir.Node, maxCost int32, inlMap map[ir.Node]bool) ir.Node {
if n == nil { if n == nil {
return n return n
} }
@ -707,7 +707,7 @@ func inlnode(n *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node {
// inlCallee takes a function-typed expression and returns the underlying function ONAME // inlCallee takes a function-typed expression and returns the underlying function ONAME
// that it refers to if statically known. Otherwise, it returns nil. // that it refers to if statically known. Otherwise, it returns nil.
func inlCallee(fn *ir.Node) *ir.Node { func inlCallee(fn ir.Node) ir.Node {
fn = staticValue(fn) fn = staticValue(fn)
switch { switch {
case fn.Op() == ir.OMETHEXPR: case fn.Op() == ir.OMETHEXPR:
@ -729,7 +729,7 @@ func inlCallee(fn *ir.Node) *ir.Node {
return nil return nil
} }
func staticValue(n *ir.Node) *ir.Node { func staticValue(n ir.Node) ir.Node {
for { for {
if n.Op() == ir.OCONVNOP { if n.Op() == ir.OCONVNOP {
n = n.Left() n = n.Left()
@ -747,7 +747,7 @@ func staticValue(n *ir.Node) *ir.Node {
// staticValue1 implements a simple SSA-like optimization. If n is a local variable // staticValue1 implements a simple SSA-like optimization. If n is a local variable
// that is initialized and never reassigned, staticValue1 returns the initializer // that is initialized and never reassigned, staticValue1 returns the initializer
// expression. Otherwise, it returns nil. // expression. Otherwise, it returns nil.
func staticValue1(n *ir.Node) *ir.Node { func staticValue1(n ir.Node) ir.Node {
if n.Op() != ir.ONAME || n.Class() != ir.PAUTO || n.Name().Addrtaken() { if n.Op() != ir.ONAME || n.Class() != ir.PAUTO || n.Name().Addrtaken() {
return nil return nil
} }
@ -757,7 +757,7 @@ func staticValue1(n *ir.Node) *ir.Node {
return nil return nil
} }
var rhs *ir.Node var rhs ir.Node
FindRHS: FindRHS:
switch defn.Op() { switch defn.Op() {
case ir.OAS: case ir.OAS:
@ -791,7 +791,7 @@ FindRHS:
// useful for -m output documenting the reason for inhibited optimizations. // useful for -m output documenting the reason for inhibited optimizations.
// NB: global variables are always considered to be re-assigned. // NB: global variables are always considered to be re-assigned.
// TODO: handle initial declaration not including an assignment and followed by a single assignment? // TODO: handle initial declaration not including an assignment and followed by a single assignment?
func reassigned(n *ir.Node) (bool, *ir.Node) { func reassigned(n ir.Node) (bool, ir.Node) {
if n.Op() != ir.ONAME { if n.Op() != ir.ONAME {
base.Fatalf("reassigned %v", n) base.Fatalf("reassigned %v", n)
} }
@ -814,10 +814,10 @@ func reassigned(n *ir.Node) (bool, *ir.Node) {
} }
type reassignVisitor struct { type reassignVisitor struct {
name *ir.Node name ir.Node
} }
func (v *reassignVisitor) visit(n *ir.Node) *ir.Node { func (v *reassignVisitor) visit(n ir.Node) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -854,7 +854,7 @@ func (v *reassignVisitor) visit(n *ir.Node) *ir.Node {
return nil return nil
} }
func (v *reassignVisitor) visitList(l ir.Nodes) *ir.Node { func (v *reassignVisitor) visitList(l ir.Nodes) ir.Node {
for _, n := range l.Slice() { for _, n := range l.Slice() {
if a := v.visit(n); a != nil { if a := v.visit(n); a != nil {
return a return a
@ -863,7 +863,7 @@ func (v *reassignVisitor) visitList(l ir.Nodes) *ir.Node {
return nil return nil
} }
func inlParam(t *types.Field, as *ir.Node, inlvars map[*ir.Node]*ir.Node) *ir.Node { func inlParam(t *types.Field, as ir.Node, inlvars map[ir.Node]ir.Node) ir.Node {
n := ir.AsNode(t.Nname) n := ir.AsNode(t.Nname)
if n == nil || ir.IsBlank(n) { if n == nil || ir.IsBlank(n) {
return ir.BlankNode return ir.BlankNode
@ -887,7 +887,7 @@ var inlgen int
// parameters. // parameters.
// The result of mkinlcall MUST be assigned back to n, e.g. // The result of mkinlcall MUST be assigned back to n, e.g.
// n.Left = mkinlcall(n.Left, fn, isddd) // n.Left = mkinlcall(n.Left, fn, isddd)
func mkinlcall(n, fn *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node { func mkinlcall(n, fn ir.Node, maxCost int32, inlMap map[ir.Node]bool) ir.Node {
if fn.Func().Inl == nil { if fn.Func().Inl == nil {
if logopt.Enabled() { if logopt.Enabled() {
logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(Curfn), logopt.LogOpt(n.Pos(), "cannotInlineCall", "inline", ir.FuncName(Curfn),
@ -969,10 +969,10 @@ func mkinlcall(n, fn *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node
} }
// Make temp names to use instead of the originals. // Make temp names to use instead of the originals.
inlvars := make(map[*ir.Node]*ir.Node) inlvars := make(map[ir.Node]ir.Node)
// record formals/locals for later post-processing // record formals/locals for later post-processing
var inlfvars []*ir.Node var inlfvars []ir.Node
// Handle captured variables when inlining closures. // Handle captured variables when inlining closures.
if fn.Name().Defn != nil { if fn.Name().Defn != nil {
@ -1040,7 +1040,7 @@ func mkinlcall(n, fn *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node
} }
nreturns := 0 nreturns := 0
ir.InspectList(ir.AsNodes(fn.Func().Inl.Body), func(n *ir.Node) bool { ir.InspectList(ir.AsNodes(fn.Func().Inl.Body), func(n ir.Node) bool {
if n != nil && n.Op() == ir.ORETURN { if n != nil && n.Op() == ir.ORETURN {
nreturns++ nreturns++
} }
@ -1053,9 +1053,9 @@ func mkinlcall(n, fn *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node
delayretvars := nreturns == 1 delayretvars := nreturns == 1
// temporaries for return values. // temporaries for return values.
var retvars []*ir.Node var retvars []ir.Node
for i, t := range fn.Type().Results().Fields().Slice() { for i, t := range fn.Type().Results().Fields().Slice() {
var m *ir.Node var m ir.Node
if n := ir.AsNode(t.Nname); n != nil && !ir.IsBlank(n) && !strings.HasPrefix(n.Sym().Name, "~r") { if n := ir.AsNode(t.Nname); n != nil && !ir.IsBlank(n) && !strings.HasPrefix(n.Sym().Name, "~r") {
m = inlvar(n) m = inlvar(n)
m = typecheck(m, ctxExpr) m = typecheck(m, ctxExpr)
@ -1093,7 +1093,7 @@ func mkinlcall(n, fn *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node
// For non-dotted calls to variadic functions, we assign the // For non-dotted calls to variadic functions, we assign the
// variadic parameter's temp name separately. // variadic parameter's temp name separately.
var vas *ir.Node var vas ir.Node
if recv := fn.Type().Recv(); recv != nil { if recv := fn.Type().Recv(); recv != nil {
as.PtrList().Append(inlParam(recv, as, inlvars)) as.PtrList().Append(inlParam(recv, as, inlvars))
@ -1228,7 +1228,7 @@ func mkinlcall(n, fn *ir.Node, maxCost int32, inlMap map[*ir.Node]bool) *ir.Node
// Every time we expand a function we generate a new set of tmpnames, // Every time we expand a function we generate a new set of tmpnames,
// PAUTO's in the calling functions, and link them off of the // PAUTO's in the calling functions, and link them off of the
// PPARAM's, PAUTOS and PPARAMOUTs of the called function. // PPARAM's, PAUTOS and PPARAMOUTs of the called function.
func inlvar(var_ *ir.Node) *ir.Node { func inlvar(var_ ir.Node) ir.Node {
if base.Flag.LowerM > 3 { if base.Flag.LowerM > 3 {
fmt.Printf("inlvar %+v\n", var_) fmt.Printf("inlvar %+v\n", var_)
} }
@ -1245,7 +1245,7 @@ func inlvar(var_ *ir.Node) *ir.Node {
} }
// Synthesize a variable to store the inlined function's results in. // Synthesize a variable to store the inlined function's results in.
func retvar(t *types.Field, i int) *ir.Node { func retvar(t *types.Field, i int) ir.Node {
n := NewName(lookupN("~R", i)) n := NewName(lookupN("~R", i))
n.SetType(t.Type) n.SetType(t.Type)
n.SetClass(ir.PAUTO) n.SetClass(ir.PAUTO)
@ -1257,7 +1257,7 @@ func retvar(t *types.Field, i int) *ir.Node {
// Synthesize a variable to store the inlined function's arguments // Synthesize a variable to store the inlined function's arguments
// when they come from a multiple return call. // when they come from a multiple return call.
func argvar(t *types.Type, i int) *ir.Node { func argvar(t *types.Type, i int) ir.Node {
n := NewName(lookupN("~arg", i)) n := NewName(lookupN("~arg", i))
n.SetType(t.Elem()) n.SetType(t.Elem())
n.SetClass(ir.PAUTO) n.SetClass(ir.PAUTO)
@ -1274,13 +1274,13 @@ type inlsubst struct {
retlabel *types.Sym retlabel *types.Sym
// Temporary result variables. // Temporary result variables.
retvars []*ir.Node retvars []ir.Node
// Whether result variables should be initialized at the // Whether result variables should be initialized at the
// "return" statement. // "return" statement.
delayretvars bool delayretvars bool
inlvars map[*ir.Node]*ir.Node inlvars map[ir.Node]ir.Node
// bases maps from original PosBase to PosBase with an extra // bases maps from original PosBase to PosBase with an extra
// inlined call frame. // inlined call frame.
@ -1292,8 +1292,8 @@ type inlsubst struct {
} }
// list inlines a list of nodes. // list inlines a list of nodes.
func (subst *inlsubst) list(ll ir.Nodes) []*ir.Node { func (subst *inlsubst) list(ll ir.Nodes) []ir.Node {
s := make([]*ir.Node, 0, ll.Len()) s := make([]ir.Node, 0, ll.Len())
for _, n := range ll.Slice() { for _, n := range ll.Slice() {
s = append(s, subst.node(n)) s = append(s, subst.node(n))
} }
@ -1304,7 +1304,7 @@ func (subst *inlsubst) list(ll ir.Nodes) []*ir.Node {
// inlined function, substituting references to input/output // inlined function, substituting references to input/output
// parameters with ones to the tmpnames, and substituting returns with // parameters with ones to the tmpnames, and substituting returns with
// assignments to the output. // assignments to the output.
func (subst *inlsubst) node(n *ir.Node) *ir.Node { func (subst *inlsubst) node(n ir.Node) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -1409,8 +1409,8 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
return base.Ctxt.PosTable.XPos(pos) return base.Ctxt.PosTable.XPos(pos)
} }
func pruneUnusedAutos(ll []*ir.Node, vis *hairyVisitor) []*ir.Node { func pruneUnusedAutos(ll []ir.Node, vis *hairyVisitor) []ir.Node {
s := make([]*ir.Node, 0, len(ll)) s := make([]ir.Node, 0, len(ll))
for _, n := range ll { for _, n := range ll {
if n.Class() == ir.PAUTO { if n.Class() == ir.PAUTO {
if _, found := vis.usedLocals[n]; !found { if _, found := vis.usedLocals[n]; !found {
@ -1424,9 +1424,9 @@ func pruneUnusedAutos(ll []*ir.Node, vis *hairyVisitor) []*ir.Node {
// devirtualize replaces interface method calls within fn with direct // devirtualize replaces interface method calls within fn with direct
// concrete-type method calls where applicable. // concrete-type method calls where applicable.
func devirtualize(fn *ir.Node) { func devirtualize(fn ir.Node) {
Curfn = fn Curfn = fn
ir.InspectList(fn.Body(), func(n *ir.Node) bool { ir.InspectList(fn.Body(), func(n ir.Node) bool {
if n.Op() == ir.OCALLINTER { if n.Op() == ir.OCALLINTER {
devirtualizeCall(n) devirtualizeCall(n)
} }
@ -1434,7 +1434,7 @@ func devirtualize(fn *ir.Node) {
}) })
} }
func devirtualizeCall(call *ir.Node) { func devirtualizeCall(call ir.Node) {
recv := staticValue(call.Left().Left()) recv := staticValue(call.Left().Left())
if recv.Op() != ir.OCONVIFACE { if recv.Op() != ir.OCONVIFACE {
return return

View file

@ -330,7 +330,7 @@ func Main(archInit func(*Arch)) {
if base.Flag.LowerL != 0 { if base.Flag.LowerL != 0 {
// Find functions that can be inlined and clone them before walk expands them. // Find functions that can be inlined and clone them before walk expands them.
visitBottomUp(xtop, func(list []*ir.Node, recursive bool) { visitBottomUp(xtop, func(list []ir.Node, recursive bool) {
numfns := numNonClosures(list) numfns := numNonClosures(list)
for _, n := range list { for _, n := range list {
if !recursive || numfns > 1 { if !recursive || numfns > 1 {
@ -481,7 +481,7 @@ func Main(archInit func(*Arch)) {
} }
// numNonClosures returns the number of functions in list which are not closures. // numNonClosures returns the number of functions in list which are not closures.
func numNonClosures(list []*ir.Node) int { func numNonClosures(list []ir.Node) int {
count := 0 count := 0
for _, n := range list { for _, n := range list {
if n.Func().OClosure == nil { if n.Func().OClosure == nil {

View file

@ -207,7 +207,7 @@ func (i *typeInterner) fields(fl *ast.FieldList, keepNames bool) string {
} }
} }
} }
return fmt.Sprintf("[]*ir.Node{%s}", strings.Join(res, ", ")) return fmt.Sprintf("[]ir.Node{%s}", strings.Join(res, ", "))
} }
func intconst(e ast.Expr) int64 { func intconst(e ast.Expr) int64 {

View file

@ -152,7 +152,7 @@ type noder struct {
lastCloseScopePos syntax.Pos lastCloseScopePos syntax.Pos
} }
func (p *noder) funcBody(fn *ir.Node, block *syntax.BlockStmt) { func (p *noder) funcBody(fn ir.Node, block *syntax.BlockStmt) {
oldScope := p.scope oldScope := p.scope
p.scope = 0 p.scope = 0
funchdr(fn) funchdr(fn)
@ -160,7 +160,7 @@ func (p *noder) funcBody(fn *ir.Node, block *syntax.BlockStmt) {
if block != nil { if block != nil {
body := p.stmts(block.List) body := p.stmts(block.List)
if body == nil { if body == nil {
body = []*ir.Node{ir.Nod(ir.OEMPTY, nil, nil)} body = []ir.Node{ir.Nod(ir.OEMPTY, nil, nil)}
} }
fn.PtrBody().Set(body) fn.PtrBody().Set(body)
@ -294,7 +294,7 @@ func (p *noder) node() {
clearImports() clearImports()
} }
func (p *noder) decls(decls []syntax.Decl) (l []*ir.Node) { func (p *noder) decls(decls []syntax.Decl) (l []ir.Node) {
var cs constState var cs constState
for _, decl := range decls { for _, decl := range decls {
@ -378,11 +378,11 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
my.Block = 1 // at top level my.Block = 1 // at top level
} }
func (p *noder) varDecl(decl *syntax.VarDecl) []*ir.Node { func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
names := p.declNames(decl.NameList) names := p.declNames(decl.NameList)
typ := p.typeExprOrNil(decl.Type) typ := p.typeExprOrNil(decl.Type)
var exprs []*ir.Node var exprs []ir.Node
if decl.Values != nil { if decl.Values != nil {
exprs = p.exprList(decl.Values) exprs = p.exprList(decl.Values)
} }
@ -414,12 +414,12 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []*ir.Node {
// constant declarations are handled correctly (e.g., issue 15550). // constant declarations are handled correctly (e.g., issue 15550).
type constState struct { type constState struct {
group *syntax.Group group *syntax.Group
typ *ir.Node typ ir.Node
values []*ir.Node values []ir.Node
iota int64 iota int64
} }
func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*ir.Node { func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
if decl.Group == nil || decl.Group != cs.group { if decl.Group == nil || decl.Group != cs.group {
*cs = constState{ *cs = constState{
group: decl.Group, group: decl.Group,
@ -433,7 +433,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*ir.Node {
names := p.declNames(decl.NameList) names := p.declNames(decl.NameList)
typ := p.typeExprOrNil(decl.Type) typ := p.typeExprOrNil(decl.Type)
var values []*ir.Node var values []ir.Node
if decl.Values != nil { if decl.Values != nil {
values = p.exprList(decl.Values) values = p.exprList(decl.Values)
cs.typ, cs.values = typ, values cs.typ, cs.values = typ, values
@ -444,7 +444,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*ir.Node {
typ, values = cs.typ, cs.values typ, values = cs.typ, cs.values
} }
nn := make([]*ir.Node, 0, len(names)) nn := make([]ir.Node, 0, len(names))
for i, n := range names { for i, n := range names {
if i >= len(values) { if i >= len(values) {
base.Errorf("missing value in const declaration") base.Errorf("missing value in const declaration")
@ -474,7 +474,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*ir.Node {
return nn return nn
} }
func (p *noder) typeDecl(decl *syntax.TypeDecl) *ir.Node { func (p *noder) typeDecl(decl *syntax.TypeDecl) ir.Node {
n := p.declName(decl.Name) n := p.declName(decl.Name)
n.SetOp(ir.OTYPE) n.SetOp(ir.OTYPE)
declare(n, dclcontext) declare(n, dclcontext)
@ -500,21 +500,21 @@ func (p *noder) typeDecl(decl *syntax.TypeDecl) *ir.Node {
return nod return nod
} }
func (p *noder) declNames(names []*syntax.Name) []*ir.Node { func (p *noder) declNames(names []*syntax.Name) []ir.Node {
nodes := make([]*ir.Node, 0, len(names)) nodes := make([]ir.Node, 0, len(names))
for _, name := range names { for _, name := range names {
nodes = append(nodes, p.declName(name)) nodes = append(nodes, p.declName(name))
} }
return nodes return nodes
} }
func (p *noder) declName(name *syntax.Name) *ir.Node { func (p *noder) declName(name *syntax.Name) ir.Node {
n := dclname(p.name(name)) n := dclname(p.name(name))
n.SetPos(p.pos(name)) n.SetPos(p.pos(name))
return n return n
} }
func (p *noder) funcDecl(fun *syntax.FuncDecl) *ir.Node { func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node {
name := p.name(fun.Name) name := p.name(fun.Name)
t := p.signature(fun.Recv, fun.Type) t := p.signature(fun.Recv, fun.Type)
f := p.nod(fun, ir.ODCLFUNC, nil, nil) f := p.nod(fun, ir.ODCLFUNC, nil, nil)
@ -580,7 +580,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *ir.Node {
return f return f
} }
func (p *noder) signature(recv *syntax.Field, typ *syntax.FuncType) *ir.Node { func (p *noder) signature(recv *syntax.Field, typ *syntax.FuncType) ir.Node {
n := p.nod(typ, ir.OTFUNC, nil, nil) n := p.nod(typ, ir.OTFUNC, nil, nil)
if recv != nil { if recv != nil {
n.SetLeft(p.param(recv, false, false)) n.SetLeft(p.param(recv, false, false))
@ -590,8 +590,8 @@ func (p *noder) signature(recv *syntax.Field, typ *syntax.FuncType) *ir.Node {
return n return n
} }
func (p *noder) params(params []*syntax.Field, dddOk bool) []*ir.Node { func (p *noder) params(params []*syntax.Field, dddOk bool) []ir.Node {
nodes := make([]*ir.Node, 0, len(params)) nodes := make([]ir.Node, 0, len(params))
for i, param := range params { for i, param := range params {
p.setlineno(param) p.setlineno(param)
nodes = append(nodes, p.param(param, dddOk, i+1 == len(params))) nodes = append(nodes, p.param(param, dddOk, i+1 == len(params)))
@ -599,7 +599,7 @@ func (p *noder) params(params []*syntax.Field, dddOk bool) []*ir.Node {
return nodes return nodes
} }
func (p *noder) param(param *syntax.Field, dddOk, final bool) *ir.Node { func (p *noder) param(param *syntax.Field, dddOk, final bool) ir.Node {
var name *types.Sym var name *types.Sym
if param.Name != nil { if param.Name != nil {
name = p.name(param.Name) name = p.name(param.Name)
@ -633,22 +633,22 @@ func (p *noder) param(param *syntax.Field, dddOk, final bool) *ir.Node {
return n return n
} }
func (p *noder) exprList(expr syntax.Expr) []*ir.Node { func (p *noder) exprList(expr syntax.Expr) []ir.Node {
if list, ok := expr.(*syntax.ListExpr); ok { if list, ok := expr.(*syntax.ListExpr); ok {
return p.exprs(list.ElemList) return p.exprs(list.ElemList)
} }
return []*ir.Node{p.expr(expr)} return []ir.Node{p.expr(expr)}
} }
func (p *noder) exprs(exprs []syntax.Expr) []*ir.Node { func (p *noder) exprs(exprs []syntax.Expr) []ir.Node {
nodes := make([]*ir.Node, 0, len(exprs)) nodes := make([]ir.Node, 0, len(exprs))
for _, expr := range exprs { for _, expr := range exprs {
nodes = append(nodes, p.expr(expr)) nodes = append(nodes, p.expr(expr))
} }
return nodes return nodes
} }
func (p *noder) expr(expr syntax.Expr) *ir.Node { func (p *noder) expr(expr syntax.Expr) ir.Node {
p.setlineno(expr) p.setlineno(expr)
switch expr := expr.(type) { switch expr := expr.(type) {
case nil, *syntax.BadExpr: case nil, *syntax.BadExpr:
@ -699,7 +699,7 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
op = ir.OSLICE3 op = ir.OSLICE3
} }
n := p.nod(expr, op, p.expr(expr.X), nil) n := p.nod(expr, op, p.expr(expr.X), nil)
var index [3]*ir.Node var index [3]ir.Node
for i, x := range &expr.Index { for i, x := range &expr.Index {
if x != nil { if x != nil {
index[i] = p.expr(x) index[i] = p.expr(x)
@ -725,7 +725,7 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
return n return n
case *syntax.ArrayType: case *syntax.ArrayType:
var len *ir.Node var len ir.Node
if expr.Len != nil { if expr.Len != nil {
len = p.expr(expr.Len) len = p.expr(expr.Len)
} else { } else {
@ -765,7 +765,7 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
// sum efficiently handles very large summation expressions (such as // sum efficiently handles very large summation expressions (such as
// in issue #16394). In particular, it avoids left recursion and // in issue #16394). In particular, it avoids left recursion and
// collapses string literals. // collapses string literals.
func (p *noder) sum(x syntax.Expr) *ir.Node { func (p *noder) sum(x syntax.Expr) ir.Node {
// While we need to handle long sums with asymptotic // While we need to handle long sums with asymptotic
// efficiency, the vast majority of sums are very small: ~95% // efficiency, the vast majority of sums are very small: ~95%
// have only 2 or 3 operands, and ~99% of string literals are // have only 2 or 3 operands, and ~99% of string literals are
@ -800,7 +800,7 @@ func (p *noder) sum(x syntax.Expr) *ir.Node {
// handle correctly. For now, we avoid these problems by // handle correctly. For now, we avoid these problems by
// treating named string constants the same as non-constant // treating named string constants the same as non-constant
// operands. // operands.
var nstr *ir.Node var nstr ir.Node
chunks := make([]string, 0, 1) chunks := make([]string, 0, 1)
n := p.expr(x) n := p.expr(x)
@ -838,12 +838,12 @@ func (p *noder) sum(x syntax.Expr) *ir.Node {
return n return n
} }
func (p *noder) typeExpr(typ syntax.Expr) *ir.Node { func (p *noder) typeExpr(typ syntax.Expr) ir.Node {
// TODO(mdempsky): Be stricter? typecheck should handle errors anyway. // TODO(mdempsky): Be stricter? typecheck should handle errors anyway.
return p.expr(typ) return p.expr(typ)
} }
func (p *noder) typeExprOrNil(typ syntax.Expr) *ir.Node { func (p *noder) typeExprOrNil(typ syntax.Expr) ir.Node {
if typ != nil { if typ != nil {
return p.expr(typ) return p.expr(typ)
} }
@ -862,11 +862,11 @@ func (p *noder) chanDir(dir syntax.ChanDir) types.ChanDir {
panic("unhandled ChanDir") panic("unhandled ChanDir")
} }
func (p *noder) structType(expr *syntax.StructType) *ir.Node { func (p *noder) structType(expr *syntax.StructType) ir.Node {
l := make([]*ir.Node, 0, len(expr.FieldList)) l := make([]ir.Node, 0, len(expr.FieldList))
for i, field := range expr.FieldList { for i, field := range expr.FieldList {
p.setlineno(field) p.setlineno(field)
var n *ir.Node var n ir.Node
if field.Name == nil { if field.Name == nil {
n = p.embedded(field.Type) n = p.embedded(field.Type)
} else { } else {
@ -884,11 +884,11 @@ func (p *noder) structType(expr *syntax.StructType) *ir.Node {
return n return n
} }
func (p *noder) interfaceType(expr *syntax.InterfaceType) *ir.Node { func (p *noder) interfaceType(expr *syntax.InterfaceType) ir.Node {
l := make([]*ir.Node, 0, len(expr.MethodList)) l := make([]ir.Node, 0, len(expr.MethodList))
for _, method := range expr.MethodList { for _, method := range expr.MethodList {
p.setlineno(method) p.setlineno(method)
var n *ir.Node var n ir.Node
if method.Name == nil { if method.Name == nil {
n = p.nodSym(method, ir.ODCLFIELD, importName(p.packname(method.Type)), nil) n = p.nodSym(method, ir.ODCLFIELD, importName(p.packname(method.Type)), nil)
} else { } else {
@ -934,7 +934,7 @@ func (p *noder) packname(expr syntax.Expr) *types.Sym {
panic(fmt.Sprintf("unexpected packname: %#v", expr)) panic(fmt.Sprintf("unexpected packname: %#v", expr))
} }
func (p *noder) embedded(typ syntax.Expr) *ir.Node { func (p *noder) embedded(typ syntax.Expr) ir.Node {
op, isStar := typ.(*syntax.Operation) op, isStar := typ.(*syntax.Operation)
if isStar { if isStar {
if op.Op != syntax.Mul || op.Y != nil { if op.Op != syntax.Mul || op.Y != nil {
@ -953,12 +953,12 @@ func (p *noder) embedded(typ syntax.Expr) *ir.Node {
return n return n
} }
func (p *noder) stmts(stmts []syntax.Stmt) []*ir.Node { func (p *noder) stmts(stmts []syntax.Stmt) []ir.Node {
return p.stmtsFall(stmts, false) return p.stmtsFall(stmts, false)
} }
func (p *noder) stmtsFall(stmts []syntax.Stmt, fallOK bool) []*ir.Node { func (p *noder) stmtsFall(stmts []syntax.Stmt, fallOK bool) []ir.Node {
var nodes []*ir.Node var nodes []ir.Node
for i, stmt := range stmts { for i, stmt := range stmts {
s := p.stmtFall(stmt, fallOK && i+1 == len(stmts)) s := p.stmtFall(stmt, fallOK && i+1 == len(stmts))
if s == nil { if s == nil {
@ -971,11 +971,11 @@ func (p *noder) stmtsFall(stmts []syntax.Stmt, fallOK bool) []*ir.Node {
return nodes return nodes
} }
func (p *noder) stmt(stmt syntax.Stmt) *ir.Node { func (p *noder) stmt(stmt syntax.Stmt) ir.Node {
return p.stmtFall(stmt, false) return p.stmtFall(stmt, false)
} }
func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *ir.Node { func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
p.setlineno(stmt) p.setlineno(stmt)
switch stmt := stmt.(type) { switch stmt := stmt.(type) {
case *syntax.EmptyStmt: case *syntax.EmptyStmt:
@ -1053,7 +1053,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *ir.Node {
} }
return p.nod(stmt, op, p.expr(stmt.Call), nil) return p.nod(stmt, op, p.expr(stmt.Call), nil)
case *syntax.ReturnStmt: case *syntax.ReturnStmt:
var results []*ir.Node var results []ir.Node
if stmt.Results != nil { if stmt.Results != nil {
results = p.exprList(stmt.Results) results = p.exprList(stmt.Results)
} }
@ -1085,7 +1085,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *ir.Node {
panic("unhandled Stmt") panic("unhandled Stmt")
} }
func (p *noder) assignList(expr syntax.Expr, defn *ir.Node, colas bool) []*ir.Node { func (p *noder) assignList(expr syntax.Expr, defn ir.Node, colas bool) []ir.Node {
if !colas { if !colas {
return p.exprList(expr) return p.exprList(expr)
} }
@ -1099,7 +1099,7 @@ func (p *noder) assignList(expr syntax.Expr, defn *ir.Node, colas bool) []*ir.No
exprs = []syntax.Expr{expr} exprs = []syntax.Expr{expr}
} }
res := make([]*ir.Node, len(exprs)) res := make([]ir.Node, len(exprs))
seen := make(map[*types.Sym]bool, len(exprs)) seen := make(map[*types.Sym]bool, len(exprs))
newOrErr := false newOrErr := false
@ -1145,14 +1145,14 @@ func (p *noder) assignList(expr syntax.Expr, defn *ir.Node, colas bool) []*ir.No
return res return res
} }
func (p *noder) blockStmt(stmt *syntax.BlockStmt) []*ir.Node { func (p *noder) blockStmt(stmt *syntax.BlockStmt) []ir.Node {
p.openScope(stmt.Pos()) p.openScope(stmt.Pos())
nodes := p.stmts(stmt.List) nodes := p.stmts(stmt.List)
p.closeScope(stmt.Rbrace) p.closeScope(stmt.Rbrace)
return nodes return nodes
} }
func (p *noder) ifStmt(stmt *syntax.IfStmt) *ir.Node { func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node {
p.openScope(stmt.Pos()) p.openScope(stmt.Pos())
n := p.nod(stmt, ir.OIF, nil, nil) n := p.nod(stmt, ir.OIF, nil, nil)
if stmt.Init != nil { if stmt.Init != nil {
@ -1174,9 +1174,9 @@ func (p *noder) ifStmt(stmt *syntax.IfStmt) *ir.Node {
return n return n
} }
func (p *noder) forStmt(stmt *syntax.ForStmt) *ir.Node { func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
p.openScope(stmt.Pos()) p.openScope(stmt.Pos())
var n *ir.Node var n ir.Node
if r, ok := stmt.Init.(*syntax.RangeClause); ok { if r, ok := stmt.Init.(*syntax.RangeClause); ok {
if stmt.Cond != nil || stmt.Post != nil { if stmt.Cond != nil || stmt.Post != nil {
panic("unexpected RangeClause") panic("unexpected RangeClause")
@ -1203,7 +1203,7 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) *ir.Node {
return n return n
} }
func (p *noder) switchStmt(stmt *syntax.SwitchStmt) *ir.Node { func (p *noder) switchStmt(stmt *syntax.SwitchStmt) ir.Node {
p.openScope(stmt.Pos()) p.openScope(stmt.Pos())
n := p.nod(stmt, ir.OSWITCH, nil, nil) n := p.nod(stmt, ir.OSWITCH, nil, nil)
if stmt.Init != nil { if stmt.Init != nil {
@ -1223,8 +1223,8 @@ func (p *noder) switchStmt(stmt *syntax.SwitchStmt) *ir.Node {
return n return n
} }
func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.Node, rbrace syntax.Pos) []*ir.Node { func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch ir.Node, rbrace syntax.Pos) []ir.Node {
nodes := make([]*ir.Node, 0, len(clauses)) nodes := make([]ir.Node, 0, len(clauses))
for i, clause := range clauses { for i, clause := range clauses {
p.setlineno(clause) p.setlineno(clause)
if i > 0 { if i > 0 {
@ -1273,14 +1273,14 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.Node, rbra
return nodes return nodes
} }
func (p *noder) selectStmt(stmt *syntax.SelectStmt) *ir.Node { func (p *noder) selectStmt(stmt *syntax.SelectStmt) ir.Node {
n := p.nod(stmt, ir.OSELECT, nil, nil) n := p.nod(stmt, ir.OSELECT, nil, nil)
n.PtrList().Set(p.commClauses(stmt.Body, stmt.Rbrace)) n.PtrList().Set(p.commClauses(stmt.Body, stmt.Rbrace))
return n return n
} }
func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []*ir.Node { func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []ir.Node {
nodes := make([]*ir.Node, 0, len(clauses)) nodes := make([]ir.Node, 0, len(clauses))
for i, clause := range clauses { for i, clause := range clauses {
p.setlineno(clause) p.setlineno(clause)
if i > 0 { if i > 0 {
@ -1301,16 +1301,16 @@ func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []*
return nodes return nodes
} }
func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) *ir.Node { func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) ir.Node {
lhs := p.nodSym(label, ir.OLABEL, nil, p.name(label.Label)) lhs := p.nodSym(label, ir.OLABEL, nil, p.name(label.Label))
var ls *ir.Node var ls ir.Node
if label.Stmt != nil { // TODO(mdempsky): Should always be present. if label.Stmt != nil { // TODO(mdempsky): Should always be present.
ls = p.stmtFall(label.Stmt, fallOK) ls = p.stmtFall(label.Stmt, fallOK)
} }
lhs.Name().Defn = ls lhs.Name().Defn = ls
l := []*ir.Node{lhs} l := []ir.Node{lhs}
if ls != nil { if ls != nil {
if ls.Op() == ir.OBLOCK && ls.Init().Len() == 0 { if ls.Op() == ir.OBLOCK && ls.Init().Len() == 0 {
l = append(l, ls.List().Slice()...) l = append(l, ls.List().Slice()...)
@ -1443,12 +1443,12 @@ func (p *noder) name(name *syntax.Name) *types.Sym {
return lookup(name.Value) return lookup(name.Value)
} }
func (p *noder) mkname(name *syntax.Name) *ir.Node { func (p *noder) mkname(name *syntax.Name) ir.Node {
// TODO(mdempsky): Set line number? // TODO(mdempsky): Set line number?
return mkname(p.name(name)) return mkname(p.name(name))
} }
func (p *noder) wrapname(n syntax.Node, x *ir.Node) *ir.Node { func (p *noder) wrapname(n syntax.Node, x ir.Node) ir.Node {
// These nodes do not carry line numbers. // These nodes do not carry line numbers.
// Introduce a wrapper node to give them the correct line. // Introduce a wrapper node to give them the correct line.
switch x.Op() { switch x.Op() {
@ -1464,11 +1464,11 @@ func (p *noder) wrapname(n syntax.Node, x *ir.Node) *ir.Node {
return x return x
} }
func (p *noder) nod(orig syntax.Node, op ir.Op, left, right *ir.Node) *ir.Node { func (p *noder) nod(orig syntax.Node, op ir.Op, left, right ir.Node) ir.Node {
return ir.NodAt(p.pos(orig), op, left, right) return ir.NodAt(p.pos(orig), op, left, right)
} }
func (p *noder) nodSym(orig syntax.Node, op ir.Op, left *ir.Node, sym *types.Sym) *ir.Node { func (p *noder) nodSym(orig syntax.Node, op ir.Op, left ir.Node, sym *types.Sym) ir.Node {
n := nodSym(op, left, sym) n := nodSym(op, left, sym)
n.SetPos(p.pos(orig)) n.SetPos(p.pos(orig))
return n return n
@ -1668,7 +1668,7 @@ func safeArg(name string) bool {
return '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || c == '.' || c == '_' || c == '/' || c >= utf8.RuneSelf return '0' <= c && c <= '9' || 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || c == '.' || c == '_' || c == '/' || c >= utf8.RuneSelf
} }
func mkname(sym *types.Sym) *ir.Node { func mkname(sym *types.Sym) ir.Node {
n := oldname(sym) n := oldname(sym)
if n.Name() != nil && n.Name().Pack != nil { if n.Name() != nil && n.Name().Pack != nil {
n.Name().Pack.Name().SetUsed(true) n.Name().Pack.Name().SetUsed(true)

View file

@ -228,7 +228,7 @@ func addptabs() {
} }
} }
func dumpGlobal(n *ir.Node) { func dumpGlobal(n ir.Node) {
if n.Type() == nil { if n.Type() == nil {
base.Fatalf("external %v nil type\n", n) base.Fatalf("external %v nil type\n", n)
} }
@ -242,7 +242,7 @@ func dumpGlobal(n *ir.Node) {
ggloblnod(n) ggloblnod(n)
} }
func dumpGlobalConst(n *ir.Node) { func dumpGlobalConst(n ir.Node) {
// only export typed constants // only export typed constants
t := n.Type() t := n.Type()
if t == nil { if t == nil {
@ -475,7 +475,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
var slicedataGen int var slicedataGen int
func slicedata(pos src.XPos, s string) *ir.Node { func slicedata(pos src.XPos, s string) ir.Node {
slicedataGen++ slicedataGen++
symname := fmt.Sprintf(".gobytes.%d", slicedataGen) symname := fmt.Sprintf(".gobytes.%d", slicedataGen)
sym := ir.LocalPkg.Lookup(symname) sym := ir.LocalPkg.Lookup(symname)
@ -489,7 +489,7 @@ func slicedata(pos src.XPos, s string) *ir.Node {
return symnode return symnode
} }
func slicebytes(nam *ir.Node, s string) { func slicebytes(nam ir.Node, s string) {
if nam.Op() != ir.ONAME { if nam.Op() != ir.ONAME {
base.Fatalf("slicebytes %v", nam) base.Fatalf("slicebytes %v", nam)
} }
@ -530,7 +530,7 @@ func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int {
// slicesym writes a static slice symbol {&arr, lencap, lencap} to n. // slicesym writes a static slice symbol {&arr, lencap, lencap} to n.
// arr must be an ONAME. slicesym does not modify n. // arr must be an ONAME. slicesym does not modify n.
func slicesym(n, arr *ir.Node, lencap int64) { func slicesym(n, arr ir.Node, lencap int64) {
s := n.Sym().Linksym() s := n.Sym().Linksym()
off := n.Offset() off := n.Offset()
if arr.Op() != ir.ONAME { if arr.Op() != ir.ONAME {
@ -543,7 +543,7 @@ func slicesym(n, arr *ir.Node, lencap int64) {
// addrsym writes the static address of a to n. a must be an ONAME. // addrsym writes the static address of a to n. a must be an ONAME.
// Neither n nor a is modified. // Neither n nor a is modified.
func addrsym(n, a *ir.Node) { func addrsym(n, a ir.Node) {
if n.Op() != ir.ONAME { if n.Op() != ir.ONAME {
base.Fatalf("addrsym n op %v", n.Op()) base.Fatalf("addrsym n op %v", n.Op())
} }
@ -559,7 +559,7 @@ func addrsym(n, a *ir.Node) {
// pfuncsym writes the static address of f to n. f must be a global function. // pfuncsym writes the static address of f to n. f must be a global function.
// Neither n nor f is modified. // Neither n nor f is modified.
func pfuncsym(n, f *ir.Node) { func pfuncsym(n, f ir.Node) {
if n.Op() != ir.ONAME { if n.Op() != ir.ONAME {
base.Fatalf("pfuncsym n op %v", n.Op()) base.Fatalf("pfuncsym n op %v", n.Op())
} }
@ -575,7 +575,7 @@ func pfuncsym(n, f *ir.Node) {
// litsym writes the static literal c to n. // litsym writes the static literal c to n.
// Neither n nor c is modified. // Neither n nor c is modified.
func litsym(n, c *ir.Node, wid int) { func litsym(n, c ir.Node, wid int) {
if n.Op() != ir.ONAME { if n.Op() != ir.ONAME {
base.Fatalf("litsym n op %v", n.Op()) base.Fatalf("litsym n op %v", n.Op())
} }

View file

@ -44,27 +44,27 @@ import (
// Order holds state during the ordering process. // Order holds state during the ordering process.
type Order struct { type Order struct {
out []*ir.Node // list of generated statements out []ir.Node // list of generated statements
temp []*ir.Node // stack of temporary variables temp []ir.Node // stack of temporary variables
free map[string][]*ir.Node // free list of unused temporaries, by type.LongString(). free map[string][]ir.Node // free list of unused temporaries, by type.LongString().
} }
// Order rewrites fn.Nbody to apply the ordering constraints // Order rewrites fn.Nbody to apply the ordering constraints
// described in the comment at the top of the file. // described in the comment at the top of the file.
func order(fn *ir.Node) { func order(fn ir.Node) {
if base.Flag.W > 1 { if base.Flag.W > 1 {
s := fmt.Sprintf("\nbefore order %v", fn.Func().Nname.Sym()) s := fmt.Sprintf("\nbefore order %v", fn.Func().Nname.Sym())
ir.DumpList(s, fn.Body()) ir.DumpList(s, fn.Body())
} }
orderBlock(fn.PtrBody(), map[string][]*ir.Node{}) orderBlock(fn.PtrBody(), map[string][]ir.Node{})
} }
// newTemp allocates a new temporary with the given type, // newTemp allocates a new temporary with the given type,
// pushes it onto the temp stack, and returns it. // pushes it onto the temp stack, and returns it.
// If clear is true, newTemp emits code to zero the temporary. // If clear is true, newTemp emits code to zero the temporary.
func (o *Order) newTemp(t *types.Type, clear bool) *ir.Node { func (o *Order) newTemp(t *types.Type, clear bool) ir.Node {
var v *ir.Node var v ir.Node
// Note: LongString is close to the type equality we want, // Note: LongString is close to the type equality we want,
// but not exactly. We still need to double-check with types.Identical. // but not exactly. We still need to double-check with types.Identical.
key := t.LongString() key := t.LongString()
@ -103,7 +103,7 @@ func (o *Order) newTemp(t *types.Type, clear bool) *ir.Node {
// (The other candidate would be map access, but map access // (The other candidate would be map access, but map access
// returns a pointer to the result data instead of taking a pointer // returns a pointer to the result data instead of taking a pointer
// to be filled in.) // to be filled in.)
func (o *Order) copyExpr(n *ir.Node, t *types.Type, clear bool) *ir.Node { func (o *Order) copyExpr(n ir.Node, t *types.Type, clear bool) ir.Node {
v := o.newTemp(t, clear) v := o.newTemp(t, clear)
a := ir.Nod(ir.OAS, v, n) a := ir.Nod(ir.OAS, v, n)
a = typecheck(a, ctxStmt) a = typecheck(a, ctxStmt)
@ -115,7 +115,7 @@ func (o *Order) copyExpr(n *ir.Node, t *types.Type, clear bool) *ir.Node {
// The definition of cheap is that n is a variable or constant. // The definition of cheap is that n is a variable or constant.
// If not, cheapExpr allocates a new tmp, emits tmp = n, // If not, cheapExpr allocates a new tmp, emits tmp = n,
// and then returns tmp. // and then returns tmp.
func (o *Order) cheapExpr(n *ir.Node) *ir.Node { func (o *Order) cheapExpr(n ir.Node) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -143,7 +143,7 @@ func (o *Order) cheapExpr(n *ir.Node) *ir.Node {
// as assigning to the original n. // as assigning to the original n.
// //
// The intended use is to apply to x when rewriting x += y into x = x + y. // The intended use is to apply to x when rewriting x += y into x = x + y.
func (o *Order) safeExpr(n *ir.Node) *ir.Node { func (o *Order) safeExpr(n ir.Node) ir.Node {
switch n.Op() { switch n.Op() {
case ir.ONAME, ir.OLITERAL, ir.ONIL: case ir.ONAME, ir.OLITERAL, ir.ONIL:
return n return n
@ -167,7 +167,7 @@ func (o *Order) safeExpr(n *ir.Node) *ir.Node {
return typecheck(a, ctxExpr) return typecheck(a, ctxExpr)
case ir.OINDEX, ir.OINDEXMAP: case ir.OINDEX, ir.OINDEXMAP:
var l *ir.Node var l ir.Node
if n.Left().Type().IsArray() { if n.Left().Type().IsArray() {
l = o.safeExpr(n.Left()) l = o.safeExpr(n.Left())
} else { } else {
@ -194,7 +194,7 @@ func (o *Order) safeExpr(n *ir.Node) *ir.Node {
// of ordinary stack variables, those are not 'isaddrokay'. Temporaries are okay, // of ordinary stack variables, those are not 'isaddrokay'. Temporaries are okay,
// because we emit explicit VARKILL instructions marking the end of those // because we emit explicit VARKILL instructions marking the end of those
// temporaries' lifetimes. // temporaries' lifetimes.
func isaddrokay(n *ir.Node) bool { func isaddrokay(n ir.Node) bool {
return islvalue(n) && (n.Op() != ir.ONAME || n.Class() == ir.PEXTERN || ir.IsAutoTmp(n)) return islvalue(n) && (n.Op() != ir.ONAME || n.Class() == ir.PEXTERN || ir.IsAutoTmp(n))
} }
@ -203,7 +203,7 @@ func isaddrokay(n *ir.Node) bool {
// tmp = n, and then returns tmp. // tmp = n, and then returns tmp.
// The result of addrTemp MUST be assigned back to n, e.g. // The result of addrTemp MUST be assigned back to n, e.g.
// n.Left = o.addrTemp(n.Left) // n.Left = o.addrTemp(n.Left)
func (o *Order) addrTemp(n *ir.Node) *ir.Node { func (o *Order) addrTemp(n ir.Node) ir.Node {
if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL { if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL {
// TODO: expand this to all static composite literal nodes? // TODO: expand this to all static composite literal nodes?
n = defaultlit(n, nil) n = defaultlit(n, nil)
@ -225,7 +225,7 @@ func (o *Order) addrTemp(n *ir.Node) *ir.Node {
// mapKeyTemp prepares n to be a key in a map runtime call and returns n. // mapKeyTemp prepares n to be a key in a map runtime call and returns n.
// It should only be used for map runtime calls which have *_fast* versions. // It should only be used for map runtime calls which have *_fast* versions.
func (o *Order) mapKeyTemp(t *types.Type, n *ir.Node) *ir.Node { func (o *Order) mapKeyTemp(t *types.Type, n ir.Node) ir.Node {
// Most map calls need to take the address of the key. // Most map calls need to take the address of the key.
// Exception: map*_fast* calls. See golang.org/issue/19015. // Exception: map*_fast* calls. See golang.org/issue/19015.
if mapfast(t) == mapslow { if mapfast(t) == mapslow {
@ -248,7 +248,7 @@ func (o *Order) mapKeyTemp(t *types.Type, n *ir.Node) *ir.Node {
// It would be nice to handle these generally, but because // It would be nice to handle these generally, but because
// []byte keys are not allowed in maps, the use of string(k) // []byte keys are not allowed in maps, the use of string(k)
// comes up in important cases in practice. See issue 3512. // comes up in important cases in practice. See issue 3512.
func mapKeyReplaceStrConv(n *ir.Node) bool { func mapKeyReplaceStrConv(n ir.Node) bool {
var replaced bool var replaced bool
switch n.Op() { switch n.Op() {
case ir.OBYTES2STR: case ir.OBYTES2STR:
@ -293,8 +293,8 @@ func (o *Order) popTemp(mark ordermarker) {
// cleanTempNoPop emits VARKILL instructions to *out // cleanTempNoPop emits VARKILL instructions to *out
// for each temporary above the mark on the temporary stack. // for each temporary above the mark on the temporary stack.
// It does not pop the temporaries from the stack. // It does not pop the temporaries from the stack.
func (o *Order) cleanTempNoPop(mark ordermarker) []*ir.Node { func (o *Order) cleanTempNoPop(mark ordermarker) []ir.Node {
var out []*ir.Node var out []ir.Node
for i := len(o.temp) - 1; i >= int(mark); i-- { for i := len(o.temp) - 1; i >= int(mark); i-- {
n := o.temp[i] n := o.temp[i]
kill := ir.Nod(ir.OVARKILL, n, nil) kill := ir.Nod(ir.OVARKILL, n, nil)
@ -324,7 +324,7 @@ func (o *Order) stmtList(l ir.Nodes) {
// m = OMAKESLICE([]T, x); OCOPY(m, s) // m = OMAKESLICE([]T, x); OCOPY(m, s)
// and rewrites it to: // and rewrites it to:
// m = OMAKESLICECOPY([]T, x, s); nil // m = OMAKESLICECOPY([]T, x, s); nil
func orderMakeSliceCopy(s []*ir.Node) { func orderMakeSliceCopy(s []ir.Node) {
if base.Flag.N != 0 || instrumenting { if base.Flag.N != 0 || instrumenting {
return return
} }
@ -406,7 +406,7 @@ func (o *Order) edge() {
// orderBlock orders the block of statements in n into a new slice, // orderBlock orders the block of statements in n into a new slice,
// and then replaces the old slice in n with the new slice. // and then replaces the old slice in n with the new slice.
// free is a map that can be used to obtain temporary variables by type. // free is a map that can be used to obtain temporary variables by type.
func orderBlock(n *ir.Nodes, free map[string][]*ir.Node) { func orderBlock(n *ir.Nodes, free map[string][]ir.Node) {
var order Order var order Order
order.free = free order.free = free
mark := order.markTemp() mark := order.markTemp()
@ -420,7 +420,7 @@ func orderBlock(n *ir.Nodes, free map[string][]*ir.Node) {
// leaves them as the init list of the final *np. // leaves them as the init list of the final *np.
// The result of exprInPlace MUST be assigned back to n, e.g. // The result of exprInPlace MUST be assigned back to n, e.g.
// n.Left = o.exprInPlace(n.Left) // n.Left = o.exprInPlace(n.Left)
func (o *Order) exprInPlace(n *ir.Node) *ir.Node { func (o *Order) exprInPlace(n ir.Node) ir.Node {
var order Order var order Order
order.free = o.free order.free = o.free
n = order.expr(n, nil) n = order.expr(n, nil)
@ -437,7 +437,7 @@ func (o *Order) exprInPlace(n *ir.Node) *ir.Node {
// The result of orderStmtInPlace MUST be assigned back to n, e.g. // The result of orderStmtInPlace MUST be assigned back to n, e.g.
// n.Left = orderStmtInPlace(n.Left) // n.Left = orderStmtInPlace(n.Left)
// free is a map that can be used to obtain temporary variables by type. // free is a map that can be used to obtain temporary variables by type.
func orderStmtInPlace(n *ir.Node, free map[string][]*ir.Node) *ir.Node { func orderStmtInPlace(n ir.Node, free map[string][]ir.Node) ir.Node {
var order Order var order Order
order.free = free order.free = free
mark := order.markTemp() mark := order.markTemp()
@ -447,7 +447,7 @@ func orderStmtInPlace(n *ir.Node, free map[string][]*ir.Node) *ir.Node {
} }
// init moves n's init list to o.out. // init moves n's init list to o.out.
func (o *Order) init(n *ir.Node) { func (o *Order) init(n ir.Node) {
if ir.MayBeShared(n) { if ir.MayBeShared(n) {
// For concurrency safety, don't mutate potentially shared nodes. // For concurrency safety, don't mutate potentially shared nodes.
// First, ensure that no work is required here. // First, ensure that no work is required here.
@ -462,7 +462,7 @@ func (o *Order) init(n *ir.Node) {
// call orders the call expression n. // call orders the call expression n.
// n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY. // n.Op is OCALLMETH/OCALLFUNC/OCALLINTER or a builtin like OCOPY.
func (o *Order) call(n *ir.Node) { func (o *Order) call(n ir.Node) {
if n.Init().Len() > 0 { if n.Init().Len() > 0 {
// Caller should have already called o.init(n). // Caller should have already called o.init(n).
base.Fatalf("%v with unexpected ninit", n.Op()) base.Fatalf("%v with unexpected ninit", n.Op())
@ -483,7 +483,7 @@ func (o *Order) call(n *ir.Node) {
if n.Op() == ir.OCALLINTER { if n.Op() == ir.OCALLINTER {
return return
} }
keepAlive := func(arg *ir.Node) { keepAlive := func(arg ir.Node) {
// If the argument is really a pointer being converted to uintptr, // If the argument is really a pointer being converted to uintptr,
// arrange for the pointer to be kept alive until the call returns, // arrange for the pointer to be kept alive until the call returns,
// by copying it into a temp and marking that temp // by copying it into a temp and marking that temp
@ -525,7 +525,7 @@ func (o *Order) call(n *ir.Node) {
// cases they are also typically registerizable, so not much harm done. // cases they are also typically registerizable, so not much harm done.
// And this only applies to the multiple-assignment form. // And this only applies to the multiple-assignment form.
// We could do a more precise analysis if needed, like in walk.go. // We could do a more precise analysis if needed, like in walk.go.
func (o *Order) mapAssign(n *ir.Node) { func (o *Order) mapAssign(n ir.Node) {
switch n.Op() { switch n.Op() {
default: default:
base.Fatalf("order.mapAssign %v", n.Op()) base.Fatalf("order.mapAssign %v", n.Op())
@ -546,7 +546,7 @@ func (o *Order) mapAssign(n *ir.Node) {
o.out = append(o.out, n) o.out = append(o.out, n)
case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2MAPR, ir.OAS2FUNC: case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2MAPR, ir.OAS2FUNC:
var post []*ir.Node var post []ir.Node
for i, m := range n.List().Slice() { for i, m := range n.List().Slice() {
switch { switch {
case m.Op() == ir.OINDEXMAP: case m.Op() == ir.OINDEXMAP:
@ -574,7 +574,7 @@ func (o *Order) mapAssign(n *ir.Node) {
// stmt orders the statement n, appending to o.out. // stmt orders the statement n, appending to o.out.
// Temporaries created during the statement are cleaned // Temporaries created during the statement are cleaned
// up using VARKILL instructions as possible. // up using VARKILL instructions as possible.
func (o *Order) stmt(n *ir.Node) { func (o *Order) stmt(n ir.Node) {
if n == nil { if n == nil {
return return
} }
@ -1022,7 +1022,7 @@ func (o *Order) stmt(n *ir.Node) {
base.Pos = lno base.Pos = lno
} }
func hasDefaultCase(n *ir.Node) bool { func hasDefaultCase(n ir.Node) bool {
for _, ncas := range n.List().Slice() { for _, ncas := range n.List().Slice() {
if ncas.Op() != ir.OCASE { if ncas.Op() != ir.OCASE {
base.Fatalf("expected case, found %v", ncas.Op()) base.Fatalf("expected case, found %v", ncas.Op())
@ -1052,7 +1052,7 @@ func (o *Order) exprListInPlace(l ir.Nodes) {
} }
// prealloc[x] records the allocation to use for x. // prealloc[x] records the allocation to use for x.
var prealloc = map[*ir.Node]*ir.Node{} var prealloc = map[ir.Node]ir.Node{}
// expr orders a single expression, appending side // expr orders a single expression, appending side
// effects to o.out as needed. // effects to o.out as needed.
@ -1061,7 +1061,7 @@ var prealloc = map[*ir.Node]*ir.Node{}
// to avoid copying the result of the expression to a temporary.) // to avoid copying the result of the expression to a temporary.)
// The result of expr MUST be assigned back to n, e.g. // The result of expr MUST be assigned back to n, e.g.
// n.Left = o.expr(n.Left, lhs) // n.Left = o.expr(n.Left, lhs)
func (o *Order) expr(n, lhs *ir.Node) *ir.Node { func (o *Order) expr(n, lhs ir.Node) ir.Node {
if n == nil { if n == nil {
return n return n
} }
@ -1329,7 +1329,7 @@ func (o *Order) expr(n, lhs *ir.Node) *ir.Node {
// See issue 26552. // See issue 26552.
entries := n.List().Slice() entries := n.List().Slice()
statics := entries[:0] statics := entries[:0]
var dynamics []*ir.Node var dynamics []ir.Node
for _, r := range entries { for _, r := range entries {
if r.Op() != ir.OKEY { if r.Op() != ir.OKEY {
base.Fatalf("OMAPLIT entry not OKEY: %v\n", r) base.Fatalf("OMAPLIT entry not OKEY: %v\n", r)
@ -1377,7 +1377,7 @@ func (o *Order) expr(n, lhs *ir.Node) *ir.Node {
// okas creates and returns an assignment of val to ok, // okas creates and returns an assignment of val to ok,
// including an explicit conversion if necessary. // including an explicit conversion if necessary.
func okas(ok, val *ir.Node) *ir.Node { func okas(ok, val ir.Node) ir.Node {
if !ir.IsBlank(ok) { if !ir.IsBlank(ok) {
val = conv(val, ok.Type()) val = conv(val, ok.Type())
} }
@ -1392,9 +1392,9 @@ func okas(ok, val *ir.Node) *ir.Node {
// tmp1, tmp2, tmp3 = ... // tmp1, tmp2, tmp3 = ...
// a, b, a = tmp1, tmp2, tmp3 // a, b, a = tmp1, tmp2, tmp3
// This is necessary to ensure left to right assignment order. // This is necessary to ensure left to right assignment order.
func (o *Order) as2(n *ir.Node) { func (o *Order) as2(n ir.Node) {
tmplist := []*ir.Node{} tmplist := []ir.Node{}
left := []*ir.Node{} left := []ir.Node{}
for ni, l := range n.List().Slice() { for ni, l := range n.List().Slice() {
if !ir.IsBlank(l) { if !ir.IsBlank(l) {
tmp := o.newTemp(l.Type(), l.Type().HasPointers()) tmp := o.newTemp(l.Type(), l.Type().HasPointers())
@ -1415,8 +1415,8 @@ func (o *Order) as2(n *ir.Node) {
// okAs2 orders OAS2XXX with ok. // okAs2 orders OAS2XXX with ok.
// Just like as2, this also adds temporaries to ensure left-to-right assignment. // Just like as2, this also adds temporaries to ensure left-to-right assignment.
func (o *Order) okAs2(n *ir.Node) { func (o *Order) okAs2(n ir.Node) {
var tmp1, tmp2 *ir.Node var tmp1, tmp2 ir.Node
if !ir.IsBlank(n.List().First()) { if !ir.IsBlank(n.List().First()) {
typ := n.Right().Type() typ := n.Right().Type()
tmp1 = o.newTemp(typ, typ.HasPointers()) tmp1 = o.newTemp(typ, typ.HasPointers())

View file

@ -24,10 +24,10 @@ import (
// "Portable" code generation. // "Portable" code generation.
var ( var (
compilequeue []*ir.Node // functions waiting to be compiled compilequeue []ir.Node // functions waiting to be compiled
) )
func emitptrargsmap(fn *ir.Node) { func emitptrargsmap(fn ir.Node) {
if ir.FuncName(fn) == "_" || fn.Func().Nname.Sym().Linkname != "" { if ir.FuncName(fn) == "_" || fn.Func().Nname.Sym().Linkname != "" {
return return
} }
@ -68,7 +68,7 @@ func emitptrargsmap(fn *ir.Node) {
// really means, in memory, things with pointers needing zeroing at // really means, in memory, things with pointers needing zeroing at
// the top of the stack and increasing in size. // the top of the stack and increasing in size.
// Non-autos sort on offset. // Non-autos sort on offset.
func cmpstackvarlt(a, b *ir.Node) bool { func cmpstackvarlt(a, b ir.Node) bool {
if (a.Class() == ir.PAUTO) != (b.Class() == ir.PAUTO) { if (a.Class() == ir.PAUTO) != (b.Class() == ir.PAUTO) {
return b.Class() == ir.PAUTO return b.Class() == ir.PAUTO
} }
@ -101,7 +101,7 @@ func cmpstackvarlt(a, b *ir.Node) bool {
} }
// byStackvar implements sort.Interface for []*Node using cmpstackvarlt. // byStackvar implements sort.Interface for []*Node using cmpstackvarlt.
type byStackVar []*ir.Node type byStackVar []ir.Node
func (s byStackVar) Len() int { return len(s) } func (s byStackVar) Len() int { return len(s) }
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) } func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
@ -128,7 +128,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
scratchUsed := false scratchUsed := false
for _, b := range f.Blocks { for _, b := range f.Blocks {
for _, v := range b.Values { for _, v := range b.Values {
if n, ok := v.Aux.(*ir.Node); ok { if n, ok := v.Aux.(ir.Node); ok {
switch n.Class() { switch n.Class() {
case ir.PPARAM, ir.PPARAMOUT: case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global. // Don't modify nodfp; it is a global.
@ -193,7 +193,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
s.stkptrsize = Rnd(s.stkptrsize, int64(Widthreg)) s.stkptrsize = Rnd(s.stkptrsize, int64(Widthreg))
} }
func funccompile(fn *ir.Node) { func funccompile(fn ir.Node) {
if Curfn != nil { if Curfn != nil {
base.Fatalf("funccompile %v inside %v", fn.Func().Nname.Sym(), Curfn.Func().Nname.Sym()) base.Fatalf("funccompile %v inside %v", fn.Func().Nname.Sym(), Curfn.Func().Nname.Sym())
} }
@ -224,7 +224,7 @@ func funccompile(fn *ir.Node) {
dclcontext = ir.PEXTERN dclcontext = ir.PEXTERN
} }
func compile(fn *ir.Node) { func compile(fn ir.Node) {
errorsBefore := base.Errors() errorsBefore := base.Errors()
order(fn) order(fn)
if base.Errors() > errorsBefore { if base.Errors() > errorsBefore {
@ -284,7 +284,7 @@ func compile(fn *ir.Node) {
// If functions are not compiled immediately, // If functions are not compiled immediately,
// they are enqueued in compilequeue, // they are enqueued in compilequeue,
// which is drained by compileFunctions. // which is drained by compileFunctions.
func compilenow(fn *ir.Node) bool { func compilenow(fn ir.Node) bool {
// Issue 38068: if this function is a method AND an inline // Issue 38068: if this function is a method AND an inline
// candidate AND was not inlined (yet), put it onto the compile // candidate AND was not inlined (yet), put it onto the compile
// queue instead of compiling it immediately. This is in case we // queue instead of compiling it immediately. This is in case we
@ -299,7 +299,7 @@ func compilenow(fn *ir.Node) bool {
// isInlinableButNotInlined returns true if 'fn' was marked as an // isInlinableButNotInlined returns true if 'fn' was marked as an
// inline candidate but then never inlined (presumably because we // inline candidate but then never inlined (presumably because we
// found no call sites). // found no call sites).
func isInlinableButNotInlined(fn *ir.Node) bool { func isInlinableButNotInlined(fn ir.Node) bool {
if fn.Func().Nname.Func().Inl == nil { if fn.Func().Nname.Func().Inl == nil {
return false return false
} }
@ -315,7 +315,7 @@ const maxStackSize = 1 << 30
// uses it to generate a plist, // uses it to generate a plist,
// and flushes that plist to machine code. // and flushes that plist to machine code.
// worker indicates which of the backend workers is doing the processing. // worker indicates which of the backend workers is doing the processing.
func compileSSA(fn *ir.Node, worker int) { func compileSSA(fn ir.Node, worker int) {
f := buildssa(fn, worker) f := buildssa(fn, worker)
// Note: check arg size to fix issue 25507. // Note: check arg size to fix issue 25507.
if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type().ArgWidth() >= maxStackSize { if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type().ArgWidth() >= maxStackSize {
@ -360,7 +360,7 @@ func compileFunctions() {
sizeCalculationDisabled = true // not safe to calculate sizes concurrently sizeCalculationDisabled = true // not safe to calculate sizes concurrently
if race.Enabled { if race.Enabled {
// Randomize compilation order to try to shake out races. // Randomize compilation order to try to shake out races.
tmp := make([]*ir.Node, len(compilequeue)) tmp := make([]ir.Node, len(compilequeue))
perm := rand.Perm(len(compilequeue)) perm := rand.Perm(len(compilequeue))
for i, v := range perm { for i, v := range perm {
tmp[v] = compilequeue[i] tmp[v] = compilequeue[i]
@ -376,7 +376,7 @@ func compileFunctions() {
} }
var wg sync.WaitGroup var wg sync.WaitGroup
base.Ctxt.InParallel = true base.Ctxt.InParallel = true
c := make(chan *ir.Node, base.Flag.LowerC) c := make(chan ir.Node, base.Flag.LowerC)
for i := 0; i < base.Flag.LowerC; i++ { for i := 0; i < base.Flag.LowerC; i++ {
wg.Add(1) wg.Add(1)
go func(worker int) { go func(worker int) {
@ -398,7 +398,7 @@ func compileFunctions() {
} }
func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, dwarf.InlCalls) { func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, dwarf.InlCalls) {
fn := curfn.(*ir.Node) fn := curfn.(ir.Node)
if fn.Func().Nname != nil { if fn.Func().Nname != nil {
if expect := fn.Func().Nname.Sym().Linksym(); fnsym != expect { if expect := fn.Func().Nname.Sym().Linksym(); fnsym != expect {
base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect) base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
@ -432,7 +432,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
// Deciding the right answer is, as they say, future work. // Deciding the right answer is, as they say, future work.
isODCLFUNC := fn.Op() == ir.ODCLFUNC isODCLFUNC := fn.Op() == ir.ODCLFUNC
var apdecls []*ir.Node var apdecls []ir.Node
// Populate decls for fn. // Populate decls for fn.
if isODCLFUNC { if isODCLFUNC {
for _, n := range fn.Func().Dcl { for _, n := range fn.Func().Dcl {
@ -489,7 +489,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
return scopes, inlcalls return scopes, inlcalls
} }
func declPos(decl *ir.Node) src.XPos { func declPos(decl ir.Node) src.XPos {
if decl.Name().Defn != nil && (decl.Name().Captured() || decl.Name().Byval()) { if decl.Name().Defn != nil && (decl.Name().Captured() || decl.Name().Byval()) {
// It's not clear which position is correct for captured variables here: // It's not clear which position is correct for captured variables here:
// * decl.Pos is the wrong position for captured variables, in the inner // * decl.Pos is the wrong position for captured variables, in the inner
@ -512,10 +512,10 @@ func declPos(decl *ir.Node) src.XPos {
// createSimpleVars creates a DWARF entry for every variable declared in the // createSimpleVars creates a DWARF entry for every variable declared in the
// function, claiming that they are permanently on the stack. // function, claiming that they are permanently on the stack.
func createSimpleVars(fnsym *obj.LSym, apDecls []*ir.Node) ([]*ir.Node, []*dwarf.Var, map[*ir.Node]bool) { func createSimpleVars(fnsym *obj.LSym, apDecls []ir.Node) ([]ir.Node, []*dwarf.Var, map[ir.Node]bool) {
var vars []*dwarf.Var var vars []*dwarf.Var
var decls []*ir.Node var decls []ir.Node
selected := make(map[*ir.Node]bool) selected := make(map[ir.Node]bool)
for _, n := range apDecls { for _, n := range apDecls {
if ir.IsAutoTmp(n) { if ir.IsAutoTmp(n) {
continue continue
@ -528,7 +528,7 @@ func createSimpleVars(fnsym *obj.LSym, apDecls []*ir.Node) ([]*ir.Node, []*dwarf
return decls, vars, selected return decls, vars, selected
} }
func createSimpleVar(fnsym *obj.LSym, n *ir.Node) *dwarf.Var { func createSimpleVar(fnsym *obj.LSym, n ir.Node) *dwarf.Var {
var abbrev int var abbrev int
offs := n.Offset() offs := n.Offset()
@ -579,13 +579,13 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Node) *dwarf.Var {
// createComplexVars creates recomposed DWARF vars with location lists, // createComplexVars creates recomposed DWARF vars with location lists,
// suitable for describing optimized code. // suitable for describing optimized code.
func createComplexVars(fnsym *obj.LSym, fn *ir.Func) ([]*ir.Node, []*dwarf.Var, map[*ir.Node]bool) { func createComplexVars(fnsym *obj.LSym, fn *ir.Func) ([]ir.Node, []*dwarf.Var, map[ir.Node]bool) {
debugInfo := fn.DebugInfo.(*ssa.FuncDebug) debugInfo := fn.DebugInfo.(*ssa.FuncDebug)
// Produce a DWARF variable entry for each user variable. // Produce a DWARF variable entry for each user variable.
var decls []*ir.Node var decls []ir.Node
var vars []*dwarf.Var var vars []*dwarf.Var
ssaVars := make(map[*ir.Node]bool) ssaVars := make(map[ir.Node]bool)
for varID, dvar := range debugInfo.Vars { for varID, dvar := range debugInfo.Vars {
n := dvar n := dvar
@ -605,11 +605,11 @@ func createComplexVars(fnsym *obj.LSym, fn *ir.Func) ([]*ir.Node, []*dwarf.Var,
// createDwarfVars process fn, returning a list of DWARF variables and the // createDwarfVars process fn, returning a list of DWARF variables and the
// Nodes they represent. // Nodes they represent.
func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir.Node) ([]*ir.Node, []*dwarf.Var) { func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []ir.Node) ([]ir.Node, []*dwarf.Var) {
// Collect a raw list of DWARF vars. // Collect a raw list of DWARF vars.
var vars []*dwarf.Var var vars []*dwarf.Var
var decls []*ir.Node var decls []ir.Node
var selected map[*ir.Node]bool var selected map[ir.Node]bool
if base.Ctxt.Flag_locationlists && base.Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK { if base.Ctxt.Flag_locationlists && base.Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK {
decls, vars, selected = createComplexVars(fnsym, fn) decls, vars, selected = createComplexVars(fnsym, fn)
} else { } else {
@ -708,9 +708,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
// function that is not local to the package being compiled, then the // function that is not local to the package being compiled, then the
// names of the variables may have been "versioned" to avoid conflicts // names of the variables may have been "versioned" to avoid conflicts
// with local vars; disregard this versioning when sorting. // with local vars; disregard this versioning when sorting.
func preInliningDcls(fnsym *obj.LSym) []*ir.Node { func preInliningDcls(fnsym *obj.LSym) []ir.Node {
fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*ir.Node) fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(ir.Node)
var rdcl []*ir.Node var rdcl []ir.Node
for _, n := range fn.Func().Inl.Dcl { for _, n := range fn.Func().Inl.Dcl {
c := n.Sym().Name[0] c := n.Sym().Name[0]
// Avoid reporting "_" parameters, since if there are more than // Avoid reporting "_" parameters, since if there are more than

View file

@ -26,19 +26,19 @@ func typeWithPointers() *types.Type {
return t return t
} }
func markUsed(n *ir.Node) *ir.Node { func markUsed(n ir.Node) ir.Node {
n.Name().SetUsed(true) n.Name().SetUsed(true)
return n return n
} }
func markNeedZero(n *ir.Node) *ir.Node { func markNeedZero(n ir.Node) ir.Node {
n.Name().SetNeedzero(true) n.Name().SetNeedzero(true)
return n return n
} }
// Test all code paths for cmpstackvarlt. // Test all code paths for cmpstackvarlt.
func TestCmpstackvar(t *testing.T) { func TestCmpstackvar(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Node { nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) ir.Node {
if s == nil { if s == nil {
s = &types.Sym{Name: "."} s = &types.Sym{Name: "."}
} }
@ -49,7 +49,7 @@ func TestCmpstackvar(t *testing.T) {
return n return n
} }
testdata := []struct { testdata := []struct {
a, b *ir.Node a, b ir.Node
lt bool lt bool
}{ }{
{ {
@ -156,14 +156,14 @@ func TestCmpstackvar(t *testing.T) {
} }
func TestStackvarSort(t *testing.T) { func TestStackvarSort(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Node { nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) ir.Node {
n := NewName(s) n := NewName(s)
n.SetType(t) n.SetType(t)
n.SetOffset(xoffset) n.SetOffset(xoffset)
n.SetClass(cl) n.SetClass(cl)
return n return n
} }
inp := []*ir.Node{ inp := []ir.Node{
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC), nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO), nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC), nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
@ -178,7 +178,7 @@ func TestStackvarSort(t *testing.T) {
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO), nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO), nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
} }
want := []*ir.Node{ want := []ir.Node{
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC), nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC), nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(10, &types.Type{}, &types.Sym{}, ir.PFUNC), nod(10, &types.Type{}, &types.Sym{}, ir.PFUNC),

View file

@ -41,11 +41,11 @@ func (s *state) insertPhis() {
} }
type phiState struct { type phiState struct {
s *state // SSA state s *state // SSA state
f *ssa.Func // function to work on f *ssa.Func // function to work on
defvars []map[*ir.Node]*ssa.Value // defined variables at end of each block defvars []map[ir.Node]*ssa.Value // defined variables at end of each block
varnum map[*ir.Node]int32 // variable numbering varnum map[ir.Node]int32 // variable numbering
// properties of the dominator tree // properties of the dominator tree
idom []*ssa.Block // dominator parents idom []*ssa.Block // dominator parents
@ -71,15 +71,15 @@ func (s *phiState) insertPhis() {
// Find all the variables for which we need to match up reads & writes. // Find all the variables for which we need to match up reads & writes.
// This step prunes any basic-block-only variables from consideration. // This step prunes any basic-block-only variables from consideration.
// Generate a numbering for these variables. // Generate a numbering for these variables.
s.varnum = map[*ir.Node]int32{} s.varnum = map[ir.Node]int32{}
var vars []*ir.Node var vars []ir.Node
var vartypes []*types.Type var vartypes []*types.Type
for _, b := range s.f.Blocks { for _, b := range s.f.Blocks {
for _, v := range b.Values { for _, v := range b.Values {
if v.Op != ssa.OpFwdRef { if v.Op != ssa.OpFwdRef {
continue continue
} }
var_ := v.Aux.(*ir.Node) var_ := v.Aux.(ir.Node)
// Optimization: look back 1 block for the definition. // Optimization: look back 1 block for the definition.
if len(b.Preds) == 1 { if len(b.Preds) == 1 {
@ -184,7 +184,7 @@ levels:
} }
} }
func (s *phiState) insertVarPhis(n int, var_ *ir.Node, defs []*ssa.Block, typ *types.Type) { func (s *phiState) insertVarPhis(n int, var_ ir.Node, defs []*ssa.Block, typ *types.Type) {
priq := &s.priq priq := &s.priq
q := s.q q := s.q
queued := s.queued queued := s.queued
@ -319,7 +319,7 @@ func (s *phiState) resolveFwdRefs() {
if v.Op != ssa.OpFwdRef { if v.Op != ssa.OpFwdRef {
continue continue
} }
n := s.varnum[v.Aux.(*ir.Node)] n := s.varnum[v.Aux.(ir.Node)]
v.Op = ssa.OpCopy v.Op = ssa.OpCopy
v.Aux = nil v.Aux = nil
v.AddArg(values[n]) v.AddArg(values[n])
@ -433,11 +433,11 @@ func (s *sparseSet) clear() {
// Variant to use for small functions. // Variant to use for small functions.
type simplePhiState struct { type simplePhiState struct {
s *state // SSA state s *state // SSA state
f *ssa.Func // function to work on f *ssa.Func // function to work on
fwdrefs []*ssa.Value // list of FwdRefs to be processed fwdrefs []*ssa.Value // list of FwdRefs to be processed
defvars []map[*ir.Node]*ssa.Value // defined variables at end of each block defvars []map[ir.Node]*ssa.Value // defined variables at end of each block
reachable []bool // which blocks are reachable reachable []bool // which blocks are reachable
} }
func (s *simplePhiState) insertPhis() { func (s *simplePhiState) insertPhis() {
@ -450,7 +450,7 @@ func (s *simplePhiState) insertPhis() {
continue continue
} }
s.fwdrefs = append(s.fwdrefs, v) s.fwdrefs = append(s.fwdrefs, v)
var_ := v.Aux.(*ir.Node) var_ := v.Aux.(ir.Node)
if _, ok := s.defvars[b.ID][var_]; !ok { if _, ok := s.defvars[b.ID][var_]; !ok {
s.defvars[b.ID][var_] = v // treat FwdDefs as definitions. s.defvars[b.ID][var_] = v // treat FwdDefs as definitions.
} }
@ -464,7 +464,7 @@ loop:
v := s.fwdrefs[len(s.fwdrefs)-1] v := s.fwdrefs[len(s.fwdrefs)-1]
s.fwdrefs = s.fwdrefs[:len(s.fwdrefs)-1] s.fwdrefs = s.fwdrefs[:len(s.fwdrefs)-1]
b := v.Block b := v.Block
var_ := v.Aux.(*ir.Node) var_ := v.Aux.(ir.Node)
if b == s.f.Entry { if b == s.f.Entry {
// No variable should be live at entry. // No variable should be live at entry.
s.s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, var_, v) s.s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, var_, v)
@ -512,7 +512,7 @@ loop:
} }
// lookupVarOutgoing finds the variable's value at the end of block b. // lookupVarOutgoing finds the variable's value at the end of block b.
func (s *simplePhiState) lookupVarOutgoing(b *ssa.Block, t *types.Type, var_ *ir.Node, line src.XPos) *ssa.Value { func (s *simplePhiState) lookupVarOutgoing(b *ssa.Block, t *types.Type, var_ ir.Node, line src.XPos) *ssa.Value {
for { for {
if v := s.defvars[b.ID][var_]; v != nil { if v := s.defvars[b.ID][var_]; v != nil {
return v return v

View file

@ -101,10 +101,10 @@ type BlockEffects struct {
// A collection of global state used by liveness analysis. // A collection of global state used by liveness analysis.
type Liveness struct { type Liveness struct {
fn *ir.Node fn ir.Node
f *ssa.Func f *ssa.Func
vars []*ir.Node vars []ir.Node
idx map[*ir.Node]int32 idx map[ir.Node]int32
stkptrsize int64 stkptrsize int64
be []BlockEffects be []BlockEffects
@ -206,20 +206,20 @@ type progeffectscache struct {
// nor do we care about non-local variables, // nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check), // nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables. // nor do we care about the fake PAUTOHEAP variables.
func livenessShouldTrack(n *ir.Node) bool { func livenessShouldTrack(n ir.Node) bool {
return n.Op() == ir.ONAME && (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type().HasPointers() return n.Op() == ir.ONAME && (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type().HasPointers()
} }
// getvariables returns the list of on-stack variables that we need to track // getvariables returns the list of on-stack variables that we need to track
// and a map for looking up indices by *Node. // and a map for looking up indices by *Node.
func getvariables(fn *ir.Node) ([]*ir.Node, map[*ir.Node]int32) { func getvariables(fn ir.Node) ([]ir.Node, map[ir.Node]int32) {
var vars []*ir.Node var vars []ir.Node
for _, n := range fn.Func().Dcl { for _, n := range fn.Func().Dcl {
if livenessShouldTrack(n) { if livenessShouldTrack(n) {
vars = append(vars, n) vars = append(vars, n)
} }
} }
idx := make(map[*ir.Node]int32, len(vars)) idx := make(map[ir.Node]int32, len(vars))
for i, n := range vars { for i, n := range vars {
idx[n] = int32(i) idx[n] = int32(i)
} }
@ -312,7 +312,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
} }
// affectedNode returns the *Node affected by v // affectedNode returns the *Node affected by v
func affectedNode(v *ssa.Value) (*ir.Node, ssa.SymEffect) { func affectedNode(v *ssa.Value) (ir.Node, ssa.SymEffect) {
// Special cases. // Special cases.
switch v.Op { switch v.Op {
case ssa.OpLoadReg: case ssa.OpLoadReg:
@ -323,9 +323,9 @@ func affectedNode(v *ssa.Value) (*ir.Node, ssa.SymEffect) {
return n, ssa.SymWrite return n, ssa.SymWrite
case ssa.OpVarLive: case ssa.OpVarLive:
return v.Aux.(*ir.Node), ssa.SymRead return v.Aux.(ir.Node), ssa.SymRead
case ssa.OpVarDef, ssa.OpVarKill: case ssa.OpVarDef, ssa.OpVarKill:
return v.Aux.(*ir.Node), ssa.SymWrite return v.Aux.(ir.Node), ssa.SymWrite
case ssa.OpKeepAlive: case ssa.OpKeepAlive:
n, _ := AutoVar(v.Args[0]) n, _ := AutoVar(v.Args[0])
return n, ssa.SymRead return n, ssa.SymRead
@ -340,7 +340,7 @@ func affectedNode(v *ssa.Value) (*ir.Node, ssa.SymEffect) {
case nil, *obj.LSym: case nil, *obj.LSym:
// ok, but no node // ok, but no node
return nil, e return nil, e
case *ir.Node: case ir.Node:
return a, e return a, e
default: default:
base.Fatalf("weird aux: %s", v.LongString()) base.Fatalf("weird aux: %s", v.LongString())
@ -356,7 +356,7 @@ type livenessFuncCache struct {
// Constructs a new liveness structure used to hold the global state of the // Constructs a new liveness structure used to hold the global state of the
// liveness computation. The cfg argument is a slice of *BasicBlocks and the // liveness computation. The cfg argument is a slice of *BasicBlocks and the
// vars argument is a slice of *Nodes. // vars argument is a slice of *Nodes.
func newliveness(fn *ir.Node, f *ssa.Func, vars []*ir.Node, idx map[*ir.Node]int32, stkptrsize int64) *Liveness { func newliveness(fn ir.Node, f *ssa.Func, vars []ir.Node, idx map[ir.Node]int32, stkptrsize int64) *Liveness {
lv := &Liveness{ lv := &Liveness{
fn: fn, fn: fn,
f: f, f: f,
@ -482,7 +482,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
// Generates live pointer value maps for arguments and local variables. The // Generates live pointer value maps for arguments and local variables. The
// this argument and the in arguments are always assumed live. The vars // this argument and the in arguments are always assumed live. The vars
// argument is a slice of *Nodes. // argument is a slice of *Nodes.
func (lv *Liveness) pointerMap(liveout bvec, vars []*ir.Node, args, locals bvec) { func (lv *Liveness) pointerMap(liveout bvec, vars []ir.Node, args, locals bvec) {
for i := int32(0); ; i++ { for i := int32(0); ; i++ {
i = liveout.Next(i) i = liveout.Next(i)
if i < 0 { if i < 0 {
@ -1164,7 +1164,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
// Size args bitmaps to be just large enough to hold the largest pointer. // Size args bitmaps to be just large enough to hold the largest pointer.
// First, find the largest Xoffset node we care about. // First, find the largest Xoffset node we care about.
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.) // (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
var maxArgNode *ir.Node var maxArgNode ir.Node
for _, n := range lv.vars { for _, n := range lv.vars {
switch n.Class() { switch n.Class() {
case ir.PPARAM, ir.PPARAMOUT: case ir.PPARAM, ir.PPARAMOUT:

View file

@ -60,7 +60,7 @@ func ispkgin(pkgs []string) bool {
return false return false
} }
func instrument(fn *ir.Node) { func instrument(fn ir.Node) {
if fn.Func().Pragma&ir.Norace != 0 { if fn.Func().Pragma&ir.Norace != 0 {
return return
} }

View file

@ -13,7 +13,7 @@ import (
) )
// range // range
func typecheckrange(n *ir.Node) { func typecheckrange(n ir.Node) {
// Typechecking order is important here: // Typechecking order is important here:
// 0. first typecheck range expression (slice/map/chan), // 0. first typecheck range expression (slice/map/chan),
// it is evaluated only once and so logically it is not part of the loop. // it is evaluated only once and so logically it is not part of the loop.
@ -39,7 +39,7 @@ func typecheckrange(n *ir.Node) {
decldepth-- decldepth--
} }
func typecheckrangeExpr(n *ir.Node) { func typecheckrangeExpr(n ir.Node) {
n.SetRight(typecheck(n.Right(), ctxExpr)) n.SetRight(typecheck(n.Right(), ctxExpr))
t := n.Right().Type() t := n.Right().Type()
@ -95,7 +95,7 @@ func typecheckrangeExpr(n *ir.Node) {
base.ErrorfAt(n.Pos(), "too many variables in range") base.ErrorfAt(n.Pos(), "too many variables in range")
} }
var v1, v2 *ir.Node var v1, v2 ir.Node
if n.List().Len() != 0 { if n.List().Len() != 0 {
v1 = n.List().First() v1 = n.List().First()
} }
@ -157,7 +157,7 @@ func cheapComputableIndex(width int64) bool {
// simpler forms. The result must be assigned back to n. // simpler forms. The result must be assigned back to n.
// Node n may also be modified in place, and may also be // Node n may also be modified in place, and may also be
// the returned node. // the returned node.
func walkrange(n *ir.Node) *ir.Node { func walkrange(n ir.Node) ir.Node {
if isMapClear(n) { if isMapClear(n) {
m := n.Right() m := n.Right()
lno := setlineno(m) lno := setlineno(m)
@ -179,7 +179,7 @@ func walkrange(n *ir.Node) *ir.Node {
lno := setlineno(a) lno := setlineno(a)
n.SetRight(nil) n.SetRight(nil)
var v1, v2 *ir.Node var v1, v2 ir.Node
l := n.List().Len() l := n.List().Len()
if l > 0 { if l > 0 {
v1 = n.List().First() v1 = n.List().First()
@ -205,12 +205,12 @@ func walkrange(n *ir.Node) *ir.Node {
// to avoid erroneous processing by racewalk. // to avoid erroneous processing by racewalk.
n.PtrList().Set(nil) n.PtrList().Set(nil)
var ifGuard *ir.Node var ifGuard ir.Node
translatedLoopOp := ir.OFOR translatedLoopOp := ir.OFOR
var body []*ir.Node var body []ir.Node
var init []*ir.Node var init []ir.Node
switch t.Etype { switch t.Etype {
default: default:
base.Fatalf("walkrange") base.Fatalf("walkrange")
@ -240,7 +240,7 @@ func walkrange(n *ir.Node) *ir.Node {
// for v1 := range ha { body } // for v1 := range ha { body }
if v2 == nil { if v2 == nil {
body = []*ir.Node{ir.Nod(ir.OAS, v1, hv1)} body = []ir.Node{ir.Nod(ir.OAS, v1, hv1)}
break break
} }
@ -254,7 +254,7 @@ func walkrange(n *ir.Node) *ir.Node {
a := ir.Nod(ir.OAS2, nil, nil) a := ir.Nod(ir.OAS2, nil, nil)
a.PtrList().Set2(v1, v2) a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1, tmp) a.PtrRlist().Set2(hv1, tmp)
body = []*ir.Node{a} body = []ir.Node{a}
break break
} }
@ -321,14 +321,14 @@ func walkrange(n *ir.Node) *ir.Node {
if v1 == nil { if v1 == nil {
body = nil body = nil
} else if v2 == nil { } else if v2 == nil {
body = []*ir.Node{ir.Nod(ir.OAS, v1, key)} body = []ir.Node{ir.Nod(ir.OAS, v1, key)}
} else { } else {
elem := nodSym(ir.ODOT, hit, elemsym) elem := nodSym(ir.ODOT, hit, elemsym)
elem = ir.Nod(ir.ODEREF, elem, nil) elem = ir.Nod(ir.ODEREF, elem, nil)
a := ir.Nod(ir.OAS2, nil, nil) a := ir.Nod(ir.OAS2, nil, nil)
a.PtrList().Set2(v1, v2) a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(key, elem) a.PtrRlist().Set2(key, elem)
body = []*ir.Node{a} body = []ir.Node{a}
} }
case types.TCHAN: case types.TCHAN:
@ -353,7 +353,7 @@ func walkrange(n *ir.Node) *ir.Node {
if v1 == nil { if v1 == nil {
body = nil body = nil
} else { } else {
body = []*ir.Node{ir.Nod(ir.OAS, v1, hv1)} body = []ir.Node{ir.Nod(ir.OAS, v1, hv1)}
} }
// Zero hv1. This prevents hv1 from being the sole, inaccessible // Zero hv1. This prevents hv1 from being the sole, inaccessible
// reference to an otherwise GC-able value during the next channel receive. // reference to an otherwise GC-able value during the next channel receive.
@ -467,7 +467,7 @@ func walkrange(n *ir.Node) *ir.Node {
// } // }
// //
// where == for keys of map m is reflexive. // where == for keys of map m is reflexive.
func isMapClear(n *ir.Node) bool { func isMapClear(n ir.Node) bool {
if base.Flag.N != 0 || instrumenting { if base.Flag.N != 0 || instrumenting {
return false return false
} }
@ -509,7 +509,7 @@ func isMapClear(n *ir.Node) bool {
} }
// mapClear constructs a call to runtime.mapclear for the map m. // mapClear constructs a call to runtime.mapclear for the map m.
func mapClear(m *ir.Node) *ir.Node { func mapClear(m ir.Node) ir.Node {
t := m.Type() t := m.Type()
// instantiate mapclear(typ *type, hmap map[any]any) // instantiate mapclear(typ *type, hmap map[any]any)
@ -534,7 +534,7 @@ func mapClear(m *ir.Node) *ir.Node {
// in which the evaluation of a is side-effect-free. // in which the evaluation of a is side-effect-free.
// //
// Parameters are as in walkrange: "for v1, v2 = range a". // Parameters are as in walkrange: "for v1, v2 = range a".
func arrayClear(n, v1, v2, a *ir.Node) bool { func arrayClear(n, v1, v2, a ir.Node) bool {
if base.Flag.N != 0 || instrumenting { if base.Flag.N != 0 || instrumenting {
return false return false
} }
@ -590,7 +590,7 @@ func arrayClear(n, v1, v2, a *ir.Node) bool {
tmp = conv(tmp, types.Types[types.TUINTPTR]) tmp = conv(tmp, types.Types[types.TUINTPTR])
n.PtrBody().Append(ir.Nod(ir.OAS, hn, tmp)) n.PtrBody().Append(ir.Nod(ir.OAS, hn, tmp))
var fn *ir.Node var fn ir.Node
if a.Type().Elem().HasPointers() { if a.Type().Elem().HasPointers() {
// memclrHasPointers(hp, hn) // memclrHasPointers(hp, hn)
Curfn.Func().SetWBPos(stmt.Pos()) Curfn.Func().SetWBPos(stmt.Pos())
@ -615,7 +615,7 @@ func arrayClear(n, v1, v2, a *ir.Node) bool {
} }
// addptr returns (*T)(uintptr(p) + n). // addptr returns (*T)(uintptr(p) + n).
func addptr(p *ir.Node, n int64) *ir.Node { func addptr(p ir.Node, n int64) ir.Node {
t := p.Type() t := p.Type()
p = ir.Nod(ir.OCONVNOP, p, nil) p = ir.Nod(ir.OCONVNOP, p, nil)

View file

@ -347,7 +347,7 @@ func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
if receiver != nil { if receiver != nil {
inLen++ inLen++
} }
in := make([]*ir.Node, 0, inLen) in := make([]ir.Node, 0, inLen)
if receiver != nil { if receiver != nil {
d := anonfield(receiver) d := anonfield(receiver)
@ -361,7 +361,7 @@ func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
} }
outLen := f.Results().Fields().Len() outLen := f.Results().Fields().Len()
out := make([]*ir.Node, 0, outLen) out := make([]ir.Node, 0, outLen)
for _, t := range f.Results().Fields().Slice() { for _, t := range f.Results().Fields().Slice() {
d := anonfield(t.Type) d := anonfield(t.Type)
out = append(out, d) out = append(out, d)
@ -990,7 +990,7 @@ func typenamesym(t *types.Type) *types.Sym {
return s return s
} }
func typename(t *types.Type) *ir.Node { func typename(t *types.Type) ir.Node {
s := typenamesym(t) s := typenamesym(t)
if s.Def == nil { if s.Def == nil {
n := ir.NewNameAt(src.NoXPos, s) n := ir.NewNameAt(src.NoXPos, s)
@ -1006,7 +1006,7 @@ func typename(t *types.Type) *ir.Node {
return n return n
} }
func itabname(t, itype *types.Type) *ir.Node { func itabname(t, itype *types.Type) ir.Node {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() { if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
base.Fatalf("itabname(%v, %v)", t, itype) base.Fatalf("itabname(%v, %v)", t, itype)
} }
@ -1516,7 +1516,7 @@ func addsignat(t *types.Type) {
} }
} }
func addsignats(dcls []*ir.Node) { func addsignats(dcls []ir.Node) {
// copy types from dcl list to signatset // copy types from dcl list to signatset
for _, n := range dcls { for _, n := range dcls {
if n.Op() == ir.OTYPE { if n.Op() == ir.OTYPE {
@ -1626,7 +1626,7 @@ func dumpbasictypes() {
// The latter is the type of an auto-generated wrapper. // The latter is the type of an auto-generated wrapper.
dtypesym(types.NewPtr(types.Errortype)) dtypesym(types.NewPtr(types.Errortype))
dtypesym(functype(nil, []*ir.Node{anonfield(types.Errortype)}, []*ir.Node{anonfield(types.Types[types.TSTRING])})) dtypesym(functype(nil, []ir.Node{anonfield(types.Errortype)}, []ir.Node{anonfield(types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly. // add paths for runtime and main, which 6l imports implicitly.
dimportpath(Runtimepkg) dimportpath(Runtimepkg)
@ -1869,7 +1869,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) {
// zeroaddr returns the address of a symbol with at least // zeroaddr returns the address of a symbol with at least
// size bytes of zeros. // size bytes of zeros.
func zeroaddr(size int64) *ir.Node { func zeroaddr(size int64) ir.Node {
if size >= 1<<31 { if size >= 1<<31 {
base.Fatalf("map elem too big %d", size) base.Fatalf("map elem too big %d", size)
} }

View file

@ -32,10 +32,10 @@ import "cmd/compile/internal/ir"
// when analyzing a set of mutually recursive functions. // when analyzing a set of mutually recursive functions.
type bottomUpVisitor struct { type bottomUpVisitor struct {
analyze func([]*ir.Node, bool) analyze func([]ir.Node, bool)
visitgen uint32 visitgen uint32
nodeID map[*ir.Node]uint32 nodeID map[ir.Node]uint32
stack []*ir.Node stack []ir.Node
} }
// visitBottomUp invokes analyze on the ODCLFUNC nodes listed in list. // visitBottomUp invokes analyze on the ODCLFUNC nodes listed in list.
@ -51,10 +51,10 @@ type bottomUpVisitor struct {
// If recursive is false, the list consists of only a single function and its closures. // If recursive is false, the list consists of only a single function and its closures.
// If recursive is true, the list may still contain only a single function, // If recursive is true, the list may still contain only a single function,
// if that function is itself recursive. // if that function is itself recursive.
func visitBottomUp(list []*ir.Node, analyze func(list []*ir.Node, recursive bool)) { func visitBottomUp(list []ir.Node, analyze func(list []ir.Node, recursive bool)) {
var v bottomUpVisitor var v bottomUpVisitor
v.analyze = analyze v.analyze = analyze
v.nodeID = make(map[*ir.Node]uint32) v.nodeID = make(map[ir.Node]uint32)
for _, n := range list { for _, n := range list {
if n.Op() == ir.ODCLFUNC && !n.Func().IsHiddenClosure() { if n.Op() == ir.ODCLFUNC && !n.Func().IsHiddenClosure() {
v.visit(n) v.visit(n)
@ -62,7 +62,7 @@ func visitBottomUp(list []*ir.Node, analyze func(list []*ir.Node, recursive bool
} }
} }
func (v *bottomUpVisitor) visit(n *ir.Node) uint32 { func (v *bottomUpVisitor) visit(n ir.Node) uint32 {
if id := v.nodeID[n]; id > 0 { if id := v.nodeID[n]; id > 0 {
// already visited // already visited
return id return id
@ -75,7 +75,7 @@ func (v *bottomUpVisitor) visit(n *ir.Node) uint32 {
min := v.visitgen min := v.visitgen
v.stack = append(v.stack, n) v.stack = append(v.stack, n)
ir.InspectList(n.Body(), func(n *ir.Node) bool { ir.InspectList(n.Body(), func(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.ONAME: case ir.ONAME:
if n.Class() == ir.PFUNC { if n.Class() == ir.PFUNC {

View file

@ -28,7 +28,7 @@ func findScope(marks []ir.Mark, pos src.XPos) ir.ScopeID {
return marks[i-1].Scope return marks[i-1].Scope
} }
func assembleScopes(fnsym *obj.LSym, fn *ir.Node, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope { func assembleScopes(fnsym *obj.LSym, fn ir.Node, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope {
// Initialize the DWARF scope tree based on lexical scopes. // Initialize the DWARF scope tree based on lexical scopes.
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func().Parents)) dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func().Parents))
for i, parent := range fn.Func().Parents { for i, parent := range fn.Func().Parents {

View file

@ -11,8 +11,8 @@ import (
) )
// select // select
func typecheckselect(sel *ir.Node) { func typecheckselect(sel ir.Node) {
var def *ir.Node var def ir.Node
lno := setlineno(sel) lno := setlineno(sel)
typecheckslice(sel.Init().Slice(), ctxStmt) typecheckslice(sel.Init().Slice(), ctxStmt)
for _, ncase := range sel.List().Slice() { for _, ncase := range sel.List().Slice() {
@ -91,7 +91,7 @@ func typecheckselect(sel *ir.Node) {
base.Pos = lno base.Pos = lno
} }
func walkselect(sel *ir.Node) { func walkselect(sel ir.Node) {
lno := setlineno(sel) lno := setlineno(sel)
if sel.Body().Len() != 0 { if sel.Body().Len() != 0 {
base.Fatalf("double walkselect") base.Fatalf("double walkselect")
@ -109,13 +109,13 @@ func walkselect(sel *ir.Node) {
base.Pos = lno base.Pos = lno
} }
func walkselectcases(cases *ir.Nodes) []*ir.Node { func walkselectcases(cases *ir.Nodes) []ir.Node {
ncas := cases.Len() ncas := cases.Len()
sellineno := base.Pos sellineno := base.Pos
// optimization: zero-case select // optimization: zero-case select
if ncas == 0 { if ncas == 0 {
return []*ir.Node{mkcall("block", nil, nil)} return []ir.Node{mkcall("block", nil, nil)}
} }
// optimization: one-case select: single op. // optimization: one-case select: single op.
@ -168,7 +168,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
// convert case value arguments to addresses. // convert case value arguments to addresses.
// this rewrite is used by both the general code and the next optimization. // this rewrite is used by both the general code and the next optimization.
var dflt *ir.Node var dflt ir.Node
for _, cas := range cases.Slice() { for _, cas := range cases.Slice() {
setlineno(cas) setlineno(cas)
n := cas.Left() n := cas.Left()
@ -237,16 +237,16 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
r.SetLeft(typecheck(r.Left(), ctxExpr)) r.SetLeft(typecheck(r.Left(), ctxExpr))
r.PtrBody().Set(cas.Body().Slice()) r.PtrBody().Set(cas.Body().Slice())
r.PtrRlist().Set(append(dflt.Init().Slice(), dflt.Body().Slice()...)) r.PtrRlist().Set(append(dflt.Init().Slice(), dflt.Body().Slice()...))
return []*ir.Node{r, ir.Nod(ir.OBREAK, nil, nil)} return []ir.Node{r, ir.Nod(ir.OBREAK, nil, nil)}
} }
if dflt != nil { if dflt != nil {
ncas-- ncas--
} }
casorder := make([]*ir.Node, ncas) casorder := make([]ir.Node, ncas)
nsends, nrecvs := 0, 0 nsends, nrecvs := 0, 0
var init []*ir.Node var init []ir.Node
// generate sel-struct // generate sel-struct
base.Pos = sellineno base.Pos = sellineno
@ -258,7 +258,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
// No initialization for order; runtime.selectgo is responsible for that. // No initialization for order; runtime.selectgo is responsible for that.
order := temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas))) order := temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
var pc0, pcs *ir.Node var pc0, pcs ir.Node
if base.Flag.Race { if base.Flag.Race {
pcs = temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas))) pcs = temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
pc0 = typecheck(ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, pcs, nodintconst(0)), nil), ctxExpr) pc0 = typecheck(ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, pcs, nodintconst(0)), nil), ctxExpr)
@ -279,7 +279,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
} }
var i int var i int
var c, elem *ir.Node var c, elem ir.Node
switch n.Op() { switch n.Op() {
default: default:
base.Fatalf("select %v", n.Op()) base.Fatalf("select %v", n.Op())
@ -297,7 +297,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
casorder[i] = cas casorder[i] = cas
setField := func(f string, val *ir.Node) { setField := func(f string, val ir.Node) {
r := ir.Nod(ir.OAS, nodSym(ir.ODOT, ir.Nod(ir.OINDEX, selv, nodintconst(int64(i))), lookup(f)), val) r := ir.Nod(ir.OAS, nodSym(ir.ODOT, ir.Nod(ir.OINDEX, selv, nodintconst(int64(i))), lookup(f)), val)
r = typecheck(r, ctxStmt) r = typecheck(r, ctxStmt)
init = append(init, r) init = append(init, r)
@ -340,7 +340,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
} }
// dispatch cases // dispatch cases
dispatch := func(cond, cas *ir.Node) { dispatch := func(cond, cas ir.Node) {
cond = typecheck(cond, ctxExpr) cond = typecheck(cond, ctxExpr)
cond = defaultlit(cond, nil) cond = defaultlit(cond, nil)
@ -370,7 +370,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
} }
// bytePtrToIndex returns a Node representing "(*byte)(&n[i])". // bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
func bytePtrToIndex(n *ir.Node, i int64) *ir.Node { func bytePtrToIndex(n ir.Node, i int64) ir.Node {
s := ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, n, nodintconst(i)), nil) s := ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, n, nodintconst(i)), nil)
t := types.NewPtr(types.Types[types.TUINT8]) t := types.NewPtr(types.Types[types.TUINT8])
return convnop(s, t) return convnop(s, t)
@ -381,7 +381,7 @@ var scase *types.Type
// Keep in sync with src/runtime/select.go. // Keep in sync with src/runtime/select.go.
func scasetype() *types.Type { func scasetype() *types.Type {
if scase == nil { if scase == nil {
scase = tostruct([]*ir.Node{ scase = tostruct([]ir.Node{
namedfield("c", types.Types[types.TUNSAFEPTR]), namedfield("c", types.Types[types.TUNSAFEPTR]),
namedfield("elem", types.Types[types.TUNSAFEPTR]), namedfield("elem", types.Types[types.TUNSAFEPTR]),
}) })

View file

@ -14,8 +14,8 @@ import (
) )
type InitEntry struct { type InitEntry struct {
Xoffset int64 // struct, array only Xoffset int64 // struct, array only
Expr *ir.Node // bytes of run-time computed expressions Expr ir.Node // bytes of run-time computed expressions
} }
type InitPlan struct { type InitPlan struct {
@ -29,18 +29,18 @@ type InitPlan struct {
type InitSchedule struct { type InitSchedule struct {
// out is the ordered list of dynamic initialization // out is the ordered list of dynamic initialization
// statements. // statements.
out []*ir.Node out []ir.Node
initplans map[*ir.Node]*InitPlan initplans map[ir.Node]*InitPlan
inittemps map[*ir.Node]*ir.Node inittemps map[ir.Node]ir.Node
} }
func (s *InitSchedule) append(n *ir.Node) { func (s *InitSchedule) append(n ir.Node) {
s.out = append(s.out, n) s.out = append(s.out, n)
} }
// staticInit adds an initialization statement n to the schedule. // staticInit adds an initialization statement n to the schedule.
func (s *InitSchedule) staticInit(n *ir.Node) { func (s *InitSchedule) staticInit(n ir.Node) {
if !s.tryStaticInit(n) { if !s.tryStaticInit(n) {
if base.Flag.Percent != 0 { if base.Flag.Percent != 0 {
ir.Dump("nonstatic", n) ir.Dump("nonstatic", n)
@ -51,7 +51,7 @@ func (s *InitSchedule) staticInit(n *ir.Node) {
// tryStaticInit attempts to statically execute an initialization // tryStaticInit attempts to statically execute an initialization
// statement and reports whether it succeeded. // statement and reports whether it succeeded.
func (s *InitSchedule) tryStaticInit(n *ir.Node) bool { func (s *InitSchedule) tryStaticInit(n ir.Node) bool {
// Only worry about simple "l = r" assignments. Multiple // Only worry about simple "l = r" assignments. Multiple
// variable/expression OAS2 assignments have already been // variable/expression OAS2 assignments have already been
// replaced by multiple simple OAS assignments, and the other // replaced by multiple simple OAS assignments, and the other
@ -70,7 +70,7 @@ func (s *InitSchedule) tryStaticInit(n *ir.Node) bool {
// like staticassign but we are copying an already // like staticassign but we are copying an already
// initialized value r. // initialized value r.
func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool { func (s *InitSchedule) staticcopy(l ir.Node, r ir.Node) bool {
if r.Op() != ir.ONAME && r.Op() != ir.OMETHEXPR { if r.Op() != ir.ONAME && r.Op() != ir.OMETHEXPR {
return false return false
} }
@ -168,7 +168,7 @@ func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
return false return false
} }
func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool { func (s *InitSchedule) staticassign(l ir.Node, r ir.Node) bool {
for r.Op() == ir.OCONVNOP { for r.Op() == ir.OCONVNOP {
r = r.Left() r = r.Left()
} }
@ -289,7 +289,7 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
markTypeUsedInInterface(val.Type(), l.Sym().Linksym()) markTypeUsedInInterface(val.Type(), l.Sym().Linksym())
var itab *ir.Node var itab ir.Node
if l.Type().IsEmptyInterface() { if l.Type().IsEmptyInterface() {
itab = typename(val.Type()) itab = typename(val.Type())
} else { } else {
@ -367,7 +367,7 @@ var statuniqgen int // name generator for static temps
// staticname returns a name backed by a (writable) static data symbol. // staticname returns a name backed by a (writable) static data symbol.
// Use readonlystaticname for read-only node. // Use readonlystaticname for read-only node.
func staticname(t *types.Type) *ir.Node { func staticname(t *types.Type) ir.Node {
// Don't use lookupN; it interns the resulting string, but these are all unique. // Don't use lookupN; it interns the resulting string, but these are all unique.
n := NewName(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen))) n := NewName(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
statuniqgen++ statuniqgen++
@ -377,18 +377,18 @@ func staticname(t *types.Type) *ir.Node {
} }
// readonlystaticname returns a name backed by a (writable) static data symbol. // readonlystaticname returns a name backed by a (writable) static data symbol.
func readonlystaticname(t *types.Type) *ir.Node { func readonlystaticname(t *types.Type) ir.Node {
n := staticname(t) n := staticname(t)
n.MarkReadonly() n.MarkReadonly()
n.Sym().Linksym().Set(obj.AttrContentAddressable, true) n.Sym().Linksym().Set(obj.AttrContentAddressable, true)
return n return n
} }
func isSimpleName(n *ir.Node) bool { func isSimpleName(n ir.Node) bool {
return (n.Op() == ir.ONAME || n.Op() == ir.OMETHEXPR) && n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN return (n.Op() == ir.ONAME || n.Op() == ir.OMETHEXPR) && n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN
} }
func litas(l *ir.Node, r *ir.Node, init *ir.Nodes) { func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
a := ir.Nod(ir.OAS, l, r) a := ir.Nod(ir.OAS, l, r)
a = typecheck(a, ctxStmt) a = typecheck(a, ctxStmt)
a = walkexpr(a, init) a = walkexpr(a, init)
@ -405,7 +405,7 @@ const (
// getdyn calculates the initGenType for n. // getdyn calculates the initGenType for n.
// If top is false, getdyn is recursing. // If top is false, getdyn is recursing.
func getdyn(n *ir.Node, top bool) initGenType { func getdyn(n ir.Node, top bool) initGenType {
switch n.Op() { switch n.Op() {
default: default:
if isGoConst(n) { if isGoConst(n) {
@ -447,7 +447,7 @@ func getdyn(n *ir.Node, top bool) initGenType {
} }
// isStaticCompositeLiteral reports whether n is a compile-time constant. // isStaticCompositeLiteral reports whether n is a compile-time constant.
func isStaticCompositeLiteral(n *ir.Node) bool { func isStaticCompositeLiteral(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.OSLICELIT: case ir.OSLICELIT:
return false return false
@ -509,13 +509,13 @@ const (
// fixedlit handles struct, array, and slice literals. // fixedlit handles struct, array, and slice literals.
// TODO: expand documentation. // TODO: expand documentation.
func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *ir.Nodes) { func fixedlit(ctxt initContext, kind initKind, n ir.Node, var_ ir.Node, init *ir.Nodes) {
isBlank := var_ == ir.BlankNode isBlank := var_ == ir.BlankNode
var splitnode func(*ir.Node) (a *ir.Node, value *ir.Node) var splitnode func(ir.Node) (a ir.Node, value ir.Node)
switch n.Op() { switch n.Op() {
case ir.OARRAYLIT, ir.OSLICELIT: case ir.OARRAYLIT, ir.OSLICELIT:
var k int64 var k int64
splitnode = func(r *ir.Node) (*ir.Node, *ir.Node) { splitnode = func(r ir.Node) (ir.Node, ir.Node) {
if r.Op() == ir.OKEY { if r.Op() == ir.OKEY {
k = indexconst(r.Left()) k = indexconst(r.Left())
if k < 0 { if k < 0 {
@ -531,7 +531,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *
return a, r return a, r
} }
case ir.OSTRUCTLIT: case ir.OSTRUCTLIT:
splitnode = func(r *ir.Node) (*ir.Node, *ir.Node) { splitnode = func(r ir.Node) (ir.Node, ir.Node) {
if r.Op() != ir.OSTRUCTKEY { if r.Op() != ir.OSTRUCTKEY {
base.Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r) base.Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r)
} }
@ -576,7 +576,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *
case initKindStatic: case initKindStatic:
genAsStatic(a) genAsStatic(a)
case initKindDynamic, initKindLocalCode: case initKindDynamic, initKindLocalCode:
a = orderStmtInPlace(a, map[string][]*ir.Node{}) a = orderStmtInPlace(a, map[string][]ir.Node{})
a = walkstmt(a) a = walkstmt(a)
init.Append(a) init.Append(a)
default: default:
@ -586,7 +586,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *
} }
} }
func isSmallSliceLit(n *ir.Node) bool { func isSmallSliceLit(n ir.Node) bool {
if n.Op() != ir.OSLICELIT { if n.Op() != ir.OSLICELIT {
return false return false
} }
@ -596,7 +596,7 @@ func isSmallSliceLit(n *ir.Node) bool {
return smallintconst(r) && (n.Type().Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type().Elem().Width) return smallintconst(r) && (n.Type().Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type().Elem().Width)
} }
func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) { func slicelit(ctxt initContext, n ir.Node, var_ ir.Node, init *ir.Nodes) {
// make an array type corresponding the number of elements we have // make an array type corresponding the number of elements we have
t := types.NewArray(n.Type().Elem(), n.Right().Int64Val()) t := types.NewArray(n.Type().Elem(), n.Right().Int64Val())
dowidth(t) dowidth(t)
@ -639,7 +639,7 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
// if the literal contains constants, // if the literal contains constants,
// make static initialized array (1),(2) // make static initialized array (1),(2)
var vstat *ir.Node var vstat ir.Node
mode := getdyn(n, true) mode := getdyn(n, true)
if mode&initConst != 0 && !isSmallSliceLit(n) { if mode&initConst != 0 && !isSmallSliceLit(n) {
@ -655,7 +655,7 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
vauto := temp(types.NewPtr(t)) vauto := temp(types.NewPtr(t))
// set auto to point at new temp or heap (3 assign) // set auto to point at new temp or heap (3 assign)
var a *ir.Node var a ir.Node
if x := prealloc[n]; x != nil { if x := prealloc[n]; x != nil {
// temp allocated during order.go for dddarg // temp allocated during order.go for dddarg
if !types.Identical(t, x.Type()) { if !types.Identical(t, x.Type()) {
@ -745,7 +745,7 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
a = ir.Nod(ir.OAS, a, value) a = ir.Nod(ir.OAS, a, value)
a = typecheck(a, ctxStmt) a = typecheck(a, ctxStmt)
a = orderStmtInPlace(a, map[string][]*ir.Node{}) a = orderStmtInPlace(a, map[string][]ir.Node{})
a = walkstmt(a) a = walkstmt(a)
init.Append(a) init.Append(a)
} }
@ -754,12 +754,12 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
a = ir.Nod(ir.OAS, var_, ir.Nod(ir.OSLICE, vauto, nil)) a = ir.Nod(ir.OAS, var_, ir.Nod(ir.OSLICE, vauto, nil))
a = typecheck(a, ctxStmt) a = typecheck(a, ctxStmt)
a = orderStmtInPlace(a, map[string][]*ir.Node{}) a = orderStmtInPlace(a, map[string][]ir.Node{})
a = walkstmt(a) a = walkstmt(a)
init.Append(a) init.Append(a)
} }
func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) { func maplit(n ir.Node, m ir.Node, init *ir.Nodes) {
// make the map var // make the map var
a := ir.Nod(ir.OMAKE, nil, nil) a := ir.Nod(ir.OMAKE, nil, nil)
a.SetEsc(n.Esc()) a.SetEsc(n.Esc())
@ -866,7 +866,7 @@ func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
init.Append(a) init.Append(a)
} }
func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) { func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
t := n.Type() t := n.Type()
switch n.Op() { switch n.Op() {
default: default:
@ -882,7 +882,7 @@ func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
base.Fatalf("anylit: not ptr") base.Fatalf("anylit: not ptr")
} }
var r *ir.Node var r ir.Node
if n.Right() != nil { if n.Right() != nil {
// n.Right is stack temporary used as backing store. // n.Right is stack temporary used as backing store.
init.Append(ir.Nod(ir.OAS, n.Right(), nil)) // zero backing store, just in case (#18410) init.Append(ir.Nod(ir.OAS, n.Right(), nil)) // zero backing store, just in case (#18410)
@ -959,7 +959,7 @@ func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
} }
} }
func oaslit(n *ir.Node, init *ir.Nodes) bool { func oaslit(n ir.Node, init *ir.Nodes) bool {
if n.Left() == nil || n.Right() == nil { if n.Left() == nil || n.Right() == nil {
// not a special composite literal assignment // not a special composite literal assignment
return false return false
@ -995,7 +995,7 @@ func oaslit(n *ir.Node, init *ir.Nodes) bool {
return true return true
} }
func getlit(lit *ir.Node) int { func getlit(lit ir.Node) int {
if smallintconst(lit) { if smallintconst(lit) {
return int(lit.Int64Val()) return int(lit.Int64Val())
} }
@ -1003,7 +1003,7 @@ func getlit(lit *ir.Node) int {
} }
// stataddr returns the static address of n, if n has one, or else nil. // stataddr returns the static address of n, if n has one, or else nil.
func stataddr(n *ir.Node) *ir.Node { func stataddr(n ir.Node) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -1046,7 +1046,7 @@ func stataddr(n *ir.Node) *ir.Node {
return nil return nil
} }
func (s *InitSchedule) initplan(n *ir.Node) { func (s *InitSchedule) initplan(n ir.Node) {
if s.initplans[n] != nil { if s.initplans[n] != nil {
return return
} }
@ -1091,7 +1091,7 @@ func (s *InitSchedule) initplan(n *ir.Node) {
} }
} }
func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n *ir.Node) { func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n ir.Node) {
// special case: zero can be dropped entirely // special case: zero can be dropped entirely
if isZero(n) { if isZero(n) {
return return
@ -1113,7 +1113,7 @@ func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n *ir.Node) {
p.E = append(p.E, InitEntry{Xoffset: xoffset, Expr: n}) p.E = append(p.E, InitEntry{Xoffset: xoffset, Expr: n})
} }
func isZero(n *ir.Node) bool { func isZero(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.ONIL: case ir.ONIL:
return true return true
@ -1151,11 +1151,11 @@ func isZero(n *ir.Node) bool {
return false return false
} }
func isvaluelit(n *ir.Node) bool { func isvaluelit(n ir.Node) bool {
return n.Op() == ir.OARRAYLIT || n.Op() == ir.OSTRUCTLIT return n.Op() == ir.OARRAYLIT || n.Op() == ir.OSTRUCTLIT
} }
func genAsStatic(as *ir.Node) { func genAsStatic(as ir.Node) {
if as.Left().Type() == nil { if as.Left().Type() == nil {
base.Fatalf("genAsStatic as.Left not typechecked") base.Fatalf("genAsStatic as.Left not typechecked")
} }

File diff suppressed because it is too large Load diff

View file

@ -40,7 +40,7 @@ var (
// It's primarily used to distinguish references to named objects, // It's primarily used to distinguish references to named objects,
// whose Pos will point back to their declaration position rather than // whose Pos will point back to their declaration position rather than
// their usage position. // their usage position.
func hasUniquePos(n *ir.Node) bool { func hasUniquePos(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.ONAME, ir.OPACK: case ir.ONAME, ir.OPACK:
return false return false
@ -60,7 +60,7 @@ func hasUniquePos(n *ir.Node) bool {
return true return true
} }
func setlineno(n *ir.Node) src.XPos { func setlineno(n ir.Node) src.XPos {
lno := base.Pos lno := base.Pos
if n != nil && hasUniquePos(n) { if n != nil && hasUniquePos(n) {
base.Pos = n.Pos() base.Pos = n.Pos()
@ -102,7 +102,7 @@ func autolabel(prefix string) *types.Sym {
// find all the exported symbols in package opkg // find all the exported symbols in package opkg
// and make them available in the current package // and make them available in the current package
func importdot(opkg *types.Pkg, pack *ir.Node) { func importdot(opkg *types.Pkg, pack ir.Node) {
n := 0 n := 0
for _, s := range opkg.Syms { for _, s := range opkg.Syms {
if s.Def == nil { if s.Def == nil {
@ -136,7 +136,7 @@ func importdot(opkg *types.Pkg, pack *ir.Node) {
} }
// newname returns a new ONAME Node associated with symbol s. // newname returns a new ONAME Node associated with symbol s.
func NewName(s *types.Sym) *ir.Node { func NewName(s *types.Sym) ir.Node {
n := ir.NewNameAt(base.Pos, s) n := ir.NewNameAt(base.Pos, s)
n.Name().Curfn = Curfn n.Name().Curfn = Curfn
return n return n
@ -144,13 +144,13 @@ func NewName(s *types.Sym) *ir.Node {
// nodSym makes a Node with Op op and with the Left field set to left // nodSym makes a Node with Op op and with the Left field set to left
// and the Sym field set to sym. This is for ODOT and friends. // and the Sym field set to sym. This is for ODOT and friends.
func nodSym(op ir.Op, left *ir.Node, sym *types.Sym) *ir.Node { func nodSym(op ir.Op, left ir.Node, sym *types.Sym) ir.Node {
return nodlSym(base.Pos, op, left, sym) return nodlSym(base.Pos, op, left, sym)
} }
// nodlSym makes a Node with position Pos, with Op op, and with the Left field set to left // nodlSym makes a Node with position Pos, with Op op, and with the Left field set to left
// and the Sym field set to sym. This is for ODOT and friends. // and the Sym field set to sym. This is for ODOT and friends.
func nodlSym(pos src.XPos, op ir.Op, left *ir.Node, sym *types.Sym) *ir.Node { func nodlSym(pos src.XPos, op ir.Op, left ir.Node, sym *types.Sym) ir.Node {
n := ir.NodAt(pos, op, left, nil) n := ir.NodAt(pos, op, left, nil)
n.SetSym(sym) n.SetSym(sym)
return n return n
@ -163,21 +163,21 @@ func (x methcmp) Len() int { return len(x) }
func (x methcmp) Swap(i, j int) { x[i], x[j] = x[j], x[i] } func (x methcmp) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x methcmp) Less(i, j int) bool { return x[i].Sym.Less(x[j].Sym) } func (x methcmp) Less(i, j int) bool { return x[i].Sym.Less(x[j].Sym) }
func nodintconst(v int64) *ir.Node { func nodintconst(v int64) ir.Node {
return ir.NewLiteral(constant.MakeInt64(v)) return ir.NewLiteral(constant.MakeInt64(v))
} }
func nodnil() *ir.Node { func nodnil() ir.Node {
n := ir.Nod(ir.ONIL, nil, nil) n := ir.Nod(ir.ONIL, nil, nil)
n.SetType(types.Types[types.TNIL]) n.SetType(types.Types[types.TNIL])
return n return n
} }
func nodbool(b bool) *ir.Node { func nodbool(b bool) ir.Node {
return ir.NewLiteral(constant.MakeBool(b)) return ir.NewLiteral(constant.MakeBool(b))
} }
func nodstr(s string) *ir.Node { func nodstr(s string) ir.Node {
return ir.NewLiteral(constant.MakeString(s)) return ir.NewLiteral(constant.MakeString(s))
} }
@ -185,7 +185,7 @@ func nodstr(s string) *ir.Node {
// ONAME, OLITERAL, OTYPE, and ONONAME leaves. // ONAME, OLITERAL, OTYPE, and ONONAME leaves.
// If pos.IsKnown(), it sets the source position of newly // If pos.IsKnown(), it sets the source position of newly
// allocated nodes to pos. // allocated nodes to pos.
func treecopy(n *ir.Node, pos src.XPos) *ir.Node { func treecopy(n ir.Node, pos src.XPos) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -511,12 +511,12 @@ func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
return ir.OXXX, "" return ir.OXXX, ""
} }
func assignconv(n *ir.Node, t *types.Type, context string) *ir.Node { func assignconv(n ir.Node, t *types.Type, context string) ir.Node {
return assignconvfn(n, t, func() string { return context }) return assignconvfn(n, t, func() string { return context })
} }
// Convert node n for assignment to type t. // Convert node n for assignment to type t.
func assignconvfn(n *ir.Node, t *types.Type, context func() string) *ir.Node { func assignconvfn(n ir.Node, t *types.Type, context func() string) ir.Node {
if n == nil || n.Type() == nil || n.Type().Broke() { if n == nil || n.Type() == nil || n.Type().Broke() {
return n return n
} }
@ -565,7 +565,7 @@ func assignconvfn(n *ir.Node, t *types.Type, context func() string) *ir.Node {
// backingArrayPtrLen extracts the pointer and length from a slice or string. // backingArrayPtrLen extracts the pointer and length from a slice or string.
// This constructs two nodes referring to n, so n must be a cheapexpr. // This constructs two nodes referring to n, so n must be a cheapexpr.
func backingArrayPtrLen(n *ir.Node) (ptr, len *ir.Node) { func backingArrayPtrLen(n ir.Node) (ptr, len ir.Node) {
var init ir.Nodes var init ir.Nodes
c := cheapexpr(n, &init) c := cheapexpr(n, &init)
if c != n || init.Len() != 0 { if c != n || init.Len() != 0 {
@ -584,7 +584,7 @@ func backingArrayPtrLen(n *ir.Node) (ptr, len *ir.Node) {
// labeledControl returns the control flow Node (for, switch, select) // labeledControl returns the control flow Node (for, switch, select)
// associated with the label n, if any. // associated with the label n, if any.
func labeledControl(n *ir.Node) *ir.Node { func labeledControl(n ir.Node) ir.Node {
if n.Op() != ir.OLABEL { if n.Op() != ir.OLABEL {
base.Fatalf("labeledControl %v", n.Op()) base.Fatalf("labeledControl %v", n.Op())
} }
@ -599,7 +599,7 @@ func labeledControl(n *ir.Node) *ir.Node {
return nil return nil
} }
func syslook(name string) *ir.Node { func syslook(name string) ir.Node {
s := Runtimepkg.Lookup(name) s := Runtimepkg.Lookup(name)
if s == nil || s.Def == nil { if s == nil || s.Def == nil {
base.Fatalf("syslook: can't find runtime.%s", name) base.Fatalf("syslook: can't find runtime.%s", name)
@ -618,14 +618,14 @@ func typehash(t *types.Type) uint32 {
// updateHasCall checks whether expression n contains any function // updateHasCall checks whether expression n contains any function
// calls and sets the n.HasCall flag if so. // calls and sets the n.HasCall flag if so.
func updateHasCall(n *ir.Node) { func updateHasCall(n ir.Node) {
if n == nil { if n == nil {
return return
} }
n.SetHasCall(calcHasCall(n)) n.SetHasCall(calcHasCall(n))
} }
func calcHasCall(n *ir.Node) bool { func calcHasCall(n ir.Node) bool {
if n.Init().Len() != 0 { if n.Init().Len() != 0 {
// TODO(mdempsky): This seems overly conservative. // TODO(mdempsky): This seems overly conservative.
return true return true
@ -740,7 +740,7 @@ func brrev(op ir.Op) ir.Op {
// return side effect-free n, appending side effects to init. // return side effect-free n, appending side effects to init.
// result is assignable if n is. // result is assignable if n is.
func safeexpr(n *ir.Node, init *ir.Nodes) *ir.Node { func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
if n == nil { if n == nil {
return nil return nil
} }
@ -800,7 +800,7 @@ func safeexpr(n *ir.Node, init *ir.Nodes) *ir.Node {
return cheapexpr(n, init) return cheapexpr(n, init)
} }
func copyexpr(n *ir.Node, t *types.Type, init *ir.Nodes) *ir.Node { func copyexpr(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
l := temp(t) l := temp(t)
a := ir.Nod(ir.OAS, l, n) a := ir.Nod(ir.OAS, l, n)
a = typecheck(a, ctxStmt) a = typecheck(a, ctxStmt)
@ -811,7 +811,7 @@ func copyexpr(n *ir.Node, t *types.Type, init *ir.Nodes) *ir.Node {
// return side-effect free and cheap n, appending side effects to init. // return side-effect free and cheap n, appending side effects to init.
// result may not be assignable. // result may not be assignable.
func cheapexpr(n *ir.Node, init *ir.Nodes) *ir.Node { func cheapexpr(n ir.Node, init *ir.Nodes) ir.Node {
switch n.Op() { switch n.Op() {
case ir.ONAME, ir.OLITERAL, ir.ONIL: case ir.ONAME, ir.OLITERAL, ir.ONIL:
return n return n
@ -957,7 +957,7 @@ func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (
// find missing fields that // find missing fields that
// will give shortest unique addressing. // will give shortest unique addressing.
// modify the tree with missing type names. // modify the tree with missing type names.
func adddot(n *ir.Node) *ir.Node { func adddot(n ir.Node) ir.Node {
n.SetLeft(typecheck(n.Left(), ctxType|ctxExpr)) n.SetLeft(typecheck(n.Left(), ctxType|ctxExpr))
if n.Left().Diag() { if n.Left().Diag() {
n.SetDiag(true) n.SetDiag(true)
@ -1116,8 +1116,8 @@ func expandmeth(t *types.Type) {
} }
// Given funarg struct list, return list of ODCLFIELD Node fn args. // Given funarg struct list, return list of ODCLFIELD Node fn args.
func structargs(tl *types.Type, mustname bool) []*ir.Node { func structargs(tl *types.Type, mustname bool) []ir.Node {
var args []*ir.Node var args []ir.Node
gen := 0 gen := 0
for _, t := range tl.Fields().Slice() { for _, t := range tl.Fields().Slice() {
s := t.Sym s := t.Sym
@ -1250,30 +1250,30 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type && rcvr.Elem().Sym != nil { if rcvr.IsPtr() && rcvr.Elem() == method.Type.Recv().Type && rcvr.Elem().Sym != nil {
inlcalls(fn) inlcalls(fn)
} }
escapeFuncs([]*ir.Node{fn}, false) escapeFuncs([]ir.Node{fn}, false)
Curfn = nil Curfn = nil
xtop = append(xtop, fn) xtop = append(xtop, fn)
} }
func paramNnames(ft *types.Type) []*ir.Node { func paramNnames(ft *types.Type) []ir.Node {
args := make([]*ir.Node, ft.NumParams()) args := make([]ir.Node, ft.NumParams())
for i, f := range ft.Params().FieldSlice() { for i, f := range ft.Params().FieldSlice() {
args[i] = ir.AsNode(f.Nname) args[i] = ir.AsNode(f.Nname)
} }
return args return args
} }
func hashmem(t *types.Type) *ir.Node { func hashmem(t *types.Type) ir.Node {
sym := Runtimepkg.Lookup("memhash") sym := Runtimepkg.Lookup("memhash")
n := NewName(sym) n := NewName(sym)
setNodeNameFunc(n) setNodeNameFunc(n)
n.SetType(functype(nil, []*ir.Node{ n.SetType(functype(nil, []ir.Node{
anonfield(types.NewPtr(t)), anonfield(types.NewPtr(t)),
anonfield(types.Types[types.TUINTPTR]), anonfield(types.Types[types.TUINTPTR]),
anonfield(types.Types[types.TUINTPTR]), anonfield(types.Types[types.TUINTPTR]),
}, []*ir.Node{ }, []ir.Node{
anonfield(types.Types[types.TUINTPTR]), anonfield(types.Types[types.TUINTPTR]),
})) }))
return n return n
@ -1393,15 +1393,15 @@ func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool
return true return true
} }
func listtreecopy(l []*ir.Node, pos src.XPos) []*ir.Node { func listtreecopy(l []ir.Node, pos src.XPos) []ir.Node {
var out []*ir.Node var out []ir.Node
for _, n := range l { for _, n := range l {
out = append(out, treecopy(n, pos)) out = append(out, treecopy(n, pos))
} }
return out return out
} }
func liststmt(l []*ir.Node) *ir.Node { func liststmt(l []ir.Node) ir.Node {
n := ir.Nod(ir.OBLOCK, nil, nil) n := ir.Nod(ir.OBLOCK, nil, nil)
n.PtrList().Set(l) n.PtrList().Set(l)
if len(l) != 0 { if len(l) != 0 {
@ -1410,7 +1410,7 @@ func liststmt(l []*ir.Node) *ir.Node {
return n return n
} }
func ngotype(n *ir.Node) *types.Sym { func ngotype(n ir.Node) *types.Sym {
if n.Type() != nil { if n.Type() != nil {
return typenamesym(n.Type()) return typenamesym(n.Type())
} }
@ -1419,7 +1419,7 @@ func ngotype(n *ir.Node) *types.Sym {
// The result of addinit MUST be assigned back to n, e.g. // The result of addinit MUST be assigned back to n, e.g.
// n.Left = addinit(n.Left, init) // n.Left = addinit(n.Left, init)
func addinit(n *ir.Node, init []*ir.Node) *ir.Node { func addinit(n ir.Node, init []ir.Node) ir.Node {
if len(init) == 0 { if len(init) == 0 {
return n return n
} }
@ -1518,7 +1518,7 @@ func isdirectiface(t *types.Type) bool {
} }
// itabType loads the _type field from a runtime.itab struct. // itabType loads the _type field from a runtime.itab struct.
func itabType(itab *ir.Node) *ir.Node { func itabType(itab ir.Node) ir.Node {
typ := nodSym(ir.ODOTPTR, itab, nil) typ := nodSym(ir.ODOTPTR, itab, nil)
typ.SetType(types.NewPtr(types.Types[types.TUINT8])) typ.SetType(types.NewPtr(types.Types[types.TUINT8]))
typ.SetTypecheck(1) typ.SetTypecheck(1)
@ -1530,7 +1530,7 @@ func itabType(itab *ir.Node) *ir.Node {
// ifaceData loads the data field from an interface. // ifaceData loads the data field from an interface.
// The concrete type must be known to have type t. // The concrete type must be known to have type t.
// It follows the pointer if !isdirectiface(t). // It follows the pointer if !isdirectiface(t).
func ifaceData(pos src.XPos, n *ir.Node, t *types.Type) *ir.Node { func ifaceData(pos src.XPos, n ir.Node, t *types.Type) ir.Node {
if t.IsInterface() { if t.IsInterface() {
base.Fatalf("ifaceData interface: %v", t) base.Fatalf("ifaceData interface: %v", t)
} }

View file

@ -15,7 +15,7 @@ import (
) )
// typecheckswitch typechecks a switch statement. // typecheckswitch typechecks a switch statement.
func typecheckswitch(n *ir.Node) { func typecheckswitch(n ir.Node) {
typecheckslice(n.Init().Slice(), ctxStmt) typecheckslice(n.Init().Slice(), ctxStmt)
if n.Left() != nil && n.Left().Op() == ir.OTYPESW { if n.Left() != nil && n.Left().Op() == ir.OTYPESW {
typecheckTypeSwitch(n) typecheckTypeSwitch(n)
@ -24,7 +24,7 @@ func typecheckswitch(n *ir.Node) {
} }
} }
func typecheckTypeSwitch(n *ir.Node) { func typecheckTypeSwitch(n ir.Node) {
n.Left().SetRight(typecheck(n.Left().Right(), ctxExpr)) n.Left().SetRight(typecheck(n.Left().Right(), ctxExpr))
t := n.Left().Right().Type() t := n.Left().Right().Type()
if t != nil && !t.IsInterface() { if t != nil && !t.IsInterface() {
@ -39,7 +39,7 @@ func typecheckTypeSwitch(n *ir.Node) {
base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym()) base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
} }
var defCase, nilCase *ir.Node var defCase, nilCase ir.Node
var ts typeSet var ts typeSet
for _, ncase := range n.List().Slice() { for _, ncase := range n.List().Slice() {
ls := ncase.List().Slice() ls := ncase.List().Slice()
@ -144,7 +144,7 @@ func (s *typeSet) add(pos src.XPos, typ *types.Type) {
s.m[ls] = append(prevs, typeSetEntry{pos, typ}) s.m[ls] = append(prevs, typeSetEntry{pos, typ})
} }
func typecheckExprSwitch(n *ir.Node) { func typecheckExprSwitch(n ir.Node) {
t := types.Types[types.TBOOL] t := types.Types[types.TBOOL]
if n.Left() != nil { if n.Left() != nil {
n.SetLeft(typecheck(n.Left(), ctxExpr)) n.SetLeft(typecheck(n.Left(), ctxExpr))
@ -172,7 +172,7 @@ func typecheckExprSwitch(n *ir.Node) {
} }
} }
var defCase *ir.Node var defCase ir.Node
var cs constSet var cs constSet
for _, ncase := range n.List().Slice() { for _, ncase := range n.List().Slice() {
ls := ncase.List().Slice() ls := ncase.List().Slice()
@ -225,7 +225,7 @@ func typecheckExprSwitch(n *ir.Node) {
} }
// walkswitch walks a switch statement. // walkswitch walks a switch statement.
func walkswitch(sw *ir.Node) { func walkswitch(sw ir.Node) {
// Guard against double walk, see #25776. // Guard against double walk, see #25776.
if sw.List().Len() == 0 && sw.Body().Len() > 0 { if sw.List().Len() == 0 && sw.Body().Len() > 0 {
return // Was fatal, but eliminating every possible source of double-walking is hard return // Was fatal, but eliminating every possible source of double-walking is hard
@ -240,7 +240,7 @@ func walkswitch(sw *ir.Node) {
// walkExprSwitch generates an AST implementing sw. sw is an // walkExprSwitch generates an AST implementing sw. sw is an
// expression switch. // expression switch.
func walkExprSwitch(sw *ir.Node) { func walkExprSwitch(sw ir.Node) {
lno := setlineno(sw) lno := setlineno(sw)
cond := sw.Left() cond := sw.Left()
@ -275,7 +275,7 @@ func walkExprSwitch(sw *ir.Node) {
exprname: cond, exprname: cond,
} }
var defaultGoto *ir.Node var defaultGoto ir.Node
var body ir.Nodes var body ir.Nodes
for _, ncase := range sw.List().Slice() { for _, ncase := range sw.List().Slice() {
label := autolabel(".s") label := autolabel(".s")
@ -318,7 +318,7 @@ func walkExprSwitch(sw *ir.Node) {
// An exprSwitch walks an expression switch. // An exprSwitch walks an expression switch.
type exprSwitch struct { type exprSwitch struct {
exprname *ir.Node // value being switched on exprname ir.Node // value being switched on
done ir.Nodes done ir.Nodes
clauses []exprClause clauses []exprClause
@ -326,11 +326,11 @@ type exprSwitch struct {
type exprClause struct { type exprClause struct {
pos src.XPos pos src.XPos
lo, hi *ir.Node lo, hi ir.Node
jmp *ir.Node jmp ir.Node
} }
func (s *exprSwitch) Add(pos src.XPos, expr, jmp *ir.Node) { func (s *exprSwitch) Add(pos src.XPos, expr, jmp ir.Node) {
c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp} c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp}
if okforcmp[s.exprname.Type().Etype] && expr.Op() == ir.OLITERAL { if okforcmp[s.exprname.Type().Etype] && expr.Op() == ir.OLITERAL {
s.clauses = append(s.clauses, c) s.clauses = append(s.clauses, c)
@ -390,10 +390,10 @@ func (s *exprSwitch) flush() {
// Perform two-level binary search. // Perform two-level binary search.
binarySearch(len(runs), &s.done, binarySearch(len(runs), &s.done,
func(i int) *ir.Node { func(i int) ir.Node {
return ir.Nod(ir.OLE, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(runs[i-1]))) return ir.Nod(ir.OLE, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(runs[i-1])))
}, },
func(i int, nif *ir.Node) { func(i int, nif ir.Node) {
run := runs[i] run := runs[i]
nif.SetLeft(ir.Nod(ir.OEQ, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(run)))) nif.SetLeft(ir.Nod(ir.OEQ, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(run))))
s.search(run, nif.PtrBody()) s.search(run, nif.PtrBody())
@ -425,10 +425,10 @@ func (s *exprSwitch) flush() {
func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) { func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
binarySearch(len(cc), out, binarySearch(len(cc), out,
func(i int) *ir.Node { func(i int) ir.Node {
return ir.Nod(ir.OLE, s.exprname, cc[i-1].hi) return ir.Nod(ir.OLE, s.exprname, cc[i-1].hi)
}, },
func(i int, nif *ir.Node) { func(i int, nif ir.Node) {
c := &cc[i] c := &cc[i]
nif.SetLeft(c.test(s.exprname)) nif.SetLeft(c.test(s.exprname))
nif.PtrBody().Set1(c.jmp) nif.PtrBody().Set1(c.jmp)
@ -436,7 +436,7 @@ func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
) )
} }
func (c *exprClause) test(exprname *ir.Node) *ir.Node { func (c *exprClause) test(exprname ir.Node) ir.Node {
// Integer range. // Integer range.
if c.hi != c.lo { if c.hi != c.lo {
low := ir.NodAt(c.pos, ir.OGE, exprname, c.lo) low := ir.NodAt(c.pos, ir.OGE, exprname, c.lo)
@ -456,7 +456,7 @@ func (c *exprClause) test(exprname *ir.Node) *ir.Node {
return ir.NodAt(c.pos, ir.OEQ, exprname, c.lo) return ir.NodAt(c.pos, ir.OEQ, exprname, c.lo)
} }
func allCaseExprsAreSideEffectFree(sw *ir.Node) bool { func allCaseExprsAreSideEffectFree(sw ir.Node) bool {
// In theory, we could be more aggressive, allowing any // In theory, we could be more aggressive, allowing any
// side-effect-free expressions in cases, but it's a bit // side-effect-free expressions in cases, but it's a bit
// tricky because some of that information is unavailable due // tricky because some of that information is unavailable due
@ -478,7 +478,7 @@ func allCaseExprsAreSideEffectFree(sw *ir.Node) bool {
} }
// hasFall reports whether stmts ends with a "fallthrough" statement. // hasFall reports whether stmts ends with a "fallthrough" statement.
func hasFall(stmts []*ir.Node) (bool, src.XPos) { func hasFall(stmts []ir.Node) (bool, src.XPos) {
// Search backwards for the index of the fallthrough // Search backwards for the index of the fallthrough
// statement. Do not assume it'll be in the last // statement. Do not assume it'll be in the last
// position, since in some cases (e.g. when the statement // position, since in some cases (e.g. when the statement
@ -497,7 +497,7 @@ func hasFall(stmts []*ir.Node) (bool, src.XPos) {
// walkTypeSwitch generates an AST that implements sw, where sw is a // walkTypeSwitch generates an AST that implements sw, where sw is a
// type switch. // type switch.
func walkTypeSwitch(sw *ir.Node) { func walkTypeSwitch(sw ir.Node) {
var s typeSwitch var s typeSwitch
s.facename = sw.Left().Right() s.facename = sw.Left().Right()
sw.SetLeft(nil) sw.SetLeft(nil)
@ -538,10 +538,10 @@ func walkTypeSwitch(sw *ir.Node) {
s.hashname = copyexpr(dotHash, dotHash.Type(), sw.PtrBody()) s.hashname = copyexpr(dotHash, dotHash.Type(), sw.PtrBody())
br := ir.Nod(ir.OBREAK, nil, nil) br := ir.Nod(ir.OBREAK, nil, nil)
var defaultGoto, nilGoto *ir.Node var defaultGoto, nilGoto ir.Node
var body ir.Nodes var body ir.Nodes
for _, ncase := range sw.List().Slice() { for _, ncase := range sw.List().Slice() {
var caseVar *ir.Node var caseVar ir.Node
if ncase.Rlist().Len() != 0 { if ncase.Rlist().Len() != 0 {
caseVar = ncase.Rlist().First() caseVar = ncase.Rlist().First()
} }
@ -592,7 +592,7 @@ func walkTypeSwitch(sw *ir.Node) {
} }
val = ifaceData(ncase.Pos(), s.facename, singleType) val = ifaceData(ncase.Pos(), s.facename, singleType)
} }
l := []*ir.Node{ l := []ir.Node{
ir.NodAt(ncase.Pos(), ir.ODCL, caseVar, nil), ir.NodAt(ncase.Pos(), ir.ODCL, caseVar, nil),
ir.NodAt(ncase.Pos(), ir.OAS, caseVar, val), ir.NodAt(ncase.Pos(), ir.OAS, caseVar, val),
} }
@ -622,9 +622,9 @@ func walkTypeSwitch(sw *ir.Node) {
// A typeSwitch walks a type switch. // A typeSwitch walks a type switch.
type typeSwitch struct { type typeSwitch struct {
// Temporary variables (i.e., ONAMEs) used by type switch dispatch logic: // Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
facename *ir.Node // value being type-switched on facename ir.Node // value being type-switched on
hashname *ir.Node // type hash of the value being type-switched on hashname ir.Node // type hash of the value being type-switched on
okname *ir.Node // boolean used for comma-ok type assertions okname ir.Node // boolean used for comma-ok type assertions
done ir.Nodes done ir.Nodes
clauses []typeClause clauses []typeClause
@ -635,10 +635,10 @@ type typeClause struct {
body ir.Nodes body ir.Nodes
} }
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *ir.Node) { func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
var body ir.Nodes var body ir.Nodes
if caseVar != nil { if caseVar != nil {
l := []*ir.Node{ l := []ir.Node{
ir.NodAt(pos, ir.ODCL, caseVar, nil), ir.NodAt(pos, ir.ODCL, caseVar, nil),
ir.NodAt(pos, ir.OAS, caseVar, nil), ir.NodAt(pos, ir.OAS, caseVar, nil),
} }
@ -703,10 +703,10 @@ func (s *typeSwitch) flush() {
cc = merged cc = merged
binarySearch(len(cc), &s.done, binarySearch(len(cc), &s.done,
func(i int) *ir.Node { func(i int) ir.Node {
return ir.Nod(ir.OLE, s.hashname, nodintconst(int64(cc[i-1].hash))) return ir.Nod(ir.OLE, s.hashname, nodintconst(int64(cc[i-1].hash)))
}, },
func(i int, nif *ir.Node) { func(i int, nif ir.Node) {
// TODO(mdempsky): Omit hash equality check if // TODO(mdempsky): Omit hash equality check if
// there's only one type. // there's only one type.
c := cc[i] c := cc[i]
@ -725,7 +725,7 @@ func (s *typeSwitch) flush() {
// //
// leaf(i, nif) should setup nif (an OIF node) to test case i. In // leaf(i, nif) should setup nif (an OIF node) to test case i. In
// particular, it should set nif.Left and nif.Nbody. // particular, it should set nif.Left and nif.Nbody.
func binarySearch(n int, out *ir.Nodes, less func(i int) *ir.Node, leaf func(i int, nif *ir.Node)) { func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i int, nif ir.Node)) {
const binarySearchMin = 4 // minimum number of cases for binary search const binarySearchMin = 4 // minimum number of cases for binary search
var do func(lo, hi int, out *ir.Nodes) var do func(lo, hi int, out *ir.Nodes)

View file

@ -20,7 +20,7 @@ const enableTrace = false
var traceIndent []byte var traceIndent []byte
var skipDowidthForTracing bool var skipDowidthForTracing bool
func tracePrint(title string, n *ir.Node) func(np **ir.Node) { func tracePrint(title string, n ir.Node) func(np *ir.Node) {
indent := traceIndent indent := traceIndent
// guard against nil // guard against nil
@ -37,7 +37,7 @@ func tracePrint(title string, n *ir.Node) func(np **ir.Node) {
fmt.Printf("%s: %s%s %p %s %v tc=%d\n", pos, indent, title, n, op, n, tc) fmt.Printf("%s: %s%s %p %s %v tc=%d\n", pos, indent, title, n, op, n, tc)
traceIndent = append(traceIndent, ". "...) traceIndent = append(traceIndent, ". "...)
return func(np **ir.Node) { return func(np *ir.Node) {
traceIndent = traceIndent[:len(traceIndent)-2] traceIndent = traceIndent[:len(traceIndent)-2]
// if we have a result, use that // if we have a result, use that
@ -77,10 +77,10 @@ const (
// marks variables that escape the local frame. // marks variables that escape the local frame.
// rewrites n.Op to be more specific in some cases. // rewrites n.Op to be more specific in some cases.
var typecheckdefstack []*ir.Node var typecheckdefstack []ir.Node
// resolve ONONAME to definition, if any. // resolve ONONAME to definition, if any.
func resolve(n *ir.Node) (res *ir.Node) { func resolve(n ir.Node) (res ir.Node) {
if n == nil || n.Op() != ir.ONONAME { if n == nil || n.Op() != ir.ONONAME {
return n return n
} }
@ -115,7 +115,7 @@ func resolve(n *ir.Node) (res *ir.Node) {
return r return r
} }
func typecheckslice(l []*ir.Node, top int) { func typecheckslice(l []ir.Node, top int) {
for i := range l { for i := range l {
l[i] = typecheck(l[i], top) l[i] = typecheck(l[i], top)
} }
@ -166,7 +166,7 @@ func typekind(t *types.Type) string {
return fmt.Sprintf("etype=%d", et) return fmt.Sprintf("etype=%d", et)
} }
func cycleFor(start *ir.Node) []*ir.Node { func cycleFor(start ir.Node) []ir.Node {
// Find the start node in typecheck_tcstack. // Find the start node in typecheck_tcstack.
// We know that it must exist because each time we mark // We know that it must exist because each time we mark
// a node with n.SetTypecheck(2) we push it on the stack, // a node with n.SetTypecheck(2) we push it on the stack,
@ -179,7 +179,7 @@ func cycleFor(start *ir.Node) []*ir.Node {
} }
// collect all nodes with same Op // collect all nodes with same Op
var cycle []*ir.Node var cycle []ir.Node
for _, n := range typecheck_tcstack[i:] { for _, n := range typecheck_tcstack[i:] {
if n.Op() == start.Op() { if n.Op() == start.Op() {
cycle = append(cycle, n) cycle = append(cycle, n)
@ -189,7 +189,7 @@ func cycleFor(start *ir.Node) []*ir.Node {
return cycle return cycle
} }
func cycleTrace(cycle []*ir.Node) string { func cycleTrace(cycle []ir.Node) string {
var s string var s string
for i, n := range cycle { for i, n := range cycle {
s += fmt.Sprintf("\n\t%v: %v uses %v", ir.Line(n), n, cycle[(i+1)%len(cycle)]) s += fmt.Sprintf("\n\t%v: %v uses %v", ir.Line(n), n, cycle[(i+1)%len(cycle)])
@ -197,12 +197,12 @@ func cycleTrace(cycle []*ir.Node) string {
return s return s
} }
var typecheck_tcstack []*ir.Node var typecheck_tcstack []ir.Node
// typecheck type checks node n. // typecheck type checks node n.
// The result of typecheck MUST be assigned back to n, e.g. // The result of typecheck MUST be assigned back to n, e.g.
// n.Left = typecheck(n.Left, top) // n.Left = typecheck(n.Left, top)
func typecheck(n *ir.Node, top int) (res *ir.Node) { func typecheck(n ir.Node, top int) (res ir.Node) {
// cannot type check until all the source has been parsed // cannot type check until all the source has been parsed
if !typecheckok { if !typecheckok {
base.Fatalf("early typecheck") base.Fatalf("early typecheck")
@ -317,7 +317,7 @@ func typecheck(n *ir.Node, top int) (res *ir.Node) {
// value of type int (see also checkmake for comparison). // value of type int (see also checkmake for comparison).
// The result of indexlit MUST be assigned back to n, e.g. // The result of indexlit MUST be assigned back to n, e.g.
// n.Left = indexlit(n.Left) // n.Left = indexlit(n.Left)
func indexlit(n *ir.Node) *ir.Node { func indexlit(n ir.Node) ir.Node {
if n != nil && n.Type() != nil && n.Type().Etype == types.TIDEAL { if n != nil && n.Type() != nil && n.Type().Etype == types.TIDEAL {
return defaultlit(n, types.Types[types.TINT]) return defaultlit(n, types.Types[types.TINT])
} }
@ -326,7 +326,7 @@ func indexlit(n *ir.Node) *ir.Node {
// The result of typecheck1 MUST be assigned back to n, e.g. // The result of typecheck1 MUST be assigned back to n, e.g.
// n.Left = typecheck1(n.Left, top) // n.Left = typecheck1(n.Left, top)
func typecheck1(n *ir.Node, top int) (res *ir.Node) { func typecheck1(n ir.Node, top int) (res ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheck1", n)(&res) defer tracePrint("typecheck1", n)(&res)
} }
@ -569,9 +569,9 @@ func typecheck1(n *ir.Node, top int) (res *ir.Node) {
ir.OOROR, ir.OOROR,
ir.OSUB, ir.OSUB,
ir.OXOR: ir.OXOR:
var l *ir.Node var l ir.Node
var op ir.Op var op ir.Op
var r *ir.Node var r ir.Node
if n.Op() == ir.OASOP { if n.Op() == ir.OASOP {
ok |= ctxStmt ok |= ctxStmt
n.SetLeft(typecheck(n.Left(), ctxExpr)) n.SetLeft(typecheck(n.Left(), ctxExpr))
@ -1762,7 +1762,7 @@ func typecheck1(n *ir.Node, top int) (res *ir.Node) {
l = args[i] l = args[i]
i++ i++
l = typecheck(l, ctxExpr) l = typecheck(l, ctxExpr)
var r *ir.Node var r ir.Node
if i < len(args) { if i < len(args) {
r = args[i] r = args[i]
i++ i++
@ -2129,7 +2129,7 @@ func typecheck1(n *ir.Node, top int) (res *ir.Node) {
return n return n
} }
func typecheckargs(n *ir.Node) { func typecheckargs(n ir.Node) {
if n.List().Len() != 1 || n.IsDDD() { if n.List().Len() != 1 || n.IsDDD() {
typecheckslice(n.List().Slice(), ctxExpr) typecheckslice(n.List().Slice(), ctxExpr)
return return
@ -2174,7 +2174,7 @@ func typecheckargs(n *ir.Node) {
n.PtrInit().Append(as) n.PtrInit().Append(as)
} }
func checksliceindex(l *ir.Node, r *ir.Node, tp *types.Type) bool { func checksliceindex(l ir.Node, r ir.Node, tp *types.Type) bool {
t := r.Type() t := r.Type()
if t == nil { if t == nil {
return false return false
@ -2204,7 +2204,7 @@ func checksliceindex(l *ir.Node, r *ir.Node, tp *types.Type) bool {
return true return true
} }
func checksliceconst(lo *ir.Node, hi *ir.Node) bool { func checksliceconst(lo ir.Node, hi ir.Node) bool {
if lo != nil && hi != nil && lo.Op() == ir.OLITERAL && hi.Op() == ir.OLITERAL && constant.Compare(lo.Val(), token.GTR, hi.Val()) { if lo != nil && hi != nil && lo.Op() == ir.OLITERAL && hi.Op() == ir.OLITERAL && constant.Compare(lo.Val(), token.GTR, hi.Val()) {
base.Errorf("invalid slice index: %v > %v", lo, hi) base.Errorf("invalid slice index: %v > %v", lo, hi)
return false return false
@ -2213,7 +2213,7 @@ func checksliceconst(lo *ir.Node, hi *ir.Node) bool {
return true return true
} }
func checkdefergo(n *ir.Node) { func checkdefergo(n ir.Node) {
what := "defer" what := "defer"
if n.Op() == ir.OGO { if n.Op() == ir.OGO {
what = "go" what = "go"
@ -2268,7 +2268,7 @@ func checkdefergo(n *ir.Node) {
// The result of implicitstar MUST be assigned back to n, e.g. // The result of implicitstar MUST be assigned back to n, e.g.
// n.Left = implicitstar(n.Left) // n.Left = implicitstar(n.Left)
func implicitstar(n *ir.Node) *ir.Node { func implicitstar(n ir.Node) ir.Node {
// insert implicit * if needed for fixed array // insert implicit * if needed for fixed array
t := n.Type() t := n.Type()
if t == nil || !t.IsPtr() { if t == nil || !t.IsPtr() {
@ -2287,7 +2287,7 @@ func implicitstar(n *ir.Node) *ir.Node {
return n return n
} }
func onearg(n *ir.Node, f string, args ...interface{}) bool { func onearg(n ir.Node, f string, args ...interface{}) bool {
if n.Left() != nil { if n.Left() != nil {
return true return true
} }
@ -2310,7 +2310,7 @@ func onearg(n *ir.Node, f string, args ...interface{}) bool {
return true return true
} }
func twoarg(n *ir.Node) bool { func twoarg(n ir.Node) bool {
if n.Left() != nil { if n.Left() != nil {
return true return true
} }
@ -2328,7 +2328,7 @@ func twoarg(n *ir.Node) bool {
return true return true
} }
func lookdot1(errnode *ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, dostrcmp int) *types.Field { func lookdot1(errnode ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, dostrcmp int) *types.Field {
var r *types.Field var r *types.Field
for _, f := range fs.Slice() { for _, f := range fs.Slice() {
if dostrcmp != 0 && f.Sym.Name == s.Name { if dostrcmp != 0 && f.Sym.Name == s.Name {
@ -2359,7 +2359,7 @@ func lookdot1(errnode *ir.Node, s *types.Sym, t *types.Type, fs *types.Fields, d
// typecheckMethodExpr checks selector expressions (ODOT) where the // typecheckMethodExpr checks selector expressions (ODOT) where the
// base expression is a type expression (OTYPE). // base expression is a type expression (OTYPE).
func typecheckMethodExpr(n *ir.Node) (res *ir.Node) { func typecheckMethodExpr(n ir.Node) (res ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckMethodExpr", n)(&res) defer tracePrint("typecheckMethodExpr", n)(&res)
} }
@ -2447,7 +2447,7 @@ func derefall(t *types.Type) *types.Type {
return t return t
} }
func lookdot(n *ir.Node, t *types.Type, dostrcmp int) *types.Field { func lookdot(n ir.Node, t *types.Type, dostrcmp int) *types.Field {
s := n.Sym() s := n.Sym()
dowidth(t) dowidth(t)
@ -2572,7 +2572,7 @@ func hasddd(t *types.Type) bool {
} }
// typecheck assignment: type list = expression list // typecheck assignment: type list = expression list
func typecheckaste(op ir.Op, call *ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes, desc func() string) { func typecheckaste(op ir.Op, call ir.Node, isddd bool, tstruct *types.Type, nl ir.Nodes, desc func() string) {
var t *types.Type var t *types.Type
var i int var i int
@ -2583,7 +2583,7 @@ func typecheckaste(op ir.Op, call *ir.Node, isddd bool, tstruct *types.Type, nl
return return
} }
var n *ir.Node var n ir.Node
if nl.Len() == 1 { if nl.Len() == 1 {
n = nl.First() n = nl.First()
} }
@ -2774,7 +2774,7 @@ func iscomptype(t *types.Type) bool {
// pushtype adds elided type information for composite literals if // pushtype adds elided type information for composite literals if
// appropriate, and returns the resulting expression. // appropriate, and returns the resulting expression.
func pushtype(n *ir.Node, t *types.Type) *ir.Node { func pushtype(n ir.Node, t *types.Type) ir.Node {
if n == nil || n.Op() != ir.OCOMPLIT || n.Right() != nil { if n == nil || n.Op() != ir.OCOMPLIT || n.Right() != nil {
return n return n
} }
@ -2797,7 +2797,7 @@ func pushtype(n *ir.Node, t *types.Type) *ir.Node {
// The result of typecheckcomplit MUST be assigned back to n, e.g. // The result of typecheckcomplit MUST be assigned back to n, e.g.
// n.Left = typecheckcomplit(n.Left) // n.Left = typecheckcomplit(n.Left)
func typecheckcomplit(n *ir.Node) (res *ir.Node) { func typecheckcomplit(n ir.Node) (res ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckcomplit", n)(&res) defer tracePrint("typecheckcomplit", n)(&res)
} }
@ -3008,7 +3008,7 @@ func typecheckcomplit(n *ir.Node) (res *ir.Node) {
} }
// typecheckarraylit type-checks a sequence of slice/array literal elements. // typecheckarraylit type-checks a sequence of slice/array literal elements.
func typecheckarraylit(elemType *types.Type, bound int64, elts []*ir.Node, ctx string) int64 { func typecheckarraylit(elemType *types.Type, bound int64, elts []ir.Node, ctx string) int64 {
// If there are key/value pairs, create a map to keep seen // If there are key/value pairs, create a map to keep seen
// keys so we can check for duplicate indices. // keys so we can check for duplicate indices.
var indices map[int64]bool var indices map[int64]bool
@ -3023,7 +3023,7 @@ func typecheckarraylit(elemType *types.Type, bound int64, elts []*ir.Node, ctx s
for i, elt := range elts { for i, elt := range elts {
setlineno(elt) setlineno(elt)
r := elts[i] r := elts[i]
var kv *ir.Node var kv ir.Node
if elt.Op() == ir.OKEY { if elt.Op() == ir.OKEY {
elt.SetLeft(typecheck(elt.Left(), ctxExpr)) elt.SetLeft(typecheck(elt.Left(), ctxExpr))
key = indexconst(elt.Left()) key = indexconst(elt.Left())
@ -3086,7 +3086,7 @@ func nonexported(sym *types.Sym) bool {
} }
// lvalue etc // lvalue etc
func islvalue(n *ir.Node) bool { func islvalue(n ir.Node) bool {
switch n.Op() { switch n.Op() {
case ir.OINDEX: case ir.OINDEX:
if n.Left().Type() != nil && n.Left().Type().IsArray() { if n.Left().Type() != nil && n.Left().Type().IsArray() {
@ -3112,13 +3112,13 @@ func islvalue(n *ir.Node) bool {
return false return false
} }
func checklvalue(n *ir.Node, verb string) { func checklvalue(n ir.Node, verb string) {
if !islvalue(n) { if !islvalue(n) {
base.Errorf("cannot %s %v", verb, n) base.Errorf("cannot %s %v", verb, n)
} }
} }
func checkassign(stmt *ir.Node, n *ir.Node) { func checkassign(stmt ir.Node, n ir.Node) {
// Variables declared in ORANGE are assigned on every iteration. // Variables declared in ORANGE are assigned on every iteration.
if n.Name() == nil || n.Name().Defn != stmt || stmt.Op() == ir.ORANGE { if n.Name() == nil || n.Name().Defn != stmt || stmt.Op() == ir.ORANGE {
r := outervalue(n) r := outervalue(n)
@ -3156,7 +3156,7 @@ func checkassign(stmt *ir.Node, n *ir.Node) {
n.SetType(nil) n.SetType(nil)
} }
func checkassignlist(stmt *ir.Node, l ir.Nodes) { func checkassignlist(stmt ir.Node, l ir.Nodes) {
for _, n := range l.Slice() { for _, n := range l.Slice() {
checkassign(stmt, n) checkassign(stmt, n)
} }
@ -3177,7 +3177,7 @@ func checkassignlist(stmt *ir.Node, l ir.Nodes) {
// currently OK, since the only place samesafeexpr gets used on an // currently OK, since the only place samesafeexpr gets used on an
// lvalue expression is for OSLICE and OAPPEND optimizations, and it // lvalue expression is for OSLICE and OAPPEND optimizations, and it
// is correct in those settings. // is correct in those settings.
func samesafeexpr(l *ir.Node, r *ir.Node) bool { func samesafeexpr(l ir.Node, r ir.Node) bool {
if l.Op() != r.Op() || !types.Identical(l.Type(), r.Type()) { if l.Op() != r.Op() || !types.Identical(l.Type(), r.Type()) {
return false return false
} }
@ -3215,7 +3215,7 @@ func samesafeexpr(l *ir.Node, r *ir.Node) bool {
// type check assignment. // type check assignment.
// if this assignment is the definition of a var on the left side, // if this assignment is the definition of a var on the left side,
// fill in the var's type. // fill in the var's type.
func typecheckas(n *ir.Node) { func typecheckas(n ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckas", n)(nil) defer tracePrint("typecheckas", n)(nil)
} }
@ -3266,14 +3266,14 @@ func typecheckas(n *ir.Node) {
} }
} }
func checkassignto(src *types.Type, dst *ir.Node) { func checkassignto(src *types.Type, dst ir.Node) {
if op, why := assignop(src, dst.Type()); op == ir.OXXX { if op, why := assignop(src, dst.Type()); op == ir.OXXX {
base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why) base.Errorf("cannot assign %v to %L in multiple assignment%s", src, dst, why)
return return
} }
} }
func typecheckas2(n *ir.Node) { func typecheckas2(n ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckas2", n)(nil) defer tracePrint("typecheckas2", n)(nil)
} }
@ -3298,8 +3298,8 @@ func typecheckas2(n *ir.Node) {
} }
checkassignlist(n, n.List()) checkassignlist(n, n.List())
var l *ir.Node var l ir.Node
var r *ir.Node var r ir.Node
if cl == cr { if cl == cr {
// easy // easy
ls := n.List().Slice() ls := n.List().Slice()
@ -3406,7 +3406,7 @@ out:
} }
// type check function definition // type check function definition
func typecheckfunc(n *ir.Node) { func typecheckfunc(n ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckfunc", n)(nil) defer tracePrint("typecheckfunc", n)(nil)
} }
@ -3441,12 +3441,12 @@ func typecheckfunc(n *ir.Node) {
// The result of stringtoruneslit MUST be assigned back to n, e.g. // The result of stringtoruneslit MUST be assigned back to n, e.g.
// n.Left = stringtoruneslit(n.Left) // n.Left = stringtoruneslit(n.Left)
func stringtoruneslit(n *ir.Node) *ir.Node { func stringtoruneslit(n ir.Node) ir.Node {
if n.Left().Op() != ir.OLITERAL || n.Left().Val().Kind() != constant.String { if n.Left().Op() != ir.OLITERAL || n.Left().Val().Kind() != constant.String {
base.Fatalf("stringtoarraylit %v", n) base.Fatalf("stringtoarraylit %v", n)
} }
var l []*ir.Node var l []ir.Node
i := 0 i := 0
for _, r := range n.Left().StringVal() { for _, r := range n.Left().StringVal() {
l = append(l, ir.Nod(ir.OKEY, nodintconst(int64(i)), nodintconst(int64(r)))) l = append(l, ir.Nod(ir.OKEY, nodintconst(int64(i)), nodintconst(int64(r))))
@ -3459,7 +3459,7 @@ func stringtoruneslit(n *ir.Node) *ir.Node {
return nn return nn
} }
var mapqueue []*ir.Node var mapqueue []ir.Node
func checkMapKeys() { func checkMapKeys() {
for _, n := range mapqueue { for _, n := range mapqueue {
@ -3520,7 +3520,7 @@ func setUnderlying(t, underlying *types.Type) {
} }
} }
func typecheckdeftype(n *ir.Node) { func typecheckdeftype(n ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdeftype", n)(nil) defer tracePrint("typecheckdeftype", n)(nil)
} }
@ -3540,7 +3540,7 @@ func typecheckdeftype(n *ir.Node) {
} }
} }
func typecheckdef(n *ir.Node) { func typecheckdef(n ir.Node) {
if enableTrace && base.Flag.LowerT { if enableTrace && base.Flag.LowerT {
defer tracePrint("typecheckdef", n)(nil) defer tracePrint("typecheckdef", n)(nil)
} }
@ -3727,7 +3727,7 @@ ret:
n.SetWalkdef(1) n.SetWalkdef(1)
} }
func checkmake(t *types.Type, arg string, np **ir.Node) bool { func checkmake(t *types.Type, arg string, np *ir.Node) bool {
n := *np n := *np
if !n.Type().IsInteger() && n.Type().Etype != types.TIDEAL { if !n.Type().IsInteger() && n.Type().Etype != types.TIDEAL {
base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type()) base.Errorf("non-integer %s argument in make(%v) - %v", arg, t, n.Type())
@ -3759,7 +3759,7 @@ func checkmake(t *types.Type, arg string, np **ir.Node) bool {
return true return true
} }
func markbreak(n *ir.Node, implicit *ir.Node) { func markbreak(n ir.Node, implicit ir.Node) {
if n == nil { if n == nil {
return return
} }
@ -3789,7 +3789,7 @@ func markbreak(n *ir.Node, implicit *ir.Node) {
} }
} }
func markbreaklist(l ir.Nodes, implicit *ir.Node) { func markbreaklist(l ir.Nodes, implicit ir.Node) {
s := l.Slice() s := l.Slice()
for i := 0; i < len(s); i++ { for i := 0; i < len(s); i++ {
n := s[i] n := s[i]
@ -3823,7 +3823,7 @@ func isTermNodes(l ir.Nodes) bool {
// Isterminating reports whether the node n, the last one in a // Isterminating reports whether the node n, the last one in a
// statement list, is a terminating statement. // statement list, is a terminating statement.
func isTermNode(n *ir.Node) bool { func isTermNode(n ir.Node) bool {
switch n.Op() { switch n.Op() {
// NOTE: OLABEL is treated as a separate statement, // NOTE: OLABEL is treated as a separate statement,
// not a separate prefix, so skipping to the last statement // not a separate prefix, so skipping to the last statement
@ -3872,7 +3872,7 @@ func isTermNode(n *ir.Node) bool {
} }
// checkreturn makes sure that fn terminates appropriately. // checkreturn makes sure that fn terminates appropriately.
func checkreturn(fn *ir.Node) { func checkreturn(fn ir.Node) {
if fn.Type().NumResults() != 0 && fn.Body().Len() != 0 { if fn.Type().NumResults() != 0 && fn.Body().Len() != 0 {
markbreaklist(fn.Body(), nil) markbreaklist(fn.Body(), nil)
if !isTermNodes(fn.Body()) { if !isTermNodes(fn.Body()) {
@ -3881,12 +3881,12 @@ func checkreturn(fn *ir.Node) {
} }
} }
func deadcode(fn *ir.Node) { func deadcode(fn ir.Node) {
deadcodeslice(fn.PtrBody()) deadcodeslice(fn.PtrBody())
deadcodefn(fn) deadcodefn(fn)
} }
func deadcodefn(fn *ir.Node) { func deadcodefn(fn ir.Node) {
if fn.Body().Len() == 0 { if fn.Body().Len() == 0 {
return return
} }
@ -3909,7 +3909,7 @@ func deadcodefn(fn *ir.Node) {
} }
} }
fn.PtrBody().Set([]*ir.Node{ir.Nod(ir.OEMPTY, nil, nil)}) fn.PtrBody().Set([]ir.Node{ir.Nod(ir.OEMPTY, nil, nil)})
} }
func deadcodeslice(nn *ir.Nodes) { func deadcodeslice(nn *ir.Nodes) {
@ -3965,7 +3965,7 @@ func deadcodeslice(nn *ir.Nodes) {
} }
} }
func deadcodeexpr(n *ir.Node) *ir.Node { func deadcodeexpr(n ir.Node) ir.Node {
// Perform dead-code elimination on short-circuited boolean // Perform dead-code elimination on short-circuited boolean
// expressions involving constants with the intent of // expressions involving constants with the intent of
// producing a constant 'if' condition. // producing a constant 'if' condition.
@ -3995,7 +3995,7 @@ func deadcodeexpr(n *ir.Node) *ir.Node {
} }
// setTypeNode sets n to an OTYPE node representing t. // setTypeNode sets n to an OTYPE node representing t.
func setTypeNode(n *ir.Node, t *types.Type) { func setTypeNode(n ir.Node, t *types.Type) {
n.SetOp(ir.OTYPE) n.SetOp(ir.OTYPE)
n.SetType(t) n.SetType(t)
n.Type().Nod = n n.Type().Nod = n
@ -4037,12 +4037,12 @@ func curpkg() *types.Pkg {
// MethodName returns the ONAME representing the method // MethodName returns the ONAME representing the method
// referenced by expression n, which must be a method selector, // referenced by expression n, which must be a method selector,
// method expression, or method value. // method expression, or method value.
func methodExprName(n *ir.Node) *ir.Node { func methodExprName(n ir.Node) ir.Node {
return ir.AsNode(methodExprFunc(n).Nname) return ir.AsNode(methodExprFunc(n).Nname)
} }
// MethodFunc is like MethodName, but returns the types.Field instead. // MethodFunc is like MethodName, but returns the types.Field instead.
func methodExprFunc(n *ir.Node) *types.Field { func methodExprFunc(n ir.Node) *types.Field {
switch n.Op() { switch n.Op() {
case ir.ODOTMETH, ir.OMETHEXPR: case ir.ODOTMETH, ir.OMETHEXPR:
return n.Opt().(*types.Field) return n.Opt().(*types.Field)

View file

@ -10,7 +10,7 @@ import (
) )
// evalunsafe evaluates a package unsafe operation and returns the result. // evalunsafe evaluates a package unsafe operation and returns the result.
func evalunsafe(n *ir.Node) int64 { func evalunsafe(n ir.Node) int64 {
switch n.Op() { switch n.Op() {
case ir.OALIGNOF, ir.OSIZEOF: case ir.OALIGNOF, ir.OSIZEOF:
n.SetLeft(typecheck(n.Left(), ctxExpr)) n.SetLeft(typecheck(n.Left(), ctxExpr))

View file

@ -22,7 +22,7 @@ import (
const tmpstringbufsize = 32 const tmpstringbufsize = 32
const zeroValSize = 1024 // must match value of runtime/map.go:maxZero const zeroValSize = 1024 // must match value of runtime/map.go:maxZero
func walk(fn *ir.Node) { func walk(fn ir.Node) {
Curfn = fn Curfn = fn
errorsBefore := base.Errors() errorsBefore := base.Errors()
@ -81,13 +81,13 @@ func walk(fn *ir.Node) {
} }
} }
func walkstmtlist(s []*ir.Node) { func walkstmtlist(s []ir.Node) {
for i := range s { for i := range s {
s[i] = walkstmt(s[i]) s[i] = walkstmt(s[i])
} }
} }
func paramoutheap(fn *ir.Node) bool { func paramoutheap(fn ir.Node) bool {
for _, ln := range fn.Func().Dcl { for _, ln := range fn.Func().Dcl {
switch ln.Class() { switch ln.Class() {
case ir.PPARAMOUT: case ir.PPARAMOUT:
@ -106,7 +106,7 @@ func paramoutheap(fn *ir.Node) bool {
// The result of walkstmt MUST be assigned back to n, e.g. // The result of walkstmt MUST be assigned back to n, e.g.
// n.Left = walkstmt(n.Left) // n.Left = walkstmt(n.Left)
func walkstmt(n *ir.Node) *ir.Node { func walkstmt(n ir.Node) ir.Node {
if n == nil { if n == nil {
return n return n
} }
@ -275,7 +275,7 @@ func walkstmt(n *ir.Node) *ir.Node {
if (Curfn.Type().FuncType().Outnamed && n.List().Len() > 1) || paramoutheap(Curfn) { if (Curfn.Type().FuncType().Outnamed && n.List().Len() > 1) || paramoutheap(Curfn) {
// assign to the function out parameters, // assign to the function out parameters,
// so that reorder3 can fix up conflicts // so that reorder3 can fix up conflicts
var rl []*ir.Node var rl []ir.Node
for _, ln := range Curfn.Func().Dcl { for _, ln := range Curfn.Func().Dcl {
cl := ln.Class() cl := ln.Class()
@ -308,7 +308,7 @@ func walkstmt(n *ir.Node) *ir.Node {
// For each return parameter (lhs), assign the corresponding result (rhs). // For each return parameter (lhs), assign the corresponding result (rhs).
lhs := Curfn.Type().Results() lhs := Curfn.Type().Results()
rhs := n.List().Slice() rhs := n.List().Slice()
res := make([]*ir.Node, lhs.NumFields()) res := make([]ir.Node, lhs.NumFields())
for i, nl := range lhs.FieldSlice() { for i, nl := range lhs.FieldSlice() {
nname := ir.AsNode(nl.Nname) nname := ir.AsNode(nl.Nname)
if isParamHeapCopy(nname) { if isParamHeapCopy(nname) {
@ -346,20 +346,20 @@ func walkstmt(n *ir.Node) *ir.Node {
// the types expressions are calculated. // the types expressions are calculated.
// compile-time constants are evaluated. // compile-time constants are evaluated.
// complex side effects like statements are appended to init // complex side effects like statements are appended to init
func walkexprlist(s []*ir.Node, init *ir.Nodes) { func walkexprlist(s []ir.Node, init *ir.Nodes) {
for i := range s { for i := range s {
s[i] = walkexpr(s[i], init) s[i] = walkexpr(s[i], init)
} }
} }
func walkexprlistsafe(s []*ir.Node, init *ir.Nodes) { func walkexprlistsafe(s []ir.Node, init *ir.Nodes) {
for i, n := range s { for i, n := range s {
s[i] = safeexpr(n, init) s[i] = safeexpr(n, init)
s[i] = walkexpr(s[i], init) s[i] = walkexpr(s[i], init)
} }
} }
func walkexprlistcheap(s []*ir.Node, init *ir.Nodes) { func walkexprlistcheap(s []ir.Node, init *ir.Nodes) {
for i, n := range s { for i, n := range s {
s[i] = cheapexpr(n, init) s[i] = cheapexpr(n, init)
s[i] = walkexpr(s[i], init) s[i] = walkexpr(s[i], init)
@ -413,7 +413,7 @@ func convFuncName(from, to *types.Type) (fnname string, needsaddr bool) {
// The result of walkexpr MUST be assigned back to n, e.g. // The result of walkexpr MUST be assigned back to n, e.g.
// n.Left = walkexpr(n.Left, init) // n.Left = walkexpr(n.Left, init)
func walkexpr(n *ir.Node, init *ir.Nodes) *ir.Node { func walkexpr(n ir.Node, init *ir.Nodes) ir.Node {
if n == nil { if n == nil {
return n return n
} }
@ -700,7 +700,7 @@ opswitch:
r := n.Right() r := n.Right()
walkexprlistsafe(n.List().Slice(), init) walkexprlistsafe(n.List().Slice(), init)
r.SetLeft(walkexpr(r.Left(), init)) r.SetLeft(walkexpr(r.Left(), init))
var n1 *ir.Node var n1 ir.Node
if ir.IsBlank(n.List().First()) { if ir.IsBlank(n.List().First()) {
n1 = nodnil() n1 = nodnil()
} else { } else {
@ -723,7 +723,7 @@ opswitch:
t := r.Left().Type() t := r.Left().Type()
fast := mapfast(t) fast := mapfast(t)
var key *ir.Node var key ir.Node
if fast != mapslow { if fast != mapslow {
// fast versions take key by value // fast versions take key by value
key = r.Right() key = r.Right()
@ -802,7 +802,7 @@ opswitch:
} }
// typeword generates the type word of the interface value. // typeword generates the type word of the interface value.
typeword := func() *ir.Node { typeword := func() ir.Node {
if toType.IsEmptyInterface() { if toType.IsEmptyInterface() {
return typename(fromType) return typename(fromType)
} }
@ -832,7 +832,7 @@ opswitch:
// Optimize convT2{E,I} for many cases in which T is not pointer-shaped, // Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
// by using an existing addressable value identical to n.Left // by using an existing addressable value identical to n.Left
// or creating one on the stack. // or creating one on the stack.
var value *ir.Node var value ir.Node
switch { switch {
case fromType.Size() == 0: case fromType.Size() == 0:
// n.Left is zero-sized. Use zerobase. // n.Left is zero-sized. Use zerobase.
@ -918,7 +918,7 @@ opswitch:
break break
} }
var tab *ir.Node var tab ir.Node
if fromType.IsInterface() { if fromType.IsInterface() {
// convI2I // convI2I
tab = typename(toType) tab = typename(toType)
@ -1208,7 +1208,7 @@ opswitch:
hint := n.Left() hint := n.Left()
// var h *hmap // var h *hmap
var h *ir.Node var h ir.Node
if n.Esc() == EscNone { if n.Esc() == EscNone {
// Allocate hmap on stack. // Allocate hmap on stack.
@ -1494,7 +1494,7 @@ opswitch:
// Allocate a [n]byte of the right size. // Allocate a [n]byte of the right size.
t := types.NewArray(types.Types[types.TUINT8], int64(len(sc))) t := types.NewArray(types.Types[types.TUINT8], int64(len(sc)))
var a *ir.Node var a ir.Node
if n.Esc() == EscNone && len(sc) <= int(maxImplicitStackVarSize) { if n.Esc() == EscNone && len(sc) <= int(maxImplicitStackVarSize) {
a = ir.Nod(ir.OADDR, temp(t), nil) a = ir.Nod(ir.OADDR, temp(t), nil)
} else { } else {
@ -1619,7 +1619,7 @@ func markTypeUsedInInterface(t *types.Type, from *obj.LSym) {
// markUsedIfaceMethod marks that an interface method is used in the current // markUsedIfaceMethod marks that an interface method is used in the current
// function. n is OCALLINTER node. // function. n is OCALLINTER node.
func markUsedIfaceMethod(n *ir.Node) { func markUsedIfaceMethod(n ir.Node) {
ityp := n.Left().Left().Type() ityp := n.Left().Left().Type()
tsym := typenamesym(ityp).Linksym() tsym := typenamesym(ityp).Linksym()
r := obj.Addrel(Curfn.Func().LSym) r := obj.Addrel(Curfn.Func().LSym)
@ -1678,7 +1678,7 @@ func rtconvfn(src, dst *types.Type) (param, result types.EType) {
} }
// TODO(josharian): combine this with its caller and simplify // TODO(josharian): combine this with its caller and simplify
func reduceSlice(n *ir.Node) *ir.Node { func reduceSlice(n ir.Node) ir.Node {
low, high, max := n.SliceBounds() low, high, max := n.SliceBounds()
if high != nil && high.Op() == ir.OLEN && samesafeexpr(n.Left(), high.Left()) { if high != nil && high.Op() == ir.OLEN && samesafeexpr(n.Left(), high.Left()) {
// Reduce x[i:len(x)] to x[i:]. // Reduce x[i:len(x)] to x[i:].
@ -1695,7 +1695,7 @@ func reduceSlice(n *ir.Node) *ir.Node {
return n return n
} }
func ascompatee1(l *ir.Node, r *ir.Node, init *ir.Nodes) *ir.Node { func ascompatee1(l ir.Node, r ir.Node, init *ir.Nodes) ir.Node {
// convas will turn map assigns into function calls, // convas will turn map assigns into function calls,
// making it impossible for reorder3 to work. // making it impossible for reorder3 to work.
n := ir.Nod(ir.OAS, l, r) n := ir.Nod(ir.OAS, l, r)
@ -1707,7 +1707,7 @@ func ascompatee1(l *ir.Node, r *ir.Node, init *ir.Nodes) *ir.Node {
return convas(n, init) return convas(n, init)
} }
func ascompatee(op ir.Op, nl, nr []*ir.Node, init *ir.Nodes) []*ir.Node { func ascompatee(op ir.Op, nl, nr []ir.Node, init *ir.Nodes) []ir.Node {
// check assign expression list to // check assign expression list to
// an expression list. called in // an expression list. called in
// expr-list = expr-list // expr-list = expr-list
@ -1720,7 +1720,7 @@ func ascompatee(op ir.Op, nl, nr []*ir.Node, init *ir.Nodes) []*ir.Node {
nr[i1] = safeexpr(nr[i1], init) nr[i1] = safeexpr(nr[i1], init)
} }
var nn []*ir.Node var nn []ir.Node
i := 0 i := 0
for ; i < len(nl); i++ { for ; i < len(nl); i++ {
if i >= len(nr) { if i >= len(nr) {
@ -1744,7 +1744,7 @@ func ascompatee(op ir.Op, nl, nr []*ir.Node, init *ir.Nodes) []*ir.Node {
} }
// fncall reports whether assigning an rvalue of type rt to an lvalue l might involve a function call. // fncall reports whether assigning an rvalue of type rt to an lvalue l might involve a function call.
func fncall(l *ir.Node, rt *types.Type) bool { func fncall(l ir.Node, rt *types.Type) bool {
if l.HasCall() || l.Op() == ir.OINDEXMAP { if l.HasCall() || l.Op() == ir.OINDEXMAP {
return true return true
} }
@ -1758,7 +1758,7 @@ func fncall(l *ir.Node, rt *types.Type) bool {
// check assign type list to // check assign type list to
// an expression list. called in // an expression list. called in
// expr-list = func() // expr-list = func()
func ascompatet(nl ir.Nodes, nr *types.Type) []*ir.Node { func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
if nl.Len() != nr.NumFields() { if nl.Len() != nr.NumFields() {
base.Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields()) base.Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
} }
@ -1800,8 +1800,8 @@ func ascompatet(nl ir.Nodes, nr *types.Type) []*ir.Node {
} }
// package all the arguments that match a ... T parameter into a []T. // package all the arguments that match a ... T parameter into a []T.
func mkdotargslice(typ *types.Type, args []*ir.Node) *ir.Node { func mkdotargslice(typ *types.Type, args []ir.Node) ir.Node {
var n *ir.Node var n ir.Node
if len(args) == 0 { if len(args) == 0 {
n = nodnil() n = nodnil()
n.SetType(typ) n.SetType(typ)
@ -1820,7 +1820,7 @@ func mkdotargslice(typ *types.Type, args []*ir.Node) *ir.Node {
// fixVariadicCall rewrites calls to variadic functions to use an // fixVariadicCall rewrites calls to variadic functions to use an
// explicit ... argument if one is not already present. // explicit ... argument if one is not already present.
func fixVariadicCall(call *ir.Node) { func fixVariadicCall(call ir.Node) {
fntype := call.Left().Type() fntype := call.Left().Type()
if !fntype.IsVariadic() || call.IsDDD() { if !fntype.IsVariadic() || call.IsDDD() {
return return
@ -1840,7 +1840,7 @@ func fixVariadicCall(call *ir.Node) {
call.SetIsDDD(true) call.SetIsDDD(true)
} }
func walkCall(n *ir.Node, init *ir.Nodes) { func walkCall(n ir.Node, init *ir.Nodes) {
if n.Rlist().Len() != 0 { if n.Rlist().Len() != 0 {
return // already walked return // already walked
} }
@ -1853,7 +1853,7 @@ func walkCall(n *ir.Node, init *ir.Nodes) {
// If this is a method call, add the receiver at the beginning of the args. // If this is a method call, add the receiver at the beginning of the args.
if n.Op() == ir.OCALLMETH { if n.Op() == ir.OCALLMETH {
withRecv := make([]*ir.Node, len(args)+1) withRecv := make([]ir.Node, len(args)+1)
withRecv[0] = n.Left().Left() withRecv[0] = n.Left().Left()
n.Left().SetLeft(nil) n.Left().SetLeft(nil)
copy(withRecv[1:], args) copy(withRecv[1:], args)
@ -1864,7 +1864,7 @@ func walkCall(n *ir.Node, init *ir.Nodes) {
// store that argument into a temporary variable, // store that argument into a temporary variable,
// to prevent that calls from clobbering arguments already on the stack. // to prevent that calls from clobbering arguments already on the stack.
// When instrumenting, all arguments might require function calls. // When instrumenting, all arguments might require function calls.
var tempAssigns []*ir.Node var tempAssigns []ir.Node
for i, arg := range args { for i, arg := range args {
updateHasCall(arg) updateHasCall(arg)
// Determine param type. // Determine param type.
@ -1894,14 +1894,14 @@ func walkCall(n *ir.Node, init *ir.Nodes) {
} }
// generate code for print // generate code for print
func walkprint(nn *ir.Node, init *ir.Nodes) *ir.Node { func walkprint(nn ir.Node, init *ir.Nodes) ir.Node {
// Hoist all the argument evaluation up before the lock. // Hoist all the argument evaluation up before the lock.
walkexprlistcheap(nn.List().Slice(), init) walkexprlistcheap(nn.List().Slice(), init)
// For println, add " " between elements and "\n" at the end. // For println, add " " between elements and "\n" at the end.
if nn.Op() == ir.OPRINTN { if nn.Op() == ir.OPRINTN {
s := nn.List().Slice() s := nn.List().Slice()
t := make([]*ir.Node, 0, len(s)*2) t := make([]ir.Node, 0, len(s)*2)
for i, n := range s { for i, n := range s {
if i != 0 { if i != 0 {
t = append(t, nodstr(" ")) t = append(t, nodstr(" "))
@ -1914,7 +1914,7 @@ func walkprint(nn *ir.Node, init *ir.Nodes) *ir.Node {
// Collapse runs of constant strings. // Collapse runs of constant strings.
s := nn.List().Slice() s := nn.List().Slice()
t := make([]*ir.Node, 0, len(s)) t := make([]ir.Node, 0, len(s))
for i := 0; i < len(s); { for i := 0; i < len(s); {
var strs []string var strs []string
for i < len(s) && ir.IsConst(s[i], constant.String) { for i < len(s) && ir.IsConst(s[i], constant.String) {
@ -1931,7 +1931,7 @@ func walkprint(nn *ir.Node, init *ir.Nodes) *ir.Node {
} }
nn.PtrList().Set(t) nn.PtrList().Set(t)
calls := []*ir.Node{mkcall("printlock", nil, init)} calls := []ir.Node{mkcall("printlock", nil, init)}
for i, n := range nn.List().Slice() { for i, n := range nn.List().Slice() {
if n.Op() == ir.OLITERAL { if n.Op() == ir.OLITERAL {
if n.Type() == types.UntypedRune { if n.Type() == types.UntypedRune {
@ -1956,7 +1956,7 @@ func walkprint(nn *ir.Node, init *ir.Nodes) *ir.Node {
continue continue
} }
var on *ir.Node var on ir.Node
switch n.Type().Etype { switch n.Type().Etype {
case types.TINTER: case types.TINTER:
if n.Type().IsEmptyInterface() { if n.Type().IsEmptyInterface() {
@ -2037,7 +2037,7 @@ func walkprint(nn *ir.Node, init *ir.Nodes) *ir.Node {
return r return r
} }
func callnew(t *types.Type) *ir.Node { func callnew(t *types.Type) ir.Node {
dowidth(t) dowidth(t)
n := ir.Nod(ir.ONEWOBJ, typename(t), nil) n := ir.Nod(ir.ONEWOBJ, typename(t), nil)
n.SetType(types.NewPtr(t)) n.SetType(types.NewPtr(t))
@ -2048,7 +2048,7 @@ func callnew(t *types.Type) *ir.Node {
// isReflectHeaderDataField reports whether l is an expression p.Data // isReflectHeaderDataField reports whether l is an expression p.Data
// where p has type reflect.SliceHeader or reflect.StringHeader. // where p has type reflect.SliceHeader or reflect.StringHeader.
func isReflectHeaderDataField(l *ir.Node) bool { func isReflectHeaderDataField(l ir.Node) bool {
if l.Type() != types.Types[types.TUINTPTR] { if l.Type() != types.Types[types.TUINTPTR] {
return false return false
} }
@ -2069,7 +2069,7 @@ func isReflectHeaderDataField(l *ir.Node) bool {
return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader" return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
} }
func convas(n *ir.Node, init *ir.Nodes) *ir.Node { func convas(n ir.Node, init *ir.Nodes) ir.Node {
if n.Op() != ir.OAS { if n.Op() != ir.OAS {
base.Fatalf("convas: not OAS %v", n.Op()) base.Fatalf("convas: not OAS %v", n.Op())
} }
@ -2107,11 +2107,11 @@ func convas(n *ir.Node, init *ir.Nodes) *ir.Node {
// be later use of an earlier lvalue. // be later use of an earlier lvalue.
// //
// function calls have been removed. // function calls have been removed.
func reorder3(all []*ir.Node) []*ir.Node { func reorder3(all []ir.Node) []ir.Node {
// If a needed expression may be affected by an // If a needed expression may be affected by an
// earlier assignment, make an early copy of that // earlier assignment, make an early copy of that
// expression and use the copy instead. // expression and use the copy instead.
var early []*ir.Node var early []ir.Node
var mapinit ir.Nodes var mapinit ir.Nodes
for i, n := range all { for i, n := range all {
@ -2166,7 +2166,7 @@ func reorder3(all []*ir.Node) []*ir.Node {
// replace *np with that temp. // replace *np with that temp.
// The result of reorder3save MUST be assigned back to n, e.g. // The result of reorder3save MUST be assigned back to n, e.g.
// n.Left = reorder3save(n.Left, all, i, early) // n.Left = reorder3save(n.Left, all, i, early)
func reorder3save(n *ir.Node, all []*ir.Node, i int, early *[]*ir.Node) *ir.Node { func reorder3save(n ir.Node, all []ir.Node, i int, early *[]ir.Node) ir.Node {
if !aliased(n, all[:i]) { if !aliased(n, all[:i]) {
return n return n
} }
@ -2180,7 +2180,7 @@ func reorder3save(n *ir.Node, all []*ir.Node, i int, early *[]*ir.Node) *ir.Node
// what's the outer value that a write to n affects? // what's the outer value that a write to n affects?
// outer value means containing struct or array. // outer value means containing struct or array.
func outervalue(n *ir.Node) *ir.Node { func outervalue(n ir.Node) ir.Node {
for { for {
switch n.Op() { switch n.Op() {
case ir.OXDOT: case ir.OXDOT:
@ -2201,7 +2201,7 @@ func outervalue(n *ir.Node) *ir.Node {
// Is it possible that the computation of r might be // Is it possible that the computation of r might be
// affected by assignments in all? // affected by assignments in all?
func aliased(r *ir.Node, all []*ir.Node) bool { func aliased(r ir.Node, all []ir.Node) bool {
if r == nil { if r == nil {
return false return false
} }
@ -2275,7 +2275,7 @@ func aliased(r *ir.Node, all []*ir.Node) bool {
// does the evaluation of n only refer to variables // does the evaluation of n only refer to variables
// whose addresses have not been taken? // whose addresses have not been taken?
// (and no other memory) // (and no other memory)
func varexpr(n *ir.Node) bool { func varexpr(n ir.Node) bool {
if n == nil { if n == nil {
return true return true
} }
@ -2327,7 +2327,7 @@ func varexpr(n *ir.Node) bool {
} }
// is the name l mentioned in r? // is the name l mentioned in r?
func vmatch2(l *ir.Node, r *ir.Node) bool { func vmatch2(l ir.Node, r ir.Node) bool {
if r == nil { if r == nil {
return false return false
} }
@ -2356,7 +2356,7 @@ func vmatch2(l *ir.Node, r *ir.Node) bool {
// is any name mentioned in l also mentioned in r? // is any name mentioned in l also mentioned in r?
// called by sinit.go // called by sinit.go
func vmatch1(l *ir.Node, r *ir.Node) bool { func vmatch1(l ir.Node, r ir.Node) bool {
// isolate all left sides // isolate all left sides
if l == nil || r == nil { if l == nil || r == nil {
return false return false
@ -2397,8 +2397,8 @@ func vmatch1(l *ir.Node, r *ir.Node) bool {
// paramstoheap returns code to allocate memory for heap-escaped parameters // paramstoheap returns code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack. // and to copy non-result parameters' values from the stack.
func paramstoheap(params *types.Type) []*ir.Node { func paramstoheap(params *types.Type) []ir.Node {
var nn []*ir.Node var nn []ir.Node
for _, t := range params.Fields().Slice() { for _, t := range params.Fields().Slice() {
v := ir.AsNode(t.Nname) v := ir.AsNode(t.Nname)
if v != nil && v.Sym() != nil && strings.HasPrefix(v.Sym().Name, "~r") { // unnamed result if v != nil && v.Sym() != nil && strings.HasPrefix(v.Sym().Name, "~r") { // unnamed result
@ -2451,8 +2451,8 @@ func zeroResults() {
// returnsfromheap returns code to copy values for heap-escaped parameters // returnsfromheap returns code to copy values for heap-escaped parameters
// back to the stack. // back to the stack.
func returnsfromheap(params *types.Type) []*ir.Node { func returnsfromheap(params *types.Type) []ir.Node {
var nn []*ir.Node var nn []ir.Node
for _, t := range params.Fields().Slice() { for _, t := range params.Fields().Slice() {
v := ir.AsNode(t.Nname) v := ir.AsNode(t.Nname)
if v == nil { if v == nil {
@ -2481,7 +2481,7 @@ func heapmoves() {
base.Pos = lno base.Pos = lno
} }
func vmkcall(fn *ir.Node, t *types.Type, init *ir.Nodes, va []*ir.Node) *ir.Node { func vmkcall(fn ir.Node, t *types.Type, init *ir.Nodes, va []ir.Node) ir.Node {
if fn.Type() == nil || fn.Type().Etype != types.TFUNC { if fn.Type() == nil || fn.Type().Etype != types.TFUNC {
base.Fatalf("mkcall %v %v", fn, fn.Type()) base.Fatalf("mkcall %v %v", fn, fn.Type())
} }
@ -2503,15 +2503,15 @@ func vmkcall(fn *ir.Node, t *types.Type, init *ir.Nodes, va []*ir.Node) *ir.Node
return r return r
} }
func mkcall(name string, t *types.Type, init *ir.Nodes, args ...*ir.Node) *ir.Node { func mkcall(name string, t *types.Type, init *ir.Nodes, args ...ir.Node) ir.Node {
return vmkcall(syslook(name), t, init, args) return vmkcall(syslook(name), t, init, args)
} }
func mkcall1(fn *ir.Node, t *types.Type, init *ir.Nodes, args ...*ir.Node) *ir.Node { func mkcall1(fn ir.Node, t *types.Type, init *ir.Nodes, args ...ir.Node) ir.Node {
return vmkcall(fn, t, init, args) return vmkcall(fn, t, init, args)
} }
func conv(n *ir.Node, t *types.Type) *ir.Node { func conv(n ir.Node, t *types.Type) ir.Node {
if types.Identical(n.Type(), t) { if types.Identical(n.Type(), t) {
return n return n
} }
@ -2523,7 +2523,7 @@ func conv(n *ir.Node, t *types.Type) *ir.Node {
// convnop converts node n to type t using the OCONVNOP op // convnop converts node n to type t using the OCONVNOP op
// and typechecks the result with ctxExpr. // and typechecks the result with ctxExpr.
func convnop(n *ir.Node, t *types.Type) *ir.Node { func convnop(n ir.Node, t *types.Type) ir.Node {
if types.Identical(n.Type(), t) { if types.Identical(n.Type(), t) {
return n return n
} }
@ -2536,7 +2536,7 @@ func convnop(n *ir.Node, t *types.Type) *ir.Node {
// byteindex converts n, which is byte-sized, to an int used to index into an array. // byteindex converts n, which is byte-sized, to an int used to index into an array.
// We cannot use conv, because we allow converting bool to int here, // We cannot use conv, because we allow converting bool to int here,
// which is forbidden in user code. // which is forbidden in user code.
func byteindex(n *ir.Node) *ir.Node { func byteindex(n ir.Node) ir.Node {
// We cannot convert from bool to int directly. // We cannot convert from bool to int directly.
// While converting from int8 to int is possible, it would yield // While converting from int8 to int is possible, it would yield
// the wrong result for negative values. // the wrong result for negative values.
@ -2552,7 +2552,7 @@ func byteindex(n *ir.Node) *ir.Node {
return n return n
} }
func chanfn(name string, n int, t *types.Type) *ir.Node { func chanfn(name string, n int, t *types.Type) ir.Node {
if !t.IsChan() { if !t.IsChan() {
base.Fatalf("chanfn %v", t) base.Fatalf("chanfn %v", t)
} }
@ -2568,7 +2568,7 @@ func chanfn(name string, n int, t *types.Type) *ir.Node {
return fn return fn
} }
func mapfn(name string, t *types.Type) *ir.Node { func mapfn(name string, t *types.Type) ir.Node {
if !t.IsMap() { if !t.IsMap() {
base.Fatalf("mapfn %v", t) base.Fatalf("mapfn %v", t)
} }
@ -2577,7 +2577,7 @@ func mapfn(name string, t *types.Type) *ir.Node {
return fn return fn
} }
func mapfndel(name string, t *types.Type) *ir.Node { func mapfndel(name string, t *types.Type) ir.Node {
if !t.IsMap() { if !t.IsMap() {
base.Fatalf("mapfn %v", t) base.Fatalf("mapfn %v", t)
} }
@ -2636,13 +2636,13 @@ func mapfast(t *types.Type) int {
return mapslow return mapslow
} }
func writebarrierfn(name string, l *types.Type, r *types.Type) *ir.Node { func writebarrierfn(name string, l *types.Type, r *types.Type) ir.Node {
fn := syslook(name) fn := syslook(name)
fn = substArgTypes(fn, l, r) fn = substArgTypes(fn, l, r)
return fn return fn
} }
func addstr(n *ir.Node, init *ir.Nodes) *ir.Node { func addstr(n ir.Node, init *ir.Nodes) ir.Node {
// order.expr rewrote OADDSTR to have a list of strings. // order.expr rewrote OADDSTR to have a list of strings.
c := n.List().Len() c := n.List().Len()
@ -2668,7 +2668,7 @@ func addstr(n *ir.Node, init *ir.Nodes) *ir.Node {
} }
// build list of string arguments // build list of string arguments
args := []*ir.Node{buf} args := []ir.Node{buf}
for _, n2 := range n.List().Slice() { for _, n2 := range n.List().Slice() {
args = append(args, conv(n2, types.Types[types.TSTRING])) args = append(args, conv(n2, types.Types[types.TSTRING]))
} }
@ -2688,7 +2688,7 @@ func addstr(n *ir.Node, init *ir.Nodes) *ir.Node {
prealloc[slice] = prealloc[n] prealloc[slice] = prealloc[n]
} }
slice.PtrList().Set(args[1:]) // skip buf arg slice.PtrList().Set(args[1:]) // skip buf arg
args = []*ir.Node{buf, slice} args = []ir.Node{buf, slice}
slice.SetEsc(EscNone) slice.SetEsc(EscNone)
} }
@ -2702,7 +2702,7 @@ func addstr(n *ir.Node, init *ir.Nodes) *ir.Node {
return r return r
} }
func walkAppendArgs(n *ir.Node, init *ir.Nodes) { func walkAppendArgs(n ir.Node, init *ir.Nodes) {
walkexprlistsafe(n.List().Slice(), init) walkexprlistsafe(n.List().Slice(), init)
// walkexprlistsafe will leave OINDEX (s[n]) alone if both s // walkexprlistsafe will leave OINDEX (s[n]) alone if both s
@ -2728,7 +2728,7 @@ func walkAppendArgs(n *ir.Node, init *ir.Nodes) {
// s // s
// //
// l2 is allowed to be a string. // l2 is allowed to be a string.
func appendslice(n *ir.Node, init *ir.Nodes) *ir.Node { func appendslice(n ir.Node, init *ir.Nodes) ir.Node {
walkAppendArgs(n, init) walkAppendArgs(n, init)
l1 := n.List().First() l1 := n.List().First()
@ -2768,7 +2768,7 @@ func appendslice(n *ir.Node, init *ir.Nodes) *ir.Node {
nt.SetBounded(true) nt.SetBounded(true)
nodes.Append(ir.Nod(ir.OAS, s, nt)) nodes.Append(ir.Nod(ir.OAS, s, nt))
var ncopy *ir.Node var ncopy ir.Node
if elemtype.HasPointers() { if elemtype.HasPointers() {
// copy(s[len(l1):], l2) // copy(s[len(l1):], l2)
nptr1 := ir.Nod(ir.OSLICE, s, nil) nptr1 := ir.Nod(ir.OSLICE, s, nil)
@ -2828,7 +2828,7 @@ func appendslice(n *ir.Node, init *ir.Nodes) *ir.Node {
// isAppendOfMake reports whether n is of the form append(x , make([]T, y)...). // isAppendOfMake reports whether n is of the form append(x , make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked. // isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n *ir.Node) bool { func isAppendOfMake(n ir.Node) bool {
if base.Flag.N != 0 || instrumenting { if base.Flag.N != 0 || instrumenting {
return false return false
} }
@ -2887,7 +2887,7 @@ func isAppendOfMake(n *ir.Node) bool {
// } // }
// } // }
// s // s
func extendslice(n *ir.Node, init *ir.Nodes) *ir.Node { func extendslice(n ir.Node, init *ir.Nodes) ir.Node {
// isAppendOfMake made sure all possible positive values of l2 fit into an uint. // isAppendOfMake made sure all possible positive values of l2 fit into an uint.
// The case of l2 overflow when converting from e.g. uint to int is handled by an explicit // The case of l2 overflow when converting from e.g. uint to int is handled by an explicit
// check of l2 < 0 at runtime which is generated below. // check of l2 < 0 at runtime which is generated below.
@ -2900,7 +2900,7 @@ func extendslice(n *ir.Node, init *ir.Nodes) *ir.Node {
l1 := n.List().First() l1 := n.List().First()
l2 = n.List().Second() // re-read l2, as it may have been updated by walkAppendArgs l2 = n.List().Second() // re-read l2, as it may have been updated by walkAppendArgs
var nodes []*ir.Node var nodes []ir.Node
// if l2 >= 0 (likely happens), do nothing // if l2 >= 0 (likely happens), do nothing
nifneg := ir.Nod(ir.OIF, ir.Nod(ir.OGE, l2, nodintconst(0)), nil) nifneg := ir.Nod(ir.OIF, ir.Nod(ir.OGE, l2, nodintconst(0)), nil)
@ -3006,7 +3006,7 @@ func extendslice(n *ir.Node, init *ir.Nodes) *ir.Node {
// ... // ...
// } // }
// s // s
func walkappend(n *ir.Node, init *ir.Nodes, dst *ir.Node) *ir.Node { func walkappend(n ir.Node, init *ir.Nodes, dst ir.Node) ir.Node {
if !samesafeexpr(dst, n.List().First()) { if !samesafeexpr(dst, n.List().First()) {
n.List().SetFirst(safeexpr(n.List().First(), init)) n.List().SetFirst(safeexpr(n.List().First(), init))
n.List().SetFirst(walkexpr(n.List().First(), init)) n.List().SetFirst(walkexpr(n.List().First(), init))
@ -3042,7 +3042,7 @@ func walkappend(n *ir.Node, init *ir.Nodes, dst *ir.Node) *ir.Node {
return n return n
} }
var l []*ir.Node var l []ir.Node
ns := temp(nsrc.Type()) ns := temp(nsrc.Type())
l = append(l, ir.Nod(ir.OAS, ns, nsrc)) // s = src l = append(l, ir.Nod(ir.OAS, ns, nsrc)) // s = src
@ -3095,7 +3095,7 @@ func walkappend(n *ir.Node, init *ir.Nodes, dst *ir.Node) *ir.Node {
// //
// Also works if b is a string. // Also works if b is a string.
// //
func copyany(n *ir.Node, init *ir.Nodes, runtimecall bool) *ir.Node { func copyany(n ir.Node, init *ir.Nodes, runtimecall bool) ir.Node {
if n.Left().Type().Elem().HasPointers() { if n.Left().Type().Elem().HasPointers() {
Curfn.Func().SetWBPos(n.Pos()) Curfn.Func().SetWBPos(n.Pos())
fn := writebarrierfn("typedslicecopy", n.Left().Type().Elem(), n.Right().Type().Elem()) fn := writebarrierfn("typedslicecopy", n.Left().Type().Elem(), n.Right().Type().Elem())
@ -3126,7 +3126,7 @@ func copyany(n *ir.Node, init *ir.Nodes, runtimecall bool) *ir.Node {
n.SetRight(walkexpr(n.Right(), init)) n.SetRight(walkexpr(n.Right(), init))
nl := temp(n.Left().Type()) nl := temp(n.Left().Type())
nr := temp(n.Right().Type()) nr := temp(n.Right().Type())
var l []*ir.Node var l []ir.Node
l = append(l, ir.Nod(ir.OAS, nl, n.Left())) l = append(l, ir.Nod(ir.OAS, nl, n.Left()))
l = append(l, ir.Nod(ir.OAS, nr, n.Right())) l = append(l, ir.Nod(ir.OAS, nr, n.Right()))
@ -3165,7 +3165,7 @@ func copyany(n *ir.Node, init *ir.Nodes, runtimecall bool) *ir.Node {
return nlen return nlen
} }
func eqfor(t *types.Type) (n *ir.Node, needsize bool) { func eqfor(t *types.Type) (n ir.Node, needsize bool) {
// Should only arrive here with large memory or // Should only arrive here with large memory or
// a struct/array containing a non-memory field/element. // a struct/array containing a non-memory field/element.
// Small memory is handled inline, and single non-memory // Small memory is handled inline, and single non-memory
@ -3179,10 +3179,10 @@ func eqfor(t *types.Type) (n *ir.Node, needsize bool) {
sym := typesymprefix(".eq", t) sym := typesymprefix(".eq", t)
n := NewName(sym) n := NewName(sym)
setNodeNameFunc(n) setNodeNameFunc(n)
n.SetType(functype(nil, []*ir.Node{ n.SetType(functype(nil, []ir.Node{
anonfield(types.NewPtr(t)), anonfield(types.NewPtr(t)),
anonfield(types.NewPtr(t)), anonfield(types.NewPtr(t)),
}, []*ir.Node{ }, []ir.Node{
anonfield(types.Types[types.TBOOL]), anonfield(types.Types[types.TBOOL]),
})) }))
return n, false return n, false
@ -3193,7 +3193,7 @@ func eqfor(t *types.Type) (n *ir.Node, needsize bool) {
// The result of walkcompare MUST be assigned back to n, e.g. // The result of walkcompare MUST be assigned back to n, e.g.
// n.Left = walkcompare(n.Left, init) // n.Left = walkcompare(n.Left, init)
func walkcompare(n *ir.Node, init *ir.Nodes) *ir.Node { func walkcompare(n ir.Node, init *ir.Nodes) ir.Node {
if n.Left().Type().IsInterface() && n.Right().Type().IsInterface() && n.Left().Op() != ir.ONIL && n.Right().Op() != ir.ONIL { if n.Left().Type().IsInterface() && n.Right().Type().IsInterface() && n.Left().Op() != ir.ONIL && n.Right().Op() != ir.ONIL {
return walkcompareInterface(n, init) return walkcompareInterface(n, init)
} }
@ -3228,7 +3228,7 @@ func walkcompare(n *ir.Node, init *ir.Nodes) *ir.Node {
// l.tab == type(r) // l.tab == type(r)
// For non-empty interface, this is: // For non-empty interface, this is:
// l.tab != nil && l.tab._type == type(r) // l.tab != nil && l.tab._type == type(r)
var eqtype *ir.Node var eqtype ir.Node
tab := ir.Nod(ir.OITAB, l, nil) tab := ir.Nod(ir.OITAB, l, nil)
rtyp := typename(r.Type()) rtyp := typename(r.Type())
if l.Type().IsEmptyInterface() { if l.Type().IsEmptyInterface() {
@ -3354,8 +3354,8 @@ func walkcompare(n *ir.Node, init *ir.Nodes) *ir.Node {
if n.Op() == ir.ONE { if n.Op() == ir.ONE {
andor = ir.OOROR andor = ir.OOROR
} }
var expr *ir.Node var expr ir.Node
compare := func(el, er *ir.Node) { compare := func(el, er ir.Node) {
a := ir.Nod(n.Op(), el, er) a := ir.Nod(n.Op(), el, er)
if expr == nil { if expr == nil {
expr = a expr = a
@ -3447,7 +3447,7 @@ func walkcompare(n *ir.Node, init *ir.Nodes) *ir.Node {
return n return n
} }
func tracecmpArg(n *ir.Node, t *types.Type, init *ir.Nodes) *ir.Node { func tracecmpArg(n ir.Node, t *types.Type, init *ir.Nodes) ir.Node {
// Ugly hack to avoid "constant -1 overflows uintptr" errors, etc. // Ugly hack to avoid "constant -1 overflows uintptr" errors, etc.
if n.Op() == ir.OLITERAL && n.Type().IsSigned() && n.Int64Val() < 0 { if n.Op() == ir.OLITERAL && n.Type().IsSigned() && n.Int64Val() < 0 {
n = copyexpr(n, n.Type(), init) n = copyexpr(n, n.Type(), init)
@ -3456,11 +3456,11 @@ func tracecmpArg(n *ir.Node, t *types.Type, init *ir.Nodes) *ir.Node {
return conv(n, t) return conv(n, t)
} }
func walkcompareInterface(n *ir.Node, init *ir.Nodes) *ir.Node { func walkcompareInterface(n ir.Node, init *ir.Nodes) ir.Node {
n.SetRight(cheapexpr(n.Right(), init)) n.SetRight(cheapexpr(n.Right(), init))
n.SetLeft(cheapexpr(n.Left(), init)) n.SetLeft(cheapexpr(n.Left(), init))
eqtab, eqdata := eqinterface(n.Left(), n.Right()) eqtab, eqdata := eqinterface(n.Left(), n.Right())
var cmp *ir.Node var cmp ir.Node
if n.Op() == ir.OEQ { if n.Op() == ir.OEQ {
cmp = ir.Nod(ir.OANDAND, eqtab, eqdata) cmp = ir.Nod(ir.OANDAND, eqtab, eqdata)
} else { } else {
@ -3470,9 +3470,9 @@ func walkcompareInterface(n *ir.Node, init *ir.Nodes) *ir.Node {
return finishcompare(n, cmp, init) return finishcompare(n, cmp, init)
} }
func walkcompareString(n *ir.Node, init *ir.Nodes) *ir.Node { func walkcompareString(n ir.Node, init *ir.Nodes) ir.Node {
// Rewrite comparisons to short constant strings as length+byte-wise comparisons. // Rewrite comparisons to short constant strings as length+byte-wise comparisons.
var cs, ncs *ir.Node // const string, non-const string var cs, ncs ir.Node // const string, non-const string
switch { switch {
case ir.IsConst(n.Left(), constant.String) && ir.IsConst(n.Right(), constant.String): case ir.IsConst(n.Left(), constant.String) && ir.IsConst(n.Right(), constant.String):
// ignore; will be constant evaluated // ignore; will be constant evaluated
@ -3570,7 +3570,7 @@ func walkcompareString(n *ir.Node, init *ir.Nodes) *ir.Node {
} }
} }
var r *ir.Node var r ir.Node
if n.Op() == ir.OEQ || n.Op() == ir.ONE { if n.Op() == ir.OEQ || n.Op() == ir.ONE {
// prepare for rewrite below // prepare for rewrite below
n.SetLeft(cheapexpr(n.Left(), init)) n.SetLeft(cheapexpr(n.Left(), init))
@ -3597,7 +3597,7 @@ func walkcompareString(n *ir.Node, init *ir.Nodes) *ir.Node {
// The result of finishcompare MUST be assigned back to n, e.g. // The result of finishcompare MUST be assigned back to n, e.g.
// n.Left = finishcompare(n.Left, x, r, init) // n.Left = finishcompare(n.Left, x, r, init)
func finishcompare(n, r *ir.Node, init *ir.Nodes) *ir.Node { func finishcompare(n, r ir.Node, init *ir.Nodes) ir.Node {
r = typecheck(r, ctxExpr) r = typecheck(r, ctxExpr)
r = conv(r, n.Type()) r = conv(r, n.Type())
r = walkexpr(r, init) r = walkexpr(r, init)
@ -3605,7 +3605,7 @@ func finishcompare(n, r *ir.Node, init *ir.Nodes) *ir.Node {
} }
// return 1 if integer n must be in range [0, max), 0 otherwise // return 1 if integer n must be in range [0, max), 0 otherwise
func bounded(n *ir.Node, max int64) bool { func bounded(n ir.Node, max int64) bool {
if n.Type() == nil || !n.Type().IsInteger() { if n.Type() == nil || !n.Type().IsInteger() {
return false return false
} }
@ -3672,7 +3672,7 @@ func bounded(n *ir.Node, max int64) bool {
} }
// usemethod checks interface method calls for uses of reflect.Type.Method. // usemethod checks interface method calls for uses of reflect.Type.Method.
func usemethod(n *ir.Node) { func usemethod(n ir.Node) {
t := n.Left().Type() t := n.Left().Type()
// Looking for either of: // Looking for either of:
@ -3717,7 +3717,7 @@ func usemethod(n *ir.Node) {
} }
} }
func usefield(n *ir.Node) { func usefield(n ir.Node) {
if objabi.Fieldtrack_enabled == 0 { if objabi.Fieldtrack_enabled == 0 {
return return
} }
@ -3777,7 +3777,7 @@ func candiscardlist(l ir.Nodes) bool {
return true return true
} }
func candiscard(n *ir.Node) bool { func candiscard(n ir.Node) bool {
if n == nil { if n == nil {
return true return true
} }
@ -3891,7 +3891,7 @@ var wrapCall_prgen int
// The result of wrapCall MUST be assigned back to n, e.g. // The result of wrapCall MUST be assigned back to n, e.g.
// n.Left = wrapCall(n.Left, init) // n.Left = wrapCall(n.Left, init)
func wrapCall(n *ir.Node, init *ir.Nodes) *ir.Node { func wrapCall(n ir.Node, init *ir.Nodes) ir.Node {
if n.Init().Len() != 0 { if n.Init().Len() != 0 {
walkstmtlist(n.Init().Slice()) walkstmtlist(n.Init().Slice())
init.AppendNodes(n.PtrInit()) init.AppendNodes(n.PtrInit())
@ -3909,7 +3909,7 @@ func wrapCall(n *ir.Node, init *ir.Nodes) *ir.Node {
} }
// origArgs keeps track of what argument is uintptr-unsafe/unsafe-uintptr conversion. // origArgs keeps track of what argument is uintptr-unsafe/unsafe-uintptr conversion.
origArgs := make([]*ir.Node, n.List().Len()) origArgs := make([]ir.Node, n.List().Len())
t := ir.Nod(ir.OTFUNC, nil, nil) t := ir.Nod(ir.OTFUNC, nil, nil)
for i, arg := range n.List().Slice() { for i, arg := range n.List().Slice() {
s := lookupN("a", i) s := lookupN("a", i)
@ -3962,7 +3962,7 @@ func wrapCall(n *ir.Node, init *ir.Nodes) *ir.Node {
// type syntax expression n.Type. // type syntax expression n.Type.
// The result of substArgTypes MUST be assigned back to old, e.g. // The result of substArgTypes MUST be assigned back to old, e.g.
// n.Left = substArgTypes(n.Left, t1, t2) // n.Left = substArgTypes(n.Left, t1, t2)
func substArgTypes(old *ir.Node, types_ ...*types.Type) *ir.Node { func substArgTypes(old ir.Node, types_ ...*types.Type) ir.Node {
n := ir.Copy(old) n := ir.Copy(old)
for _, t := range types_ { for _, t := range types_ {
@ -3992,11 +3992,11 @@ func canMergeLoads() bool {
// isRuneCount reports whether n is of the form len([]rune(string)). // isRuneCount reports whether n is of the form len([]rune(string)).
// These are optimized into a call to runtime.countrunes. // These are optimized into a call to runtime.countrunes.
func isRuneCount(n *ir.Node) bool { func isRuneCount(n ir.Node) bool {
return base.Flag.N == 0 && !instrumenting && n.Op() == ir.OLEN && n.Left().Op() == ir.OSTR2RUNES return base.Flag.N == 0 && !instrumenting && n.Op() == ir.OLEN && n.Left().Op() == ir.OSTR2RUNES
} }
func walkCheckPtrAlignment(n *ir.Node, init *ir.Nodes, count *ir.Node) *ir.Node { func walkCheckPtrAlignment(n ir.Node, init *ir.Nodes, count ir.Node) ir.Node {
if !n.Type().IsPtr() { if !n.Type().IsPtr() {
base.Fatalf("expected pointer type: %v", n.Type()) base.Fatalf("expected pointer type: %v", n.Type())
} }
@ -4024,7 +4024,7 @@ func walkCheckPtrAlignment(n *ir.Node, init *ir.Nodes, count *ir.Node) *ir.Node
var walkCheckPtrArithmeticMarker byte var walkCheckPtrArithmeticMarker byte
func walkCheckPtrArithmetic(n *ir.Node, init *ir.Nodes) *ir.Node { func walkCheckPtrArithmetic(n ir.Node, init *ir.Nodes) ir.Node {
// Calling cheapexpr(n, init) below leads to a recursive call // Calling cheapexpr(n, init) below leads to a recursive call
// to walkexpr, which leads us back here again. Use n.Opt to // to walkexpr, which leads us back here again. Use n.Opt to
// prevent infinite loops. // prevent infinite loops.
@ -4055,9 +4055,9 @@ func walkCheckPtrArithmetic(n *ir.Node, init *ir.Nodes) *ir.Node {
// "It is valid both to add and to subtract offsets from a // "It is valid both to add and to subtract offsets from a
// pointer in this way. It is also valid to use &^ to round // pointer in this way. It is also valid to use &^ to round
// pointers, usually for alignment." // pointers, usually for alignment."
var originals []*ir.Node var originals []ir.Node
var walk func(n *ir.Node) var walk func(n ir.Node)
walk = func(n *ir.Node) { walk = func(n ir.Node) {
switch n.Op() { switch n.Op() {
case ir.OADD: case ir.OADD:
walk(n.Left()) walk(n.Left())
@ -4088,6 +4088,6 @@ func walkCheckPtrArithmetic(n *ir.Node, init *ir.Nodes) *ir.Node {
// checkPtr reports whether pointer checking should be enabled for // checkPtr reports whether pointer checking should be enabled for
// function fn at a given level. See debugHelpFooter for defined // function fn at a given level. See debugHelpFooter for defined
// levels. // levels.
func checkPtr(fn *ir.Node, level int) bool { func checkPtr(fn ir.Node, level int) bool {
return base.Debug.Checkptr >= level && fn.Func().Pragma&ir.NoCheckPtr == 0 return base.Debug.Checkptr >= level && fn.Func().Pragma&ir.NoCheckPtr == 0
} }

View file

@ -200,7 +200,7 @@ func (p *dumper) dump(x reflect.Value, depth int) {
typ := x.Type() typ := x.Type()
isNode := false isNode := false
if n, ok := x.Interface().(Node); ok { if n, ok := x.Interface().(node); ok {
isNode = true isNode = true
p.printf("%s %s {", n.op.String(), p.addr(x)) p.printf("%s %s {", n.op.String(), p.addr(x))
} else { } else {

View file

@ -243,7 +243,7 @@ func (o Op) oconv(s fmt.State, flag FmtFlag, mode FmtMode) {
type FmtMode int type FmtMode int
type fmtNode struct { type fmtNode struct {
x *Node x Node
m FmtMode m FmtMode
} }
@ -277,11 +277,11 @@ type fmtNodes struct {
func (f *fmtNodes) Format(s fmt.State, verb rune) { f.x.format(s, verb, f.m) } func (f *fmtNodes) Format(s fmt.State, verb rune) { f.x.format(s, verb, f.m) }
func (n *Node) Format(s fmt.State, verb rune) { func (n *node) Format(s fmt.State, verb rune) {
FmtNode(n, s, verb) FmtNode(n, s, verb)
} }
func FmtNode(n *Node, s fmt.State, verb rune) { func FmtNode(n Node, s fmt.State, verb rune) {
nodeFormat(n, s, verb, FErr) nodeFormat(n, s, verb, FErr)
} }
@ -311,7 +311,7 @@ func (m FmtMode) prepareArgs(args []interface{}) {
switch arg := arg.(type) { switch arg := arg.(type) {
case Op: case Op:
args[i] = &fmtOp{arg, m} args[i] = &fmtOp{arg, m}
case *Node: case Node:
args[i] = &fmtNode{arg, m} args[i] = &fmtNode{arg, m}
case nil: case nil:
args[i] = &fmtNode{nil, m} // assume this was a node interface args[i] = &fmtNode{nil, m} // assume this was a node interface
@ -329,7 +329,7 @@ func (m FmtMode) prepareArgs(args []interface{}) {
} }
} }
func nodeFormat(n *Node, s fmt.State, verb rune, mode FmtMode) { func nodeFormat(n Node, s fmt.State, verb rune, mode FmtMode) {
switch verb { switch verb {
case 'v', 'S', 'L': case 'v', 'S', 'L':
nconvFmt(n, s, fmtFlag(s, verb), mode) nconvFmt(n, s, fmtFlag(s, verb), mode)
@ -343,10 +343,10 @@ func nodeFormat(n *Node, s fmt.State, verb rune, mode FmtMode) {
} }
// EscFmt is set by the escape analysis code to add escape analysis details to the node print. // EscFmt is set by the escape analysis code to add escape analysis details to the node print.
var EscFmt func(n *Node, short bool) string var EscFmt func(n Node, short bool) string
// *Node details // *Node details
func jconvFmt(n *Node, s fmt.State, flag FmtFlag) { func jconvFmt(n Node, s fmt.State, flag FmtFlag) {
short := flag&FmtShort != 0 short := flag&FmtShort != 0
// Useful to see which nodes in an AST printout are actually identical // Useful to see which nodes in an AST printout are actually identical
@ -894,7 +894,7 @@ func StmtWithInit(op Op) bool {
return false return false
} }
func stmtFmt(n *Node, s fmt.State, mode FmtMode) { func stmtFmt(n Node, s fmt.State, mode FmtMode) {
// some statements allow for an init, but at most one, // some statements allow for an init, but at most one,
// but we may have an arbitrary number added, eg by typecheck // but we may have an arbitrary number added, eg by typecheck
// and inlining. If it doesn't fit the syntax, emit an enclosing // and inlining. If it doesn't fit the syntax, emit an enclosing
@ -1194,7 +1194,7 @@ var OpPrec = []int{
OEND: 0, OEND: 0,
} }
func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) { func exprFmt(n Node, s fmt.State, prec int, mode FmtMode) {
for n != nil && n.Implicit() && (n.Op() == ODEREF || n.Op() == OADDR) { for n != nil && n.Implicit() && (n.Op() == ODEREF || n.Op() == OADDR) {
n = n.Left() n = n.Left()
} }
@ -1556,7 +1556,7 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
} }
} }
func nodeFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) { func nodeFmt(n Node, s fmt.State, flag FmtFlag, mode FmtMode) {
t := n.Type() t := n.Type()
// We almost always want the original. // We almost always want the original.
@ -1586,7 +1586,7 @@ func nodeFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
exprFmt(n, s, 0, mode) exprFmt(n, s, 0, mode)
} }
func nodeDumpFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) { func nodeDumpFmt(n Node, s fmt.State, flag FmtFlag, mode FmtMode) {
recur := flag&FmtShort == 0 recur := flag&FmtShort == 0
if recur { if recur {
@ -1794,12 +1794,12 @@ func typeFormat(t *types.Type, s fmt.State, verb rune, mode FmtMode) {
} }
} }
func (n *Node) String() string { return fmt.Sprint(n) } func (n *node) String() string { return fmt.Sprint(n) }
func modeString(n *Node, mode FmtMode) string { return mode.Sprint(n) } func modeString(n Node, mode FmtMode) string { return mode.Sprint(n) }
// "%L" suffix with "(type %T)" where possible // "%L" suffix with "(type %T)" where possible
// "%+S" in debug mode, don't recurse, no multiline output // "%+S" in debug mode, don't recurse, no multiline output
func nconvFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) { func nconvFmt(n Node, s fmt.State, flag FmtFlag, mode FmtMode) {
if n == nil { if n == nil {
fmt.Fprint(s, "<N>") fmt.Fprint(s, "<N>")
return return
@ -1866,7 +1866,7 @@ func FDumpList(w io.Writer, s string, l Nodes) {
fmt.Fprintf(w, "%s%+v\n", s, l) fmt.Fprintf(w, "%s%+v\n", s, l)
} }
func Dump(s string, n *Node) { func Dump(s string, n Node) {
fmt.Printf("%s [%p]%+v\n", s, n, n) fmt.Printf("%s [%p]%+v\n", s, n, n)
} }
@ -1911,6 +1911,6 @@ func InstallTypeFormats() {
// Line returns n's position as a string. If n has been inlined, // Line returns n's position as a string. If n has been inlined,
// it uses the outermost position where n has been inlined. // it uses the outermost position where n has been inlined.
func Line(n *Node) string { func Line(n Node) string {
return base.FmtPos(n.Pos()) return base.FmtPos(n.Pos())
} }

View file

@ -20,7 +20,7 @@ import (
) )
// A Node is the abstract interface to an IR node. // A Node is the abstract interface to an IR node.
type INode interface { type Node interface {
// Formatting // Formatting
Format(s fmt.State, verb rune) Format(s fmt.State, verb rune)
String() string String() string
@ -30,19 +30,19 @@ type INode interface {
SetPos(x src.XPos) SetPos(x src.XPos)
// For making copies. Mainly used by Copy and SepCopy. // For making copies. Mainly used by Copy and SepCopy.
RawCopy() *Node RawCopy() Node
// Abstract graph structure, for generic traversals. // Abstract graph structure, for generic traversals.
Op() Op Op() Op
SetOp(x Op) SetOp(x Op)
Orig() *Node Orig() Node
SetOrig(x *Node) SetOrig(x Node)
SubOp() Op SubOp() Op
SetSubOp(x Op) SetSubOp(x Op)
Left() *Node Left() Node
SetLeft(x *Node) SetLeft(x Node)
Right() *Node Right() Node
SetRight(x *Node) SetRight(x Node)
Init() Nodes Init() Nodes
PtrInit() *Nodes PtrInit() *Nodes
SetInit(x Nodes) SetInit(x Nodes)
@ -71,8 +71,8 @@ type INode interface {
SetClass(x Class) SetClass(x Class)
Likely() bool Likely() bool
SetLikely(x bool) SetLikely(x bool)
SliceBounds() (low, high, max *Node) SliceBounds() (low, high, max Node)
SetSliceBounds(low, high, max *Node) SetSliceBounds(low, high, max Node)
Iota() int64 Iota() int64
SetIota(x int64) SetIota(x int64)
Colas() bool Colas() bool
@ -130,17 +130,17 @@ type INode interface {
CanBeAnSSASym() CanBeAnSSASym()
} }
var _ INode = (*Node)(nil) var _ Node = (*node)(nil)
// A Node is a single node in the syntax tree. // A Node is a single node in the syntax tree.
// Actually the syntax tree is a syntax DAG, because there is only one // Actually the syntax tree is a syntax DAG, because there is only one
// node with Op=ONAME for a given instance of a variable x. // node with Op=ONAME for a given instance of a variable x.
// The same is true for Op=OTYPE and Op=OLITERAL. See Node.mayBeShared. // The same is true for Op=OTYPE and Op=OLITERAL. See Node.mayBeShared.
type Node struct { type node struct {
// Tree structure. // Tree structure.
// Generic recursive walks should follow these fields. // Generic recursive walks should follow these fields.
left *Node left Node
right *Node right Node
init Nodes init Nodes
body Nodes body Nodes
list Nodes list Nodes
@ -148,7 +148,7 @@ type Node struct {
// most nodes // most nodes
typ *types.Type typ *types.Type
orig *Node // original form, for printing, and tracking copies of ONAMEs orig Node // original form, for printing, and tracking copies of ONAMEs
// func // func
fn *Func fn *Func
@ -179,46 +179,46 @@ type Node struct {
aux uint8 aux uint8
} }
func (n *Node) Left() *Node { return n.left } func (n *node) Left() Node { return n.left }
func (n *Node) SetLeft(x *Node) { n.left = x } func (n *node) SetLeft(x Node) { n.left = x }
func (n *Node) Right() *Node { return n.right } func (n *node) Right() Node { return n.right }
func (n *Node) SetRight(x *Node) { n.right = x } func (n *node) SetRight(x Node) { n.right = x }
func (n *Node) Orig() *Node { return n.orig } func (n *node) Orig() Node { return n.orig }
func (n *Node) SetOrig(x *Node) { n.orig = x } func (n *node) SetOrig(x Node) { n.orig = x }
func (n *Node) Type() *types.Type { return n.typ } func (n *node) Type() *types.Type { return n.typ }
func (n *Node) SetType(x *types.Type) { n.typ = x } func (n *node) SetType(x *types.Type) { n.typ = x }
func (n *Node) Func() *Func { return n.fn } func (n *node) Func() *Func { return n.fn }
func (n *Node) SetFunc(x *Func) { n.fn = x } func (n *node) SetFunc(x *Func) { n.fn = x }
func (n *Node) Name() *Name { return n.name } func (n *node) Name() *Name { return n.name }
func (n *Node) SetName(x *Name) { n.name = x } func (n *node) SetName(x *Name) { n.name = x }
func (n *Node) Sym() *types.Sym { return n.sym } func (n *node) Sym() *types.Sym { return n.sym }
func (n *Node) SetSym(x *types.Sym) { n.sym = x } func (n *node) SetSym(x *types.Sym) { n.sym = x }
func (n *Node) Pos() src.XPos { return n.pos } func (n *node) Pos() src.XPos { return n.pos }
func (n *Node) SetPos(x src.XPos) { n.pos = x } func (n *node) SetPos(x src.XPos) { n.pos = x }
func (n *Node) Offset() int64 { return n.offset } func (n *node) Offset() int64 { return n.offset }
func (n *Node) SetOffset(x int64) { n.offset = x } func (n *node) SetOffset(x int64) { n.offset = x }
func (n *Node) Esc() uint16 { return n.esc } func (n *node) Esc() uint16 { return n.esc }
func (n *Node) SetEsc(x uint16) { n.esc = x } func (n *node) SetEsc(x uint16) { n.esc = x }
func (n *Node) Op() Op { return n.op } func (n *node) Op() Op { return n.op }
func (n *Node) SetOp(x Op) { n.op = x } func (n *node) SetOp(x Op) { n.op = x }
func (n *Node) Init() Nodes { return n.init } func (n *node) Init() Nodes { return n.init }
func (n *Node) SetInit(x Nodes) { n.init = x } func (n *node) SetInit(x Nodes) { n.init = x }
func (n *Node) PtrInit() *Nodes { return &n.init } func (n *node) PtrInit() *Nodes { return &n.init }
func (n *Node) Body() Nodes { return n.body } func (n *node) Body() Nodes { return n.body }
func (n *Node) SetBody(x Nodes) { n.body = x } func (n *node) SetBody(x Nodes) { n.body = x }
func (n *Node) PtrBody() *Nodes { return &n.body } func (n *node) PtrBody() *Nodes { return &n.body }
func (n *Node) List() Nodes { return n.list } func (n *node) List() Nodes { return n.list }
func (n *Node) SetList(x Nodes) { n.list = x } func (n *node) SetList(x Nodes) { n.list = x }
func (n *Node) PtrList() *Nodes { return &n.list } func (n *node) PtrList() *Nodes { return &n.list }
func (n *Node) Rlist() Nodes { return n.rlist } func (n *node) Rlist() Nodes { return n.rlist }
func (n *Node) SetRlist(x Nodes) { n.rlist = x } func (n *node) SetRlist(x Nodes) { n.rlist = x }
func (n *Node) PtrRlist() *Nodes { return &n.rlist } func (n *node) PtrRlist() *Nodes { return &n.rlist }
func (n *Node) ResetAux() { func (n *node) ResetAux() {
n.aux = 0 n.aux = 0
} }
func (n *Node) SubOp() Op { func (n *node) SubOp() Op {
switch n.Op() { switch n.Op() {
case OASOP, ONAME: case OASOP, ONAME:
default: default:
@ -227,7 +227,7 @@ func (n *Node) SubOp() Op {
return Op(n.aux) return Op(n.aux)
} }
func (n *Node) SetSubOp(op Op) { func (n *node) SetSubOp(op Op) {
switch n.Op() { switch n.Op() {
case OASOP, ONAME: case OASOP, ONAME:
default: default:
@ -236,14 +236,14 @@ func (n *Node) SetSubOp(op Op) {
n.aux = uint8(op) n.aux = uint8(op)
} }
func (n *Node) IndexMapLValue() bool { func (n *node) IndexMapLValue() bool {
if n.Op() != OINDEXMAP { if n.Op() != OINDEXMAP {
base.Fatalf("unexpected op: %v", n.Op()) base.Fatalf("unexpected op: %v", n.Op())
} }
return n.aux != 0 return n.aux != 0
} }
func (n *Node) SetIndexMapLValue(b bool) { func (n *node) SetIndexMapLValue(b bool) {
if n.Op() != OINDEXMAP { if n.Op() != OINDEXMAP {
base.Fatalf("unexpected op: %v", n.Op()) base.Fatalf("unexpected op: %v", n.Op())
} }
@ -254,28 +254,28 @@ func (n *Node) SetIndexMapLValue(b bool) {
} }
} }
func (n *Node) TChanDir() types.ChanDir { func (n *node) TChanDir() types.ChanDir {
if n.Op() != OTCHAN { if n.Op() != OTCHAN {
base.Fatalf("unexpected op: %v", n.Op()) base.Fatalf("unexpected op: %v", n.Op())
} }
return types.ChanDir(n.aux) return types.ChanDir(n.aux)
} }
func (n *Node) SetTChanDir(dir types.ChanDir) { func (n *node) SetTChanDir(dir types.ChanDir) {
if n.Op() != OTCHAN { if n.Op() != OTCHAN {
base.Fatalf("unexpected op: %v", n.Op()) base.Fatalf("unexpected op: %v", n.Op())
} }
n.aux = uint8(dir) n.aux = uint8(dir)
} }
func IsSynthetic(n *Node) bool { func IsSynthetic(n Node) bool {
name := n.Sym().Name name := n.Sym().Name
return name[0] == '.' || name[0] == '~' return name[0] == '.' || name[0] == '~'
} }
// IsAutoTmp indicates if n was created by the compiler as a temporary, // IsAutoTmp indicates if n was created by the compiler as a temporary,
// based on the setting of the .AutoTemp flag in n's Name. // based on the setting of the .AutoTemp flag in n's Name.
func IsAutoTmp(n *Node) bool { func IsAutoTmp(n Node) bool {
if n == nil || n.Op() != ONAME { if n == nil || n.Op() != ONAME {
return false return false
} }
@ -308,49 +308,49 @@ const (
_, nodeEmbedded // ODCLFIELD embedded type _, nodeEmbedded // ODCLFIELD embedded type
) )
func (n *Node) Class() Class { return Class(n.flags.get3(nodeClass)) } func (n *node) Class() Class { return Class(n.flags.get3(nodeClass)) }
func (n *Node) Walkdef() uint8 { return n.flags.get2(nodeWalkdef) } func (n *node) Walkdef() uint8 { return n.flags.get2(nodeWalkdef) }
func (n *Node) Typecheck() uint8 { return n.flags.get2(nodeTypecheck) } func (n *node) Typecheck() uint8 { return n.flags.get2(nodeTypecheck) }
func (n *Node) Initorder() uint8 { return n.flags.get2(nodeInitorder) } func (n *node) Initorder() uint8 { return n.flags.get2(nodeInitorder) }
func (n *Node) HasBreak() bool { return n.flags&nodeHasBreak != 0 } func (n *node) HasBreak() bool { return n.flags&nodeHasBreak != 0 }
func (n *Node) NoInline() bool { return n.flags&nodeNoInline != 0 } func (n *node) NoInline() bool { return n.flags&nodeNoInline != 0 }
func (n *Node) Implicit() bool { return n.flags&nodeImplicit != 0 } func (n *node) Implicit() bool { return n.flags&nodeImplicit != 0 }
func (n *Node) IsDDD() bool { return n.flags&nodeIsDDD != 0 } func (n *node) IsDDD() bool { return n.flags&nodeIsDDD != 0 }
func (n *Node) Diag() bool { return n.flags&nodeDiag != 0 } func (n *node) Diag() bool { return n.flags&nodeDiag != 0 }
func (n *Node) Colas() bool { return n.flags&nodeColas != 0 } func (n *node) Colas() bool { return n.flags&nodeColas != 0 }
func (n *Node) NonNil() bool { return n.flags&nodeNonNil != 0 } func (n *node) NonNil() bool { return n.flags&nodeNonNil != 0 }
func (n *Node) Transient() bool { return n.flags&nodeTransient != 0 } func (n *node) Transient() bool { return n.flags&nodeTransient != 0 }
func (n *Node) Bounded() bool { return n.flags&nodeBounded != 0 } func (n *node) Bounded() bool { return n.flags&nodeBounded != 0 }
func (n *Node) HasCall() bool { return n.flags&nodeHasCall != 0 } func (n *node) HasCall() bool { return n.flags&nodeHasCall != 0 }
func (n *Node) Likely() bool { return n.flags&nodeLikely != 0 } func (n *node) Likely() bool { return n.flags&nodeLikely != 0 }
func (n *Node) HasVal() bool { return n.flags&nodeHasVal != 0 } func (n *node) HasVal() bool { return n.flags&nodeHasVal != 0 }
func (n *Node) HasOpt() bool { return n.flags&nodeHasOpt != 0 } func (n *node) HasOpt() bool { return n.flags&nodeHasOpt != 0 }
func (n *Node) Embedded() bool { return n.flags&nodeEmbedded != 0 } func (n *node) Embedded() bool { return n.flags&nodeEmbedded != 0 }
func (n *Node) SetClass(b Class) { n.flags.set3(nodeClass, uint8(b)) } func (n *node) SetClass(b Class) { n.flags.set3(nodeClass, uint8(b)) }
func (n *Node) SetWalkdef(b uint8) { n.flags.set2(nodeWalkdef, b) } func (n *node) SetWalkdef(b uint8) { n.flags.set2(nodeWalkdef, b) }
func (n *Node) SetTypecheck(b uint8) { n.flags.set2(nodeTypecheck, b) } func (n *node) SetTypecheck(b uint8) { n.flags.set2(nodeTypecheck, b) }
func (n *Node) SetInitorder(b uint8) { n.flags.set2(nodeInitorder, b) } func (n *node) SetInitorder(b uint8) { n.flags.set2(nodeInitorder, b) }
func (n *Node) SetHasBreak(b bool) { n.flags.set(nodeHasBreak, b) } func (n *node) SetHasBreak(b bool) { n.flags.set(nodeHasBreak, b) }
func (n *Node) SetNoInline(b bool) { n.flags.set(nodeNoInline, b) } func (n *node) SetNoInline(b bool) { n.flags.set(nodeNoInline, b) }
func (n *Node) SetImplicit(b bool) { n.flags.set(nodeImplicit, b) } func (n *node) SetImplicit(b bool) { n.flags.set(nodeImplicit, b) }
func (n *Node) SetIsDDD(b bool) { n.flags.set(nodeIsDDD, b) } func (n *node) SetIsDDD(b bool) { n.flags.set(nodeIsDDD, b) }
func (n *Node) SetDiag(b bool) { n.flags.set(nodeDiag, b) } func (n *node) SetDiag(b bool) { n.flags.set(nodeDiag, b) }
func (n *Node) SetColas(b bool) { n.flags.set(nodeColas, b) } func (n *node) SetColas(b bool) { n.flags.set(nodeColas, b) }
func (n *Node) SetTransient(b bool) { n.flags.set(nodeTransient, b) } func (n *node) SetTransient(b bool) { n.flags.set(nodeTransient, b) }
func (n *Node) SetHasCall(b bool) { n.flags.set(nodeHasCall, b) } func (n *node) SetHasCall(b bool) { n.flags.set(nodeHasCall, b) }
func (n *Node) SetLikely(b bool) { n.flags.set(nodeLikely, b) } func (n *node) SetLikely(b bool) { n.flags.set(nodeLikely, b) }
func (n *Node) setHasVal(b bool) { n.flags.set(nodeHasVal, b) } func (n *node) setHasVal(b bool) { n.flags.set(nodeHasVal, b) }
func (n *Node) setHasOpt(b bool) { n.flags.set(nodeHasOpt, b) } func (n *node) setHasOpt(b bool) { n.flags.set(nodeHasOpt, b) }
func (n *Node) SetEmbedded(b bool) { n.flags.set(nodeEmbedded, b) } func (n *node) SetEmbedded(b bool) { n.flags.set(nodeEmbedded, b) }
// MarkNonNil marks a pointer n as being guaranteed non-nil, // MarkNonNil marks a pointer n as being guaranteed non-nil,
// on all code paths, at all times. // on all code paths, at all times.
// During conversion to SSA, non-nil pointers won't have nil checks // During conversion to SSA, non-nil pointers won't have nil checks
// inserted before dereferencing. See state.exprPtr. // inserted before dereferencing. See state.exprPtr.
func (n *Node) MarkNonNil() { func (n *node) MarkNonNil() {
if !n.Type().IsPtr() && !n.Type().IsUnsafePtr() { if !n.Type().IsPtr() && !n.Type().IsUnsafePtr() {
base.Fatalf("MarkNonNil(%v), type %v", n, n.Type()) base.Fatalf("MarkNonNil(%v), type %v", n, n.Type())
} }
@ -361,7 +361,7 @@ func (n *Node) MarkNonNil() {
// When n is an index or slice operation, n does not need bounds checks. // When n is an index or slice operation, n does not need bounds checks.
// When n is a dereferencing operation, n does not need nil checks. // When n is a dereferencing operation, n does not need nil checks.
// When n is a makeslice+copy operation, n does not need length and cap checks. // When n is a makeslice+copy operation, n does not need length and cap checks.
func (n *Node) SetBounded(b bool) { func (n *node) SetBounded(b bool) {
switch n.Op() { switch n.Op() {
case OINDEX, OSLICE, OSLICEARR, OSLICE3, OSLICE3ARR, OSLICESTR: case OINDEX, OSLICE, OSLICEARR, OSLICE3, OSLICE3ARR, OSLICESTR:
// No bounds checks needed. // No bounds checks needed.
@ -377,7 +377,7 @@ func (n *Node) SetBounded(b bool) {
} }
// MarkReadonly indicates that n is an ONAME with readonly contents. // MarkReadonly indicates that n is an ONAME with readonly contents.
func (n *Node) MarkReadonly() { func (n *node) MarkReadonly() {
if n.Op() != ONAME { if n.Op() != ONAME {
base.Fatalf("Node.MarkReadonly %v", n.Op()) base.Fatalf("Node.MarkReadonly %v", n.Op())
} }
@ -389,7 +389,7 @@ func (n *Node) MarkReadonly() {
} }
// Val returns the constant.Value for the node. // Val returns the constant.Value for the node.
func (n *Node) Val() constant.Value { func (n *node) Val() constant.Value {
if !n.HasVal() { if !n.HasVal() {
return constant.MakeUnknown() return constant.MakeUnknown()
} }
@ -398,7 +398,7 @@ func (n *Node) Val() constant.Value {
// SetVal sets the constant.Value for the node, // SetVal sets the constant.Value for the node,
// which must not have been used with SetOpt. // which must not have been used with SetOpt.
func (n *Node) SetVal(v constant.Value) { func (n *node) SetVal(v constant.Value) {
if n.HasOpt() { if n.HasOpt() {
base.Flag.LowerH = 1 base.Flag.LowerH = 1
Dump("have Opt", n) Dump("have Opt", n)
@ -412,7 +412,7 @@ func (n *Node) SetVal(v constant.Value) {
} }
// Opt returns the optimizer data for the node. // Opt returns the optimizer data for the node.
func (n *Node) Opt() interface{} { func (n *node) Opt() interface{} {
if !n.HasOpt() { if !n.HasOpt() {
return nil return nil
} }
@ -421,7 +421,7 @@ func (n *Node) Opt() interface{} {
// SetOpt sets the optimizer data for the node, which must not have been used with SetVal. // SetOpt sets the optimizer data for the node, which must not have been used with SetVal.
// SetOpt(nil) is ignored for Vals to simplify call sites that are clearing Opts. // SetOpt(nil) is ignored for Vals to simplify call sites that are clearing Opts.
func (n *Node) SetOpt(x interface{}) { func (n *node) SetOpt(x interface{}) {
if x == nil { if x == nil {
if n.HasOpt() { if n.HasOpt() {
n.setHasOpt(false) n.setHasOpt(false)
@ -438,17 +438,17 @@ func (n *Node) SetOpt(x interface{}) {
n.e = x n.e = x
} }
func (n *Node) Iota() int64 { func (n *node) Iota() int64 {
return n.Offset() return n.Offset()
} }
func (n *Node) SetIota(x int64) { func (n *node) SetIota(x int64) {
n.SetOffset(x) n.SetOffset(x)
} }
// mayBeShared reports whether n may occur in multiple places in the AST. // mayBeShared reports whether n may occur in multiple places in the AST.
// Extra care must be taken when mutating such a node. // Extra care must be taken when mutating such a node.
func MayBeShared(n *Node) bool { func MayBeShared(n Node) bool {
switch n.Op() { switch n.Op() {
case ONAME, OLITERAL, ONIL, OTYPE: case ONAME, OLITERAL, ONIL, OTYPE:
return true return true
@ -457,7 +457,7 @@ func MayBeShared(n *Node) bool {
} }
// funcname returns the name (without the package) of the function n. // funcname returns the name (without the package) of the function n.
func FuncName(n *Node) string { func FuncName(n Node) string {
if n == nil || n.Func() == nil || n.Func().Nname == nil { if n == nil || n.Func() == nil || n.Func().Nname == nil {
return "<nil>" return "<nil>"
} }
@ -468,7 +468,7 @@ func FuncName(n *Node) string {
// This differs from the compiler's internal convention where local functions lack a package // This differs from the compiler's internal convention where local functions lack a package
// because the ultimate consumer of this is a human looking at an IDE; package is only empty // because the ultimate consumer of this is a human looking at an IDE; package is only empty
// if the compilation package is actually the empty string. // if the compilation package is actually the empty string.
func PkgFuncName(n *Node) string { func PkgFuncName(n Node) string {
var s *types.Sym var s *types.Sym
if n == nil { if n == nil {
return "<nil>" return "<nil>"
@ -494,19 +494,19 @@ func PkgFuncName(n *Node) string {
} }
// The compiler needs *Node to be assignable to cmd/compile/internal/ssa.Sym. // The compiler needs *Node to be assignable to cmd/compile/internal/ssa.Sym.
func (n *Node) CanBeAnSSASym() { func (n *node) CanBeAnSSASym() {
} }
// Name holds Node fields used only by named nodes (ONAME, OTYPE, OPACK, OLABEL, some OLITERAL). // Name holds Node fields used only by named nodes (ONAME, OTYPE, OPACK, OLABEL, some OLITERAL).
type Name struct { type Name struct {
Pack *Node // real package for import . names Pack Node // real package for import . names
Pkg *types.Pkg // pkg for OPACK nodes Pkg *types.Pkg // pkg for OPACK nodes
// For a local variable (not param) or extern, the initializing assignment (OAS or OAS2). // For a local variable (not param) or extern, the initializing assignment (OAS or OAS2).
// For a closure var, the ONAME node of the outer captured variable // For a closure var, the ONAME node of the outer captured variable
Defn *Node Defn Node
// The ODCLFUNC node (for a static function/method or a closure) in which // The ODCLFUNC node (for a static function/method or a closure) in which
// local variable or param is declared. // local variable or param is declared.
Curfn *Node Curfn Node
Param *Param // additional fields for ONAME, OTYPE Param *Param // additional fields for ONAME, OTYPE
Decldepth int32 // declaration loop depth, increased for every loop or label Decldepth int32 // declaration loop depth, increased for every loop or label
// Unique number for ONAME nodes within a function. Function outputs // Unique number for ONAME nodes within a function. Function outputs
@ -565,11 +565,11 @@ func (n *Name) SetOpenDeferSlot(b bool) { n.flags.set(nameOpenDeferSlot,
func (n *Name) SetLibfuzzerExtraCounter(b bool) { n.flags.set(nameLibfuzzerExtraCounter, b) } func (n *Name) SetLibfuzzerExtraCounter(b bool) { n.flags.set(nameLibfuzzerExtraCounter, b) }
type Param struct { type Param struct {
Ntype *Node Ntype Node
Heapaddr *Node // temp holding heap address of param Heapaddr Node // temp holding heap address of param
// ONAME PAUTOHEAP // ONAME PAUTOHEAP
Stackcopy *Node // the PPARAM/PPARAMOUT on-stack slot (moved func params only) Stackcopy Node // the PPARAM/PPARAMOUT on-stack slot (moved func params only)
// ONAME closure linkage // ONAME closure linkage
// Consider: // Consider:
@ -640,8 +640,8 @@ type Param struct {
// //
// Because of the sharding of pieces of the node, x.Defn means x.Name.Defn // Because of the sharding of pieces of the node, x.Defn means x.Name.Defn
// and x.Innermost/Outer means x.Name.Param.Innermost/Outer. // and x.Innermost/Outer means x.Name.Param.Innermost/Outer.
Innermost *Node Innermost Node
Outer *Node Outer Node
// OTYPE & ONAME //go:embed info, // OTYPE & ONAME //go:embed info,
// sharing storage to reduce gc.Param size. // sharing storage to reduce gc.Param size.
@ -762,9 +762,9 @@ func (p *Param) SetEmbedFiles(list []string) {
// the generated ODCLFUNC (as n.Func.Decl), but there is no // the generated ODCLFUNC (as n.Func.Decl), but there is no
// pointer from the Func back to the OCALLPART. // pointer from the Func back to the OCALLPART.
type Func struct { type Func struct {
Nname *Node // ONAME node Nname Node // ONAME node
Decl *Node // ODCLFUNC node Decl Node // ODCLFUNC node
OClosure *Node // OCLOSURE node OClosure Node // OCLOSURE node
Shortname *types.Sym Shortname *types.Sym
@ -774,10 +774,10 @@ type Func struct {
Exit Nodes Exit Nodes
// ONAME nodes for all params/locals for this func/closure, does NOT // ONAME nodes for all params/locals for this func/closure, does NOT
// include closurevars until transformclosure runs. // include closurevars until transformclosure runs.
Dcl []*Node Dcl []Node
ClosureEnter Nodes // list of ONAME nodes of captured variables ClosureEnter Nodes // list of ONAME nodes of captured variables
ClosureType *Node // closure representation type ClosureType Node // closure representation type
ClosureCalled bool // closure is only immediately called ClosureCalled bool // closure is only immediately called
ClosureVars Nodes // closure params; each has closurevar set ClosureVars Nodes // closure params; each has closurevar set
@ -822,8 +822,8 @@ type Inline struct {
Cost int32 // heuristic cost of inlining this function Cost int32 // heuristic cost of inlining this function
// Copies of Func.Dcl and Nbody for use during inlining. // Copies of Func.Dcl and Nbody for use during inlining.
Dcl []*Node Dcl []Node
Body []*Node Body []Node
} }
// A Mark represents a scope boundary. // A Mark represents a scope boundary.
@ -1108,17 +1108,17 @@ const (
// Nodes is a pointer to a slice of *Node. // Nodes is a pointer to a slice of *Node.
// For fields that are not used in most nodes, this is used instead of // For fields that are not used in most nodes, this is used instead of
// a slice to save space. // a slice to save space.
type Nodes struct{ slice *[]*Node } type Nodes struct{ slice *[]Node }
// asNodes returns a slice of *Node as a Nodes value. // asNodes returns a slice of *Node as a Nodes value.
func AsNodes(s []*Node) Nodes { func AsNodes(s []Node) Nodes {
return Nodes{&s} return Nodes{&s}
} }
// Slice returns the entries in Nodes as a slice. // Slice returns the entries in Nodes as a slice.
// Changes to the slice entries (as in s[i] = n) will be reflected in // Changes to the slice entries (as in s[i] = n) will be reflected in
// the Nodes. // the Nodes.
func (n Nodes) Slice() []*Node { func (n Nodes) Slice() []Node {
if n.slice == nil { if n.slice == nil {
return nil return nil
} }
@ -1135,25 +1135,25 @@ func (n Nodes) Len() int {
// Index returns the i'th element of Nodes. // Index returns the i'th element of Nodes.
// It panics if n does not have at least i+1 elements. // It panics if n does not have at least i+1 elements.
func (n Nodes) Index(i int) *Node { func (n Nodes) Index(i int) Node {
return (*n.slice)[i] return (*n.slice)[i]
} }
// First returns the first element of Nodes (same as n.Index(0)). // First returns the first element of Nodes (same as n.Index(0)).
// It panics if n has no elements. // It panics if n has no elements.
func (n Nodes) First() *Node { func (n Nodes) First() Node {
return (*n.slice)[0] return (*n.slice)[0]
} }
// Second returns the second element of Nodes (same as n.Index(1)). // Second returns the second element of Nodes (same as n.Index(1)).
// It panics if n has fewer than two elements. // It panics if n has fewer than two elements.
func (n Nodes) Second() *Node { func (n Nodes) Second() Node {
return (*n.slice)[1] return (*n.slice)[1]
} }
// Set sets n to a slice. // Set sets n to a slice.
// This takes ownership of the slice. // This takes ownership of the slice.
func (n *Nodes) Set(s []*Node) { func (n *Nodes) Set(s []Node) {
if len(s) == 0 { if len(s) == 0 {
n.slice = nil n.slice = nil
} else { } else {
@ -1166,18 +1166,18 @@ func (n *Nodes) Set(s []*Node) {
} }
// Set1 sets n to a slice containing a single node. // Set1 sets n to a slice containing a single node.
func (n *Nodes) Set1(n1 *Node) { func (n *Nodes) Set1(n1 Node) {
n.slice = &[]*Node{n1} n.slice = &[]Node{n1}
} }
// Set2 sets n to a slice containing two nodes. // Set2 sets n to a slice containing two nodes.
func (n *Nodes) Set2(n1, n2 *Node) { func (n *Nodes) Set2(n1, n2 Node) {
n.slice = &[]*Node{n1, n2} n.slice = &[]Node{n1, n2}
} }
// Set3 sets n to a slice containing three nodes. // Set3 sets n to a slice containing three nodes.
func (n *Nodes) Set3(n1, n2, n3 *Node) { func (n *Nodes) Set3(n1, n2, n3 Node) {
n.slice = &[]*Node{n1, n2, n3} n.slice = &[]Node{n1, n2, n3}
} }
// MoveNodes sets n to the contents of n2, then clears n2. // MoveNodes sets n to the contents of n2, then clears n2.
@ -1188,35 +1188,35 @@ func (n *Nodes) MoveNodes(n2 *Nodes) {
// SetIndex sets the i'th element of Nodes to node. // SetIndex sets the i'th element of Nodes to node.
// It panics if n does not have at least i+1 elements. // It panics if n does not have at least i+1 elements.
func (n Nodes) SetIndex(i int, node *Node) { func (n Nodes) SetIndex(i int, node Node) {
(*n.slice)[i] = node (*n.slice)[i] = node
} }
// SetFirst sets the first element of Nodes to node. // SetFirst sets the first element of Nodes to node.
// It panics if n does not have at least one elements. // It panics if n does not have at least one elements.
func (n Nodes) SetFirst(node *Node) { func (n Nodes) SetFirst(node Node) {
(*n.slice)[0] = node (*n.slice)[0] = node
} }
// SetSecond sets the second element of Nodes to node. // SetSecond sets the second element of Nodes to node.
// It panics if n does not have at least two elements. // It panics if n does not have at least two elements.
func (n Nodes) SetSecond(node *Node) { func (n Nodes) SetSecond(node Node) {
(*n.slice)[1] = node (*n.slice)[1] = node
} }
// Addr returns the address of the i'th element of Nodes. // Addr returns the address of the i'th element of Nodes.
// It panics if n does not have at least i+1 elements. // It panics if n does not have at least i+1 elements.
func (n Nodes) Addr(i int) **Node { func (n Nodes) Addr(i int) *Node {
return &(*n.slice)[i] return &(*n.slice)[i]
} }
// Append appends entries to Nodes. // Append appends entries to Nodes.
func (n *Nodes) Append(a ...*Node) { func (n *Nodes) Append(a ...Node) {
if len(a) == 0 { if len(a) == 0 {
return return
} }
if n.slice == nil { if n.slice == nil {
s := make([]*Node, len(a)) s := make([]Node, len(a))
copy(s, a) copy(s, a)
n.slice = &s n.slice = &s
return return
@ -1226,7 +1226,7 @@ func (n *Nodes) Append(a ...*Node) {
// Prepend prepends entries to Nodes. // Prepend prepends entries to Nodes.
// If a slice is passed in, this will take ownership of it. // If a slice is passed in, this will take ownership of it.
func (n *Nodes) Prepend(a ...*Node) { func (n *Nodes) Prepend(a ...Node) {
if len(a) == 0 { if len(a) == 0 {
return return
} }
@ -1251,7 +1251,7 @@ func (n *Nodes) AppendNodes(n2 *Nodes) {
// inspect invokes f on each node in an AST in depth-first order. // inspect invokes f on each node in an AST in depth-first order.
// If f(n) returns false, inspect skips visiting n's children. // If f(n) returns false, inspect skips visiting n's children.
func Inspect(n *Node, f func(*Node) bool) { func Inspect(n Node, f func(Node) bool) {
if n == nil || !f(n) { if n == nil || !f(n) {
return return
} }
@ -1263,7 +1263,7 @@ func Inspect(n *Node, f func(*Node) bool) {
InspectList(n.Rlist(), f) InspectList(n.Rlist(), f)
} }
func InspectList(l Nodes, f func(*Node) bool) { func InspectList(l Nodes, f func(Node) bool) {
for _, n := range l.Slice() { for _, n := range l.Slice() {
Inspect(n, f) Inspect(n, f)
} }
@ -1272,7 +1272,7 @@ func InspectList(l Nodes, f func(*Node) bool) {
// nodeQueue is a FIFO queue of *Node. The zero value of nodeQueue is // nodeQueue is a FIFO queue of *Node. The zero value of nodeQueue is
// a ready-to-use empty queue. // a ready-to-use empty queue.
type NodeQueue struct { type NodeQueue struct {
ring []*Node ring []Node
head, tail int head, tail int
} }
@ -1282,12 +1282,12 @@ func (q *NodeQueue) Empty() bool {
} }
// pushRight appends n to the right of the queue. // pushRight appends n to the right of the queue.
func (q *NodeQueue) PushRight(n *Node) { func (q *NodeQueue) PushRight(n Node) {
if len(q.ring) == 0 { if len(q.ring) == 0 {
q.ring = make([]*Node, 16) q.ring = make([]Node, 16)
} else if q.head+len(q.ring) == q.tail { } else if q.head+len(q.ring) == q.tail {
// Grow the ring. // Grow the ring.
nring := make([]*Node, len(q.ring)*2) nring := make([]Node, len(q.ring)*2)
// Copy the old elements. // Copy the old elements.
part := q.ring[q.head%len(q.ring):] part := q.ring[q.head%len(q.ring):]
if q.tail-q.head <= len(part) { if q.tail-q.head <= len(part) {
@ -1306,7 +1306,7 @@ func (q *NodeQueue) PushRight(n *Node) {
// popLeft pops a node from the left of the queue. It panics if q is // popLeft pops a node from the left of the queue. It panics if q is
// empty. // empty.
func (q *NodeQueue) PopLeft() *Node { func (q *NodeQueue) PopLeft() Node {
if q.Empty() { if q.Empty() {
panic("dequeue empty") panic("dequeue empty")
} }
@ -1316,25 +1316,25 @@ func (q *NodeQueue) PopLeft() *Node {
} }
// NodeSet is a set of Nodes. // NodeSet is a set of Nodes.
type NodeSet map[*Node]struct{} type NodeSet map[Node]struct{}
// Has reports whether s contains n. // Has reports whether s contains n.
func (s NodeSet) Has(n *Node) bool { func (s NodeSet) Has(n Node) bool {
_, isPresent := s[n] _, isPresent := s[n]
return isPresent return isPresent
} }
// Add adds n to s. // Add adds n to s.
func (s *NodeSet) Add(n *Node) { func (s *NodeSet) Add(n Node) {
if *s == nil { if *s == nil {
*s = make(map[*Node]struct{}) *s = make(map[Node]struct{})
} }
(*s)[n] = struct{}{} (*s)[n] = struct{}{}
} }
// Sorted returns s sorted according to less. // Sorted returns s sorted according to less.
func (s NodeSet) Sorted(less func(*Node, *Node) bool) []*Node { func (s NodeSet) Sorted(less func(Node, Node) bool) []Node {
var res []*Node var res []Node
for n := range s { for n := range s {
res = append(res, n) res = append(res, n)
} }
@ -1342,16 +1342,16 @@ func (s NodeSet) Sorted(less func(*Node, *Node) bool) []*Node {
return res return res
} }
func Nod(op Op, nleft, nright *Node) *Node { func Nod(op Op, nleft, nright Node) Node {
return NodAt(base.Pos, op, nleft, nright) return NodAt(base.Pos, op, nleft, nright)
} }
func NodAt(pos src.XPos, op Op, nleft, nright *Node) *Node { func NodAt(pos src.XPos, op Op, nleft, nright Node) Node {
var n *Node var n Node
switch op { switch op {
case ODCLFUNC: case ODCLFUNC:
var x struct { var x struct {
n Node n node
f Func f Func
} }
n = &x.n n = &x.n
@ -1361,13 +1361,13 @@ func NodAt(pos src.XPos, op Op, nleft, nright *Node) *Node {
base.Fatalf("use newname instead") base.Fatalf("use newname instead")
case OLABEL, OPACK: case OLABEL, OPACK:
var x struct { var x struct {
n Node n node
m Name m Name
} }
n = &x.n n = &x.n
n.SetName(&x.m) n.SetName(&x.m)
default: default:
n = new(Node) n = new(node)
} }
n.SetOp(op) n.SetOp(op)
n.SetLeft(nleft) n.SetLeft(nleft)
@ -1380,13 +1380,13 @@ func NodAt(pos src.XPos, op Op, nleft, nright *Node) *Node {
// newnamel returns a new ONAME Node associated with symbol s at position pos. // newnamel returns a new ONAME Node associated with symbol s at position pos.
// The caller is responsible for setting n.Name.Curfn. // The caller is responsible for setting n.Name.Curfn.
func NewNameAt(pos src.XPos, s *types.Sym) *Node { func NewNameAt(pos src.XPos, s *types.Sym) Node {
if s == nil { if s == nil {
base.Fatalf("newnamel nil") base.Fatalf("newnamel nil")
} }
var x struct { var x struct {
n Node n node
m Name m Name
p Param p Param
} }
@ -1453,14 +1453,14 @@ type SymAndPos struct {
Pos src.XPos // line of call Pos src.XPos // line of call
} }
func AsNode(n types.IRNode) *Node { func AsNode(n types.IRNode) Node {
if n == nil { if n == nil {
return nil return nil
} }
return n.(*Node) return n.(Node)
} }
var BlankNode *Node var BlankNode Node
// origSym returns the original symbol written by the user. // origSym returns the original symbol written by the user.
func OrigSym(s *types.Sym) *types.Sym { func OrigSym(s *types.Sym) *types.Sym {
@ -1489,7 +1489,7 @@ func OrigSym(s *types.Sym) *types.Sym {
// SliceBounds returns n's slice bounds: low, high, and max in expr[low:high:max]. // SliceBounds returns n's slice bounds: low, high, and max in expr[low:high:max].
// n must be a slice expression. max is nil if n is a simple slice expression. // n must be a slice expression. max is nil if n is a simple slice expression.
func (n *Node) SliceBounds() (low, high, max *Node) { func (n *node) SliceBounds() (low, high, max Node) {
if n.List().Len() == 0 { if n.List().Len() == 0 {
return nil, nil, nil return nil, nil, nil
} }
@ -1508,7 +1508,7 @@ func (n *Node) SliceBounds() (low, high, max *Node) {
// SetSliceBounds sets n's slice bounds, where n is a slice expression. // SetSliceBounds sets n's slice bounds, where n is a slice expression.
// n must be a slice expression. If max is non-nil, n must be a full slice expression. // n must be a slice expression. If max is non-nil, n must be a full slice expression.
func (n *Node) SetSliceBounds(low, high, max *Node) { func (n *node) SetSliceBounds(low, high, max Node) {
switch n.Op() { switch n.Op() {
case OSLICE, OSLICEARR, OSLICESTR: case OSLICE, OSLICEARR, OSLICESTR:
if max != nil { if max != nil {
@ -1555,13 +1555,13 @@ func (o Op) IsSlice3() bool {
return false return false
} }
func IsConst(n *Node, ct constant.Kind) bool { func IsConst(n Node, ct constant.Kind) bool {
return ConstType(n) == ct return ConstType(n) == ct
} }
// Int64Val returns n as an int64. // Int64Val returns n as an int64.
// n must be an integer or rune constant. // n must be an integer or rune constant.
func (n *Node) Int64Val() int64 { func (n *node) Int64Val() int64 {
if !IsConst(n, constant.Int) { if !IsConst(n, constant.Int) {
base.Fatalf("Int64Val(%v)", n) base.Fatalf("Int64Val(%v)", n)
} }
@ -1573,7 +1573,7 @@ func (n *Node) Int64Val() int64 {
} }
// CanInt64 reports whether it is safe to call Int64Val() on n. // CanInt64 reports whether it is safe to call Int64Val() on n.
func (n *Node) CanInt64() bool { func (n *node) CanInt64() bool {
if !IsConst(n, constant.Int) { if !IsConst(n, constant.Int) {
return false return false
} }
@ -1586,7 +1586,7 @@ func (n *Node) CanInt64() bool {
// Uint64Val returns n as an uint64. // Uint64Val returns n as an uint64.
// n must be an integer or rune constant. // n must be an integer or rune constant.
func (n *Node) Uint64Val() uint64 { func (n *node) Uint64Val() uint64 {
if !IsConst(n, constant.Int) { if !IsConst(n, constant.Int) {
base.Fatalf("Uint64Val(%v)", n) base.Fatalf("Uint64Val(%v)", n)
} }
@ -1599,7 +1599,7 @@ func (n *Node) Uint64Val() uint64 {
// BoolVal returns n as a bool. // BoolVal returns n as a bool.
// n must be a boolean constant. // n must be a boolean constant.
func (n *Node) BoolVal() bool { func (n *node) BoolVal() bool {
if !IsConst(n, constant.Bool) { if !IsConst(n, constant.Bool) {
base.Fatalf("BoolVal(%v)", n) base.Fatalf("BoolVal(%v)", n)
} }
@ -1608,7 +1608,7 @@ func (n *Node) BoolVal() bool {
// StringVal returns the value of a literal string Node as a string. // StringVal returns the value of a literal string Node as a string.
// n must be a string constant. // n must be a string constant.
func (n *Node) StringVal() string { func (n *node) StringVal() string {
if !IsConst(n, constant.String) { if !IsConst(n, constant.String) {
base.Fatalf("StringVal(%v)", n) base.Fatalf("StringVal(%v)", n)
} }
@ -1618,14 +1618,14 @@ func (n *Node) StringVal() string {
// rawcopy returns a shallow copy of n. // rawcopy returns a shallow copy of n.
// Note: copy or sepcopy (rather than rawcopy) is usually the // Note: copy or sepcopy (rather than rawcopy) is usually the
// correct choice (see comment with Node.copy, below). // correct choice (see comment with Node.copy, below).
func (n *Node) RawCopy() *Node { func (n *node) RawCopy() Node {
copy := *n copy := *n
return &copy return &copy
} }
// sepcopy returns a separate shallow copy of n, with the copy's // sepcopy returns a separate shallow copy of n, with the copy's
// Orig pointing to itself. // Orig pointing to itself.
func SepCopy(n *Node) *Node { func SepCopy(n Node) Node {
n = n.RawCopy() n = n.RawCopy()
n.SetOrig(n) n.SetOrig(n)
return n return n
@ -1638,7 +1638,7 @@ func SepCopy(n *Node) *Node {
// represent the original node anymore. // represent the original node anymore.
// (This caused the wrong complit Op to be used when printing error // (This caused the wrong complit Op to be used when printing error
// messages; see issues #26855, #27765). // messages; see issues #26855, #27765).
func Copy(n *Node) *Node { func Copy(n Node) Node {
copy := n.RawCopy() copy := n.RawCopy()
if n.Orig() == n { if n.Orig() == n {
copy.SetOrig(copy) copy.SetOrig(copy)
@ -1647,13 +1647,13 @@ func Copy(n *Node) *Node {
} }
// isNil reports whether n represents the universal untyped zero value "nil". // isNil reports whether n represents the universal untyped zero value "nil".
func IsNil(n *Node) bool { func IsNil(n Node) bool {
// Check n.Orig because constant propagation may produce typed nil constants, // Check n.Orig because constant propagation may produce typed nil constants,
// which don't exist in the Go spec. // which don't exist in the Go spec.
return n.Orig().Op() == ONIL return n.Orig().Op() == ONIL
} }
func IsBlank(n *Node) bool { func IsBlank(n Node) bool {
if n == nil { if n == nil {
return false return false
} }
@ -1662,6 +1662,6 @@ func IsBlank(n *Node) bool {
// IsMethod reports whether n is a method. // IsMethod reports whether n is a method.
// n must be a function or a method. // n must be a function or a method.
func IsMethod(n *Node) bool { func IsMethod(n Node) bool {
return n.Type().Recv() != nil return n.Type().Recv() != nil
} }

View file

@ -20,10 +20,10 @@ func TestSizeof(t *testing.T) {
_32bit uintptr // size on 32bit platforms _32bit uintptr // size on 32bit platforms
_64bit uintptr // size on 64bit platforms _64bit uintptr // size on 64bit platforms
}{ }{
{Func{}, 136, 248}, {Func{}, 152, 280},
{Name{}, 32, 56}, {Name{}, 44, 80},
{Param{}, 24, 48}, {Param{}, 44, 88},
{Node{}, 76, 128}, {node{}, 88, 152},
} }
for _, tt := range tests { for _, tt := range tests {

View file

@ -12,7 +12,7 @@ import (
"cmd/compile/internal/types" "cmd/compile/internal/types"
) )
func ConstType(n *Node) constant.Kind { func ConstType(n Node) constant.Kind {
if n == nil || n.Op() != OLITERAL { if n == nil || n.Op() != OLITERAL {
return constant.Unknown return constant.Unknown
} }
@ -22,7 +22,7 @@ func ConstType(n *Node) constant.Kind {
// ValueInterface returns the constant value stored in n as an interface{}. // ValueInterface returns the constant value stored in n as an interface{}.
// It returns int64s for ints and runes, float64s for floats, // It returns int64s for ints and runes, float64s for floats,
// and complex128s for complex values. // and complex128s for complex values.
func ConstValue(n *Node) interface{} { func ConstValue(n Node) interface{} {
switch v := n.Val(); v.Kind() { switch v := n.Val(); v.Kind() {
default: default:
base.Fatalf("unexpected constant: %v", v) base.Fatalf("unexpected constant: %v", v)
@ -91,7 +91,7 @@ func ValidTypeForConst(t *types.Type, v constant.Value) bool {
} }
// nodlit returns a new untyped constant with value v. // nodlit returns a new untyped constant with value v.
func NewLiteral(v constant.Value) *Node { func NewLiteral(v constant.Value) Node {
n := Nod(OLITERAL, nil, nil) n := Nod(OLITERAL, nil, nil)
if k := v.Kind(); k != constant.Unknown { if k := v.Kind(); k != constant.Unknown {
n.SetType(idealType(k)) n.SetType(idealType(k))

View file

@ -289,7 +289,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym: case *obj.LSym:
wantreg = "SB" wantreg = "SB"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case *ir.Node: case ir.Node:
wantreg = "SP" wantreg = "SP"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case nil: case nil:

View file

@ -263,7 +263,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym: case *obj.LSym:
wantreg = "SB" wantreg = "SB"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case *ir.Node: case ir.Node:
wantreg = "SP" wantreg = "SP"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case nil: case nil:

View file

@ -752,7 +752,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p.To.Reg = v.Reg() p.To.Reg = v.Reg()
} }
case *obj.LSym, *ir.Node: case *obj.LSym, ir.Node:
p := s.Prog(ppc64.AMOVD) p := s.Prog(ppc64.AMOVD)
p.From.Type = obj.TYPE_ADDR p.From.Type = obj.TYPE_ADDR
p.From.Reg = v.Args[0].Reg() p.From.Reg = v.Args[0].Reg()

View file

@ -324,7 +324,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym: case *obj.LSym:
wantreg = "SB" wantreg = "SB"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case *ir.Node: case ir.Node:
wantreg = "SP" wantreg = "SP"
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case nil: case nil:

View file

@ -139,7 +139,7 @@ type Frontend interface {
// Auto returns a Node for an auto variable of the given type. // Auto returns a Node for an auto variable of the given type.
// The SSA compiler uses this function to allocate space for spills. // The SSA compiler uses this function to allocate space for spills.
Auto(src.XPos, *types.Type) *ir.Node Auto(src.XPos, *types.Type) ir.Node
// Given the name for a compound type, returns the name we should use // Given the name for a compound type, returns the name we should use
// for the parts of that compound type. // for the parts of that compound type.

View file

@ -137,9 +137,9 @@ func dse(f *Func) {
// reaches stores then we delete all the stores. The other operations will then // reaches stores then we delete all the stores. The other operations will then
// be eliminated by the dead code elimination pass. // be eliminated by the dead code elimination pass.
func elimDeadAutosGeneric(f *Func) { func elimDeadAutosGeneric(f *Func) {
addr := make(map[*Value]*ir.Node) // values that the address of the auto reaches addr := make(map[*Value]ir.Node) // values that the address of the auto reaches
elim := make(map[*Value]*ir.Node) // values that could be eliminated if the auto is elim := make(map[*Value]ir.Node) // values that could be eliminated if the auto is
used := make(map[*ir.Node]bool) // used autos that must be kept used := make(map[ir.Node]bool) // used autos that must be kept
// visit the value and report whether any of the maps are updated // visit the value and report whether any of the maps are updated
visit := func(v *Value) (changed bool) { visit := func(v *Value) (changed bool) {
@ -147,7 +147,7 @@ func elimDeadAutosGeneric(f *Func) {
switch v.Op { switch v.Op {
case OpAddr, OpLocalAddr: case OpAddr, OpLocalAddr:
// Propagate the address if it points to an auto. // Propagate the address if it points to an auto.
n, ok := v.Aux.(*ir.Node) n, ok := v.Aux.(ir.Node)
if !ok || n.Class() != ir.PAUTO { if !ok || n.Class() != ir.PAUTO {
return return
} }
@ -158,7 +158,7 @@ func elimDeadAutosGeneric(f *Func) {
return return
case OpVarDef, OpVarKill: case OpVarDef, OpVarKill:
// v should be eliminated if we eliminate the auto. // v should be eliminated if we eliminate the auto.
n, ok := v.Aux.(*ir.Node) n, ok := v.Aux.(ir.Node)
if !ok || n.Class() != ir.PAUTO { if !ok || n.Class() != ir.PAUTO {
return return
} }
@ -174,7 +174,7 @@ func elimDeadAutosGeneric(f *Func) {
// for open-coded defers from being removed (since they // for open-coded defers from being removed (since they
// may not be used by the inline code, but will be used by // may not be used by the inline code, but will be used by
// panic processing). // panic processing).
n, ok := v.Aux.(*ir.Node) n, ok := v.Aux.(ir.Node)
if !ok || n.Class() != ir.PAUTO { if !ok || n.Class() != ir.PAUTO {
return return
} }
@ -222,7 +222,7 @@ func elimDeadAutosGeneric(f *Func) {
} }
// Propagate any auto addresses through v. // Propagate any auto addresses through v.
var node *ir.Node var node ir.Node
for _, a := range args { for _, a := range args {
if n, ok := addr[a]; ok && !used[n] { if n, ok := addr[a]; ok && !used[n] {
if node == nil { if node == nil {
@ -299,11 +299,11 @@ func elimUnreadAutos(f *Func) {
// Loop over all ops that affect autos taking note of which // Loop over all ops that affect autos taking note of which
// autos we need and also stores that we might be able to // autos we need and also stores that we might be able to
// eliminate. // eliminate.
seen := make(map[*ir.Node]bool) seen := make(map[ir.Node]bool)
var stores []*Value var stores []*Value
for _, b := range f.Blocks { for _, b := range f.Blocks {
for _, v := range b.Values { for _, v := range b.Values {
n, ok := v.Aux.(*ir.Node) n, ok := v.Aux.(ir.Node)
if !ok { if !ok {
continue continue
} }
@ -335,7 +335,7 @@ func elimUnreadAutos(f *Func) {
// Eliminate stores to unread autos. // Eliminate stores to unread autos.
for _, store := range stores { for _, store := range stores {
n, _ := store.Aux.(*ir.Node) n, _ := store.Aux.(ir.Node)
if seen[n] { if seen[n] {
continue continue
} }

View file

@ -25,7 +25,7 @@ type FuncDebug struct {
// Slots is all the slots used in the debug info, indexed by their SlotID. // Slots is all the slots used in the debug info, indexed by their SlotID.
Slots []LocalSlot Slots []LocalSlot
// The user variables, indexed by VarID. // The user variables, indexed by VarID.
Vars []*ir.Node Vars []ir.Node
// The slots that make up each variable, indexed by VarID. // The slots that make up each variable, indexed by VarID.
VarSlots [][]SlotID VarSlots [][]SlotID
// The location list data, indexed by VarID. Must be processed by PutLocationList. // The location list data, indexed by VarID. Must be processed by PutLocationList.
@ -166,7 +166,7 @@ func (s *debugState) logf(msg string, args ...interface{}) {
type debugState struct { type debugState struct {
// See FuncDebug. // See FuncDebug.
slots []LocalSlot slots []LocalSlot
vars []*ir.Node vars []ir.Node
varSlots [][]SlotID varSlots [][]SlotID
lists [][]byte lists [][]byte
@ -190,7 +190,7 @@ type debugState struct {
// The pending location list entry for each user variable, indexed by VarID. // The pending location list entry for each user variable, indexed by VarID.
pendingEntries []pendingEntry pendingEntries []pendingEntry
varParts map[*ir.Node][]SlotID varParts map[ir.Node][]SlotID
blockDebug []BlockDebug blockDebug []BlockDebug
pendingSlotLocs []VarLoc pendingSlotLocs []VarLoc
liveSlots []liveSlot liveSlots []liveSlot
@ -347,7 +347,7 @@ func BuildFuncDebug(ctxt *obj.Link, f *Func, loggingEnabled bool, stackOffset fu
} }
if state.varParts == nil { if state.varParts == nil {
state.varParts = make(map[*ir.Node][]SlotID) state.varParts = make(map[ir.Node][]SlotID)
} else { } else {
for n := range state.varParts { for n := range state.varParts {
delete(state.varParts, n) delete(state.varParts, n)
@ -380,7 +380,7 @@ func BuildFuncDebug(ctxt *obj.Link, f *Func, loggingEnabled bool, stackOffset fu
for _, b := range f.Blocks { for _, b := range f.Blocks {
for _, v := range b.Values { for _, v := range b.Values {
if v.Op == OpVarDef || v.Op == OpVarKill { if v.Op == OpVarDef || v.Op == OpVarKill {
n := v.Aux.(*ir.Node) n := v.Aux.(ir.Node)
if ir.IsSynthetic(n) { if ir.IsSynthetic(n) {
continue continue
} }
@ -718,7 +718,7 @@ func (state *debugState) processValue(v *Value, vSlots []SlotID, vReg *Register)
switch { switch {
case v.Op == OpVarDef, v.Op == OpVarKill: case v.Op == OpVarDef, v.Op == OpVarKill:
n := v.Aux.(*ir.Node) n := v.Aux.(ir.Node)
if ir.IsSynthetic(n) { if ir.IsSynthetic(n) {
break break
} }

View file

@ -69,7 +69,7 @@ type TestFrontend struct {
func (TestFrontend) StringData(s string) *obj.LSym { func (TestFrontend) StringData(s string) *obj.LSym {
return nil return nil
} }
func (TestFrontend) Auto(pos src.XPos, t *types.Type) *ir.Node { func (TestFrontend) Auto(pos src.XPos, t *types.Type) ir.Node {
n := ir.NewNameAt(pos, &types.Sym{Name: "aFakeAuto"}) n := ir.NewNameAt(pos, &types.Sym{Name: "aFakeAuto"})
n.SetClass(ir.PAUTO) n.SetClass(ir.PAUTO)
return n return n

View file

@ -60,7 +60,7 @@ func (r *Register) GCNum() int16 {
// { N: len, Type: int, Off: 0, SplitOf: parent, SplitOffset: 8} // { N: len, Type: int, Off: 0, SplitOf: parent, SplitOffset: 8}
// parent = &{N: s, Type: string} // parent = &{N: s, Type: string}
type LocalSlot struct { type LocalSlot struct {
N *ir.Node // an ONAME *gc.Node representing a stack location. N ir.Node // an ONAME *gc.Node representing a stack location.
Type *types.Type // type of slot Type *types.Type // type of slot
Off int64 // offset of slot in N Off int64 // offset of slot in N

View file

@ -236,7 +236,7 @@ func nilcheckelim2(f *Func) {
continue continue
} }
if v.Type.IsMemory() || v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() { if v.Type.IsMemory() || v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
if v.Op == OpVarKill || v.Op == OpVarLive || (v.Op == OpVarDef && !v.Aux.(*ir.Node).Type().HasPointers()) { if v.Op == OpVarKill || v.Op == OpVarLive || (v.Op == OpVarDef && !v.Aux.(ir.Node).Type().HasPointers()) {
// These ops don't really change memory. // These ops don't really change memory.
continue continue
// Note: OpVarDef requires that the defined variable not have pointers. // Note: OpVarDef requires that the defined variable not have pointers.

View file

@ -1249,7 +1249,7 @@ func (s *regAllocState) regalloc(f *Func) {
// This forces later liveness analysis to make the // This forces later liveness analysis to make the
// value live at this point. // value live at this point.
v.SetArg(0, s.makeSpill(a, b)) v.SetArg(0, s.makeSpill(a, b))
} else if _, ok := a.Aux.(*ir.Node); ok && vi.rematerializeable { } else if _, ok := a.Aux.(ir.Node); ok && vi.rematerializeable {
// Rematerializeable value with a gc.Node. This is the address of // Rematerializeable value with a gc.Node. This is the address of
// a stack object (e.g. an LEAQ). Keep the object live. // a stack object (e.g. an LEAQ). Keep the object live.
// Change it to VarLive, which is what plive expects for locals. // Change it to VarLive, which is what plive expects for locals.

View file

@ -22,7 +22,7 @@ func TestSizeof(t *testing.T) {
}{ }{
{Value{}, 72, 112}, {Value{}, 72, 112},
{Block{}, 164, 304}, {Block{}, 164, 304},
{LocalSlot{}, 28, 40}, {LocalSlot{}, 32, 48},
{valState{}, 28, 40}, {valState{}, 28, 40},
} }

View file

@ -157,7 +157,7 @@ func (s *stackAllocState) stackalloc() {
if v.Aux == nil { if v.Aux == nil {
f.Fatalf("%s has nil Aux\n", v.LongString()) f.Fatalf("%s has nil Aux\n", v.LongString())
} }
loc := LocalSlot{N: v.Aux.(*ir.Node), Type: v.Type, Off: v.AuxInt} loc := LocalSlot{N: v.Aux.(ir.Node), Type: v.Type, Off: v.AuxInt}
if f.pass.debug > stackDebug { if f.pass.debug > stackDebug {
fmt.Printf("stackalloc %s to %s\n", v, loc) fmt.Printf("stackalloc %s to %s\n", v, loc)
} }

View file

@ -237,7 +237,7 @@ func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value, extend bool) {
switch v.Aux.(type) { switch v.Aux.(type) {
case *obj.LSym: case *obj.LSym:
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
case *ir.Node: case ir.Node:
p.From.Reg = v.Args[0].Reg() p.From.Reg = v.Args[0].Reg()
gc.AddAux(&p.From, v) gc.AddAux(&p.From, v)
default: default: