[dev.regabi] cmd/compile: remove prealloc map

The prealloc map seems to exist to avoid adding a field to all nodes.
Now we can add a field to just the nodes that need the field,
so let's do that and avoid having a magic global with extra node state
that isn't preserved by operations like Copy or printed by Dump.

This also makes clear which nodes can be prealloc'ed.
In particular, the code in walkstmt looked up an entry in
prealloc using an ONAME node, but there's no code that
ever stores such an entry, so the lookup never succeeded.
Having fields makes that kind of thing easier to see and fix.

Passes buildall w/ toolstash -cmp.

Change-Id: I418ad0e2847615c08868120c13ee719dc0b2eacb
Reviewed-on: https://go-review.googlesource.com/c/go/+/278915
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
This commit is contained in:
Russ Cox 2020-12-17 08:49:22 -05:00
parent ffb0cb7044
commit c45313bf45
7 changed files with 33 additions and 34 deletions

View file

@ -378,7 +378,7 @@ func closureType(clo ir.Node) *types.Type {
return typ return typ
} }
func walkclosure(clo ir.Node, init *ir.Nodes) ir.Node { func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
fn := clo.Func() fn := clo.Func()
// If no closure vars, don't bother wrapping. // If no closure vars, don't bother wrapping.
@ -403,12 +403,12 @@ func walkclosure(clo ir.Node, init *ir.Nodes) ir.Node {
cfn := convnop(addr, clo.Type()) cfn := convnop(addr, clo.Type())
// non-escaping temp to use, if any. // non-escaping temp to use, if any.
if x := prealloc[clo]; x != nil { if x := clo.Prealloc; x != nil {
if !types.Identical(typ, x.Type()) { if !types.Identical(typ, x.Type()) {
panic("closure type does not match order's assigned type") panic("closure type does not match order's assigned type")
} }
addr.SetRight(x) addr.SetRight(x)
delete(prealloc, clo) clo.Prealloc = nil
} }
return walkexpr(cfn, init) return walkexpr(cfn, init)
@ -552,12 +552,12 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
cfn := convnop(addr, n.Type()) cfn := convnop(addr, n.Type())
// non-escaping temp to use, if any. // non-escaping temp to use, if any.
if x := prealloc[n]; x != nil { if x := n.Prealloc; x != nil {
if !types.Identical(typ, x.Type()) { if !types.Identical(typ, x.Type()) {
panic("partial call type does not match order's assigned type") panic("partial call type does not match order's assigned type")
} }
addr.SetRight(x) addr.SetRight(x)
delete(prealloc, n) n.Prealloc = nil
} }
return walkexpr(cfn, init) return walkexpr(cfn, init)

View file

@ -846,9 +846,9 @@ func (o *Order) stmt(n ir.Node) {
r := n.Right() r := n.Right()
n.SetRight(o.copyExpr(r)) n.SetRight(o.copyExpr(r))
// prealloc[n] is the temp for the iterator. // n.Prealloc is the temp for the iterator.
// hiter contains pointers and needs to be zeroed. // hiter contains pointers and needs to be zeroed.
prealloc[n] = o.newTemp(hiter(n.Type()), true) n.Prealloc = o.newTemp(hiter(n.Type()), true)
} }
o.exprListInPlace(n.List()) o.exprListInPlace(n.List())
if orderBody { if orderBody {
@ -1040,9 +1040,6 @@ func (o *Order) exprListInPlace(l ir.Nodes) {
} }
} }
// prealloc[x] records the allocation to use for x.
var prealloc = map[ir.Node]ir.Node{}
func (o *Order) exprNoLHS(n ir.Node) ir.Node { func (o *Order) exprNoLHS(n ir.Node) ir.Node {
return o.expr(n, nil) return o.expr(n, nil)
} }
@ -1079,11 +1076,12 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Allocate a temporary to hold the strings. // Allocate a temporary to hold the strings.
// Fewer than 5 strings use direct runtime helpers. // Fewer than 5 strings use direct runtime helpers.
case ir.OADDSTR: case ir.OADDSTR:
n := n.(*ir.AddStringExpr)
o.exprList(n.List()) o.exprList(n.List())
if n.List().Len() > 5 { if n.List().Len() > 5 {
t := types.NewArray(types.Types[types.TSTRING], int64(n.List().Len())) t := types.NewArray(types.Types[types.TSTRING], int64(n.List().Len()))
prealloc[n] = o.newTemp(t, false) n.Prealloc = o.newTemp(t, false)
} }
// Mark string(byteSlice) arguments to reuse byteSlice backing // Mark string(byteSlice) arguments to reuse byteSlice backing
@ -1268,7 +1266,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.OCLOSURE: case ir.OCLOSURE:
n := n.(*ir.ClosureExpr) n := n.(*ir.ClosureExpr)
if n.Transient() && len(n.Func().ClosureVars) > 0 { if n.Transient() && len(n.Func().ClosureVars) > 0 {
prealloc[n] = o.newTemp(closureType(n), false) n.Prealloc = o.newTemp(closureType(n), false)
} }
return n return n
@ -1277,15 +1275,16 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
n.SetLeft(o.expr(n.Left(), nil)) n.SetLeft(o.expr(n.Left(), nil))
if n.Transient() { if n.Transient() {
t := partialCallType(n) t := partialCallType(n)
prealloc[n] = o.newTemp(t, false) n.Prealloc = o.newTemp(t, false)
} }
return n return n
case ir.OSLICELIT: case ir.OSLICELIT:
n := n.(*ir.CompLitExpr)
o.exprList(n.List()) o.exprList(n.List())
if n.Transient() { if n.Transient() {
t := types.NewArray(n.Type().Elem(), ir.Int64Val(n.Right())) t := types.NewArray(n.Type().Elem(), ir.Int64Val(n.Right()))
prealloc[n] = o.newTemp(t, false) n.Prealloc = o.newTemp(t, false)
} }
return n return n

View file

@ -296,7 +296,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// we only use a once, so no copy needed. // we only use a once, so no copy needed.
ha := a ha := a
hit := prealloc[nrange] hit := nrange.Prealloc
th := hit.Type() th := hit.Type()
keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter
elemsym := th.Field(1).Sym // ditto elemsym := th.Field(1).Sym // ditto

View file

@ -668,7 +668,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// set auto to point at new temp or heap (3 assign) // set auto to point at new temp or heap (3 assign)
var a ir.Node var a ir.Node
if x := prealloc[n]; x != nil { if x := n.Prealloc; x != nil {
// temp allocated during order.go for dddarg // temp allocated during order.go for dddarg
if !types.Identical(t, x.Type()) { if !types.Identical(t, x.Type()) {
panic("dotdotdot base type does not match order's assigned type") panic("dotdotdot base type does not match order's assigned type")

View file

@ -202,10 +202,7 @@ func walkstmt(n ir.Node) ir.Node {
if base.Flag.CompilingRuntime { if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", v) base.Errorf("%v escapes to heap, not allowed in runtime", v)
} }
if prealloc[v] == nil { nn := ir.Nod(ir.OAS, v.Name().Heapaddr, callnew(v.Type()))
prealloc[v] = callnew(v.Type())
}
nn := ir.Nod(ir.OAS, v.Name().Heapaddr, prealloc[v])
nn.SetColas(true) nn.SetColas(true)
return walkstmt(typecheck(nn, ctxStmt)) return walkstmt(typecheck(nn, ctxStmt))
} }
@ -1638,7 +1635,7 @@ func walkexpr1(n ir.Node, init *ir.Nodes) ir.Node {
return mkcall1(chanfn("chansend1", 2, n.Left().Type()), nil, init, n.Left(), n1) return mkcall1(chanfn("chansend1", 2, n.Left().Type()), nil, init, n.Left(), n1)
case ir.OCLOSURE: case ir.OCLOSURE:
return walkclosure(n, init) return walkclosure(n.(*ir.ClosureExpr), init)
case ir.OCALLPART: case ir.OCALLPART:
return walkpartialcall(n.(*ir.CallPartExpr), init) return walkpartialcall(n.(*ir.CallPartExpr), init)
@ -2713,11 +2710,9 @@ func addstr(n *ir.AddStringExpr, init *ir.Nodes) ir.Node {
fn = "concatstrings" fn = "concatstrings"
t := types.NewSlice(types.Types[types.TSTRING]) t := types.NewSlice(types.Types[types.TSTRING])
slice := ir.Nod(ir.OCOMPLIT, nil, ir.TypeNode(t)) // args[1:] to skip buf arg
if prealloc[n] != nil { slice := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(t), args[1:])
prealloc[slice] = prealloc[n] slice.Prealloc = n.Prealloc
}
slice.PtrList().Set(args[1:]) // skip buf arg
args = []ir.Node{buf, slice} args = []ir.Node{buf, slice}
slice.SetEsc(EscNone) slice.SetEsc(EscNone)
} }

View file

@ -90,6 +90,7 @@ func toNtype(x Node) Ntype {
type AddStringExpr struct { type AddStringExpr struct {
miniExpr miniExpr
List_ Nodes List_ Nodes
Prealloc *Name
} }
func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr { func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr {
@ -236,6 +237,7 @@ type CallPartExpr struct {
Func_ *Func Func_ *Func
X Node X Node
Method *types.Field Method *types.Field
Prealloc *Name
} }
func NewCallPartExpr(pos src.XPos, x Node, method *types.Field, fn *Func) *CallPartExpr { func NewCallPartExpr(pos src.XPos, x Node, method *types.Field, fn *Func) *CallPartExpr {
@ -256,6 +258,7 @@ func (n *CallPartExpr) SetLeft(x Node) { n.X = x }
type ClosureExpr struct { type ClosureExpr struct {
miniExpr miniExpr
Func_ *Func Func_ *Func
Prealloc *Name
} }
func NewClosureExpr(pos src.XPos, fn *Func) *ClosureExpr { func NewClosureExpr(pos src.XPos, fn *Func) *ClosureExpr {
@ -290,6 +293,7 @@ type CompLitExpr struct {
orig Node orig Node
Ntype Ntype Ntype Ntype
List_ Nodes // initialized values List_ Nodes // initialized values
Prealloc *Name
} }
func NewCompLitExpr(pos src.XPos, op Op, typ Ntype, list []Node) *CompLitExpr { func NewCompLitExpr(pos src.XPos, op Op, typ Ntype, list []Node) *CompLitExpr {

View file

@ -368,6 +368,7 @@ type RangeStmt struct {
Body_ Nodes Body_ Nodes
HasBreak_ bool HasBreak_ bool
typ *types.Type // TODO(rsc): Remove - use X.Type() instead typ *types.Type // TODO(rsc): Remove - use X.Type() instead
Prealloc *Name
} }
func NewRangeStmt(pos src.XPos, vars []Node, x Node, body []Node) *RangeStmt { func NewRangeStmt(pos src.XPos, vars []Node, x Node, body []Node) *RangeStmt {