[dev.regabi] cmd/compile: use Node getters and setters [generated]

Now that we have all the getters and setters defined, use them
and unexport all the actual Node fields. This is the next step
toward replacing Node with an interface.

[git-generate]
cd src/cmd/compile/internal/gc
rf '
        ex . ../ir ../ssa {
                import "cmd/compile/internal/ir"
                import "cmd/compile/internal/types"
                import "cmd/internal/src"
                var n, x *ir.Node
                var op ir.Op
                var t *types.Type
                var f *ir.Func
                var m *ir.Name
                var s *types.Sym
                var p src.XPos
                var i int64
                var e uint16
                var nodes ir.Nodes

                n.Op = op    -> n.SetOp(op)
                n.Left = x   -> n.SetLeft(x)
                n.Right = x  -> n.SetRight(x)
                n.Orig = x -> n.SetOrig(x)
                n.Type = t -> n.SetType(t)
                n.Func = f -> n.SetFunc(f)
                n.Name = m -> n.SetName(m)
                n.Sym = s -> n.SetSym(s)
                n.Pos = p -> n.SetPos(p)
                n.Xoffset = i -> n.SetXoffset(i)
                n.Esc = e -> n.SetEsc(e)

                n.Ninit.Append -> n.PtrNinit().Append
                n.Ninit.AppendNodes -> n.PtrNinit().AppendNodes
                n.Ninit.MoveNodes -> n.PtrNinit().MoveNodes
                n.Ninit.Prepend -> n.PtrNinit().Prepend
                n.Ninit.Set -> n.PtrNinit().Set
                n.Ninit.Set1 -> n.PtrNinit().Set1
                n.Ninit.Set2 -> n.PtrNinit().Set2
                n.Ninit.Set3 -> n.PtrNinit().Set3
                &n.Ninit -> n.PtrNinit()
                n.Ninit = nodes -> n.SetNinit(nodes)

                n.Nbody.Append -> n.PtrNbody().Append
                n.Nbody.AppendNodes -> n.PtrNbody().AppendNodes
                n.Nbody.MoveNodes -> n.PtrNbody().MoveNodes
                n.Nbody.Prepend -> n.PtrNbody().Prepend
                n.Nbody.Set -> n.PtrNbody().Set
                n.Nbody.Set1 -> n.PtrNbody().Set1
                n.Nbody.Set2 -> n.PtrNbody().Set2
                n.Nbody.Set3 -> n.PtrNbody().Set3
                &n.Nbody -> n.PtrNbody()
                n.Nbody = nodes -> n.SetNbody(nodes)

                n.List.Append -> n.PtrList().Append
                n.List.AppendNodes -> n.PtrList().AppendNodes
                n.List.MoveNodes -> n.PtrList().MoveNodes
                n.List.Prepend -> n.PtrList().Prepend
                n.List.Set -> n.PtrList().Set
                n.List.Set1 -> n.PtrList().Set1
                n.List.Set2 -> n.PtrList().Set2
                n.List.Set3 -> n.PtrList().Set3
                &n.List -> n.PtrList()
                n.List = nodes -> n.SetList(nodes)

                n.Rlist.Append -> n.PtrRlist().Append
                n.Rlist.AppendNodes -> n.PtrRlist().AppendNodes
                n.Rlist.MoveNodes -> n.PtrRlist().MoveNodes
                n.Rlist.Prepend -> n.PtrRlist().Prepend
                n.Rlist.Set -> n.PtrRlist().Set
                n.Rlist.Set1 -> n.PtrRlist().Set1
                n.Rlist.Set2 -> n.PtrRlist().Set2
                n.Rlist.Set3 -> n.PtrRlist().Set3
                &n.Rlist -> n.PtrRlist()
                n.Rlist = nodes -> n.SetRlist(nodes)
        }
        ex . ../ir ../ssa {
                import "cmd/compile/internal/ir"

                var n *ir.Node
                n.Op         -> n.GetOp()
                n.Left       -> n.GetLeft()
                n.Right      -> n.GetRight()
                n.Orig -> n.GetOrig()
                n.Type -> n.GetType()
                n.Func -> n.GetFunc()
                n.Name -> n.GetName()
                n.Sym -> n.GetSym()
                n.Pos -> n.GetPos()
                n.Xoffset -> n.GetXoffset()
                n.Esc -> n.GetEsc()

                avoid (*ir.Node).PtrNinit
                avoid (*ir.Node).PtrNbody
                avoid (*ir.Node).PtrList
                avoid (*ir.Node).PtrRlist

                n.Ninit -> n.GetNinit()
                n.Nbody -> n.GetNbody()
                n.List -> n.GetList()
                n.Rlist -> n.GetRlist()
        }
'

cd ../ir
rf '
        mv Node.Op Node.op
        mv Node.GetOp Node.Op

        mv Node.Left Node.left
        mv Node.GetLeft Node.Left

        mv Node.Right Node.right
        mv Node.GetRight Node.Right

        mv Node.Orig Node.orig
        mv Node.GetOrig Node.Orig

        mv Node.Type Node.typ
        mv Node.GetType Node.Type

        mv Node.Func Node.fn
        mv Node.GetFunc Node.Func

        mv Node.Name Node.name
        mv Node.GetName Node.Name

        # All uses are in other Node methods already.
        mv Node.E Node.e

        mv Node.Sym Node.sym
        mv Node.GetSym Node.Sym

        mv Node.Pos Node.pos
        mv Node.GetPos Node.Pos

        mv Node.Esc Node.esc
        mv Node.GetEsc Node.Esc

	# While we are here, rename Xoffset to more idiomatic Offset.
        mv Node.Xoffset Node.offset
        mv Node.GetXoffset Node.Offset
	mv Node.SetXoffset Node.SetOffset

        # While we are here, rename Ninit, Nbody to more idiomatic Init, Body.
        mv Node.Ninit Node.init
        mv Node.GetNinit Node.Init
        mv Node.PtrNinit Node.PtrInit
        mv Node.SetNinit Node.SetInit
        mv Node.Nbody Node.body
        mv Node.GetNbody Node.Body
        mv Node.PtrNbody Node.PtrBody
        mv Node.SetNbody Node.SetBody
        mv Node.List Node.list
        mv Node.GetList Node.List
        mv Node.Rlist Node.rlist
        mv Node.GetRlist Node.Rlist

        # Unexport these
        mv Node.SetHasOpt Node.setHasOpt
        mv Node.SetHasVal Node.setHasVal
'

Change-Id: I9894f633375c5237a29b6d6d7b89ba181b56ca3a
Reviewed-on: https://go-review.googlesource.com/c/go/+/273009
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Author: Russ Cox <rsc@golang.org>
Date: 2020-11-22 09:59:15 -05:00
parent 41ab6689ed
commit acb4d1cef1
44 changed files with 5188 additions and 5186 deletions

View file

@ -293,15 +293,15 @@ func genhash(t *types.Type) *obj.LSym {
// func sym(p *T, h uintptr) uintptr
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.List.Set2(
tfn.PtrList().Set2(
namedfield("p", types.NewPtr(t)),
namedfield("h", types.Types[types.TUINTPTR]),
)
tfn.Rlist.Set1(anonfield(types.Types[types.TUINTPTR]))
tfn.PtrRlist().Set1(anonfield(types.Types[types.TUINTPTR]))
fn := dclfunc(sym, tfn)
np := ir.AsNode(tfn.Type.Params().Field(0).Nname)
nh := ir.AsNode(tfn.Type.Params().Field(1).Nname)
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nh := ir.AsNode(tfn.Type().Params().Field(1).Nname)
switch t.Etype {
case types.TARRAY:
@ -312,11 +312,11 @@ func genhash(t *types.Type) *obj.LSym {
n := ir.Nod(ir.ORANGE, nil, ir.Nod(ir.ODEREF, np, nil))
ni := NewName(lookup("i"))
ni.Type = types.Types[types.TINT]
n.List.Set1(ni)
ni.SetType(types.Types[types.TINT])
n.PtrList().Set1(ni)
n.SetColas(true)
colasdefn(n.List.Slice(), n)
ni = n.List.First()
colasdefn(n.List().Slice(), n)
ni = n.List().First()
// h = hashel(&p[i], h)
call := ir.Nod(ir.OCALL, hashel, nil)
@ -324,11 +324,11 @@ func genhash(t *types.Type) *obj.LSym {
nx := ir.Nod(ir.OINDEX, np, ni)
nx.SetBounded(true)
na := ir.Nod(ir.OADDR, nx, nil)
call.List.Append(na)
call.List.Append(nh)
n.Nbody.Append(ir.Nod(ir.OAS, nh, call))
call.PtrList().Append(na)
call.PtrList().Append(nh)
n.PtrBody().Append(ir.Nod(ir.OAS, nh, call))
fn.Nbody.Append(n)
fn.PtrBody().Append(n)
case types.TSTRUCT:
// Walk the struct using memhash for runs of AMEM
@ -348,9 +348,9 @@ func genhash(t *types.Type) *obj.LSym {
call := ir.Nod(ir.OCALL, hashel, nil)
nx := nodSym(ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := ir.Nod(ir.OADDR, nx, nil)
call.List.Append(na)
call.List.Append(nh)
fn.Nbody.Append(ir.Nod(ir.OAS, nh, call))
call.PtrList().Append(na)
call.PtrList().Append(nh)
fn.PtrBody().Append(ir.Nod(ir.OAS, nh, call))
i++
continue
}
@ -363,37 +363,37 @@ func genhash(t *types.Type) *obj.LSym {
call := ir.Nod(ir.OCALL, hashel, nil)
nx := nodSym(ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := ir.Nod(ir.OADDR, nx, nil)
call.List.Append(na)
call.List.Append(nh)
call.List.Append(nodintconst(size))
fn.Nbody.Append(ir.Nod(ir.OAS, nh, call))
call.PtrList().Append(na)
call.PtrList().Append(nh)
call.PtrList().Append(nodintconst(size))
fn.PtrBody().Append(ir.Nod(ir.OAS, nh, call))
i = next
}
}
r := ir.Nod(ir.ORETURN, nil, nil)
r.List.Append(nh)
fn.Nbody.Append(r)
r.PtrList().Append(nh)
fn.PtrBody().Append(r)
if base.Flag.LowerR != 0 {
ir.DumpList("genhash body", fn.Nbody)
ir.DumpList("genhash body", fn.Body())
}
funcbody()
fn.Func.SetDupok(true)
fn.Func().SetDupok(true)
fn = typecheck(fn, ctxStmt)
Curfn = fn
typecheckslice(fn.Nbody.Slice(), ctxStmt)
typecheckslice(fn.Body().Slice(), ctxStmt)
Curfn = nil
if base.Debug.DclStack != 0 {
testdclstack()
}
fn.Func.SetNilCheckDisabled(true)
fn.Func().SetNilCheckDisabled(true)
xtop = append(xtop, fn)
// Build closure. It doesn't close over any variables, so
@ -432,12 +432,12 @@ func hashfor(t *types.Type) *ir.Node {
n := NewName(sym)
setNodeNameFunc(n)
n.Type = functype(nil, []*ir.Node{
n.SetType(functype(nil, []*ir.Node{
anonfield(types.NewPtr(t)),
anonfield(types.Types[types.TUINTPTR]),
}, []*ir.Node{
anonfield(types.Types[types.TUINTPTR]),
})
}))
return n
}
@ -522,16 +522,16 @@ func geneq(t *types.Type) *obj.LSym {
// func sym(p, q *T) bool
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.List.Set2(
tfn.PtrList().Set2(
namedfield("p", types.NewPtr(t)),
namedfield("q", types.NewPtr(t)),
)
tfn.Rlist.Set1(namedfield("r", types.Types[types.TBOOL]))
tfn.PtrRlist().Set1(namedfield("r", types.Types[types.TBOOL]))
fn := dclfunc(sym, tfn)
np := ir.AsNode(tfn.Type.Params().Field(0).Nname)
nq := ir.AsNode(tfn.Type.Params().Field(1).Nname)
nr := ir.AsNode(tfn.Type.Results().Field(0).Nname)
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nq := ir.AsNode(tfn.Type().Params().Field(1).Nname)
nr := ir.AsNode(tfn.Type().Results().Field(0).Nname)
// Label to jump to if an equality test fails.
neq := autolabel(".neq")
@ -573,11 +573,11 @@ func geneq(t *types.Type) *obj.LSym {
// pi := p[i]
pi := ir.Nod(ir.OINDEX, np, i)
pi.SetBounded(true)
pi.Type = t.Elem()
pi.SetType(t.Elem())
// qi := q[i]
qi := ir.Nod(ir.OINDEX, nq, i)
qi.SetBounded(true)
qi.Type = t.Elem()
qi.SetType(t.Elem())
return eq(pi, qi)
}
@ -590,11 +590,11 @@ func geneq(t *types.Type) *obj.LSym {
for i := int64(0); i < nelem; i++ {
// if check {} else { goto neq }
nif := ir.Nod(ir.OIF, checkIdx(nodintconst(i)), nil)
nif.Rlist.Append(nodSym(ir.OGOTO, nil, neq))
fn.Nbody.Append(nif)
nif.PtrRlist().Append(nodSym(ir.OGOTO, nil, neq))
fn.PtrBody().Append(nif)
}
if last {
fn.Nbody.Append(ir.Nod(ir.OAS, nr, checkIdx(nodintconst(nelem))))
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, checkIdx(nodintconst(nelem))))
}
} else {
// Generate a for loop.
@ -604,14 +604,14 @@ func geneq(t *types.Type) *obj.LSym {
cond := ir.Nod(ir.OLT, i, nodintconst(nelem))
post := ir.Nod(ir.OAS, i, ir.Nod(ir.OADD, i, nodintconst(1)))
loop := ir.Nod(ir.OFOR, cond, post)
loop.Ninit.Append(init)
loop.PtrInit().Append(init)
// if eq(pi, qi) {} else { goto neq }
nif := ir.Nod(ir.OIF, checkIdx(i), nil)
nif.Rlist.Append(nodSym(ir.OGOTO, nil, neq))
loop.Nbody.Append(nif)
fn.Nbody.Append(loop)
nif.PtrRlist().Append(nodSym(ir.OGOTO, nil, neq))
loop.PtrBody().Append(nif)
fn.PtrBody().Append(loop)
if last {
fn.Nbody.Append(ir.Nod(ir.OAS, nr, nodbool(true)))
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, nodbool(true)))
}
}
}
@ -712,7 +712,7 @@ func geneq(t *types.Type) *obj.LSym {
var flatConds []*ir.Node
for _, c := range conds {
isCall := func(n *ir.Node) bool {
return n.Op == ir.OCALL || n.Op == ir.OCALLFUNC
return n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC
}
sort.SliceStable(c, func(i, j int) bool {
return !isCall(c[i]) && isCall(c[j])
@ -721,51 +721,51 @@ func geneq(t *types.Type) *obj.LSym {
}
if len(flatConds) == 0 {
fn.Nbody.Append(ir.Nod(ir.OAS, nr, nodbool(true)))
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, nodbool(true)))
} else {
for _, c := range flatConds[:len(flatConds)-1] {
// if cond {} else { goto neq }
n := ir.Nod(ir.OIF, c, nil)
n.Rlist.Append(nodSym(ir.OGOTO, nil, neq))
fn.Nbody.Append(n)
n.PtrRlist().Append(nodSym(ir.OGOTO, nil, neq))
fn.PtrBody().Append(n)
}
fn.Nbody.Append(ir.Nod(ir.OAS, nr, flatConds[len(flatConds)-1]))
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, flatConds[len(flatConds)-1]))
}
}
// ret:
// return
ret := autolabel(".ret")
fn.Nbody.Append(nodSym(ir.OLABEL, nil, ret))
fn.Nbody.Append(ir.Nod(ir.ORETURN, nil, nil))
fn.PtrBody().Append(nodSym(ir.OLABEL, nil, ret))
fn.PtrBody().Append(ir.Nod(ir.ORETURN, nil, nil))
// neq:
// r = false
// return (or goto ret)
fn.Nbody.Append(nodSym(ir.OLABEL, nil, neq))
fn.Nbody.Append(ir.Nod(ir.OAS, nr, nodbool(false)))
fn.PtrBody().Append(nodSym(ir.OLABEL, nil, neq))
fn.PtrBody().Append(ir.Nod(ir.OAS, nr, nodbool(false)))
if EqCanPanic(t) || hasCall(fn) {
// Epilogue is large, so share it with the equal case.
fn.Nbody.Append(nodSym(ir.OGOTO, nil, ret))
fn.PtrBody().Append(nodSym(ir.OGOTO, nil, ret))
} else {
// Epilogue is small, so don't bother sharing.
fn.Nbody.Append(ir.Nod(ir.ORETURN, nil, nil))
fn.PtrBody().Append(ir.Nod(ir.ORETURN, nil, nil))
}
// TODO(khr): the epilogue size detection condition above isn't perfect.
// We should really do a generic CL that shares epilogues across
// the board. See #24936.
if base.Flag.LowerR != 0 {
ir.DumpList("geneq body", fn.Nbody)
ir.DumpList("geneq body", fn.Body())
}
funcbody()
fn.Func.SetDupok(true)
fn.Func().SetDupok(true)
fn = typecheck(fn, ctxStmt)
Curfn = fn
typecheckslice(fn.Nbody.Slice(), ctxStmt)
typecheckslice(fn.Body().Slice(), ctxStmt)
Curfn = nil
if base.Debug.DclStack != 0 {
@ -776,7 +776,7 @@ func geneq(t *types.Type) *obj.LSym {
// We are comparing a struct or an array,
// neither of which can be nil, and our comparisons
// are shallow.
fn.Func.SetNilCheckDisabled(true)
fn.Func().SetNilCheckDisabled(true)
xtop = append(xtop, fn)
// Generate a closure which points at the function we just generated.
@ -786,31 +786,31 @@ func geneq(t *types.Type) *obj.LSym {
}
func hasCall(n *ir.Node) bool {
if n.Op == ir.OCALL || n.Op == ir.OCALLFUNC {
if n.Op() == ir.OCALL || n.Op() == ir.OCALLFUNC {
return true
}
if n.Left != nil && hasCall(n.Left) {
if n.Left() != nil && hasCall(n.Left()) {
return true
}
if n.Right != nil && hasCall(n.Right) {
if n.Right() != nil && hasCall(n.Right()) {
return true
}
for _, x := range n.Ninit.Slice() {
for _, x := range n.Init().Slice() {
if hasCall(x) {
return true
}
}
for _, x := range n.Nbody.Slice() {
for _, x := range n.Body().Slice() {
if hasCall(x) {
return true
}
}
for _, x := range n.List.Slice() {
for _, x := range n.List().Slice() {
if hasCall(x) {
return true
}
}
for _, x := range n.Rlist.Slice() {
for _, x := range n.Rlist().Slice() {
if hasCall(x) {
return true
}
@ -844,12 +844,12 @@ func eqstring(s, t *ir.Node) (eqlen, eqmem *ir.Node) {
fn := syslook("memequal")
fn = substArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
call := ir.Nod(ir.OCALL, fn, nil)
call.List.Append(sptr, tptr, ir.Copy(slen))
call.PtrList().Append(sptr, tptr, ir.Copy(slen))
call = typecheck(call, ctxExpr|ctxMultiOK)
cmp := ir.Nod(ir.OEQ, slen, tlen)
cmp = typecheck(cmp, ctxExpr)
cmp.Type = types.Types[types.TBOOL]
cmp.SetType(types.Types[types.TBOOL])
return cmp, call
}
@ -860,13 +860,13 @@ func eqstring(s, t *ir.Node) (eqlen, eqmem *ir.Node) {
// which can be used to construct interface equality comparison.
// eqtab must be evaluated before eqdata, and shortcircuiting is required.
func eqinterface(s, t *ir.Node) (eqtab, eqdata *ir.Node) {
if !types.Identical(s.Type, t.Type) {
base.Fatalf("eqinterface %v %v", s.Type, t.Type)
if !types.Identical(s.Type(), t.Type()) {
base.Fatalf("eqinterface %v %v", s.Type(), t.Type())
}
// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
var fn *ir.Node
if s.Type.IsEmptyInterface() {
if s.Type().IsEmptyInterface() {
fn = syslook("efaceeq")
} else {
fn = syslook("ifaceeq")
@ -876,18 +876,18 @@ func eqinterface(s, t *ir.Node) (eqtab, eqdata *ir.Node) {
ttab := ir.Nod(ir.OITAB, t, nil)
sdata := ir.Nod(ir.OIDATA, s, nil)
tdata := ir.Nod(ir.OIDATA, t, nil)
sdata.Type = types.Types[types.TUNSAFEPTR]
tdata.Type = types.Types[types.TUNSAFEPTR]
sdata.SetType(types.Types[types.TUNSAFEPTR])
tdata.SetType(types.Types[types.TUNSAFEPTR])
sdata.SetTypecheck(1)
tdata.SetTypecheck(1)
call := ir.Nod(ir.OCALL, fn, nil)
call.List.Append(stab, sdata, tdata)
call.PtrList().Append(stab, sdata, tdata)
call = typecheck(call, ctxExpr|ctxMultiOK)
cmp := ir.Nod(ir.OEQ, stab, ttab)
cmp = typecheck(cmp, ctxExpr)
cmp.Type = types.Types[types.TBOOL]
cmp.SetType(types.Types[types.TBOOL])
return cmp, call
}
@ -899,12 +899,12 @@ func eqmem(p *ir.Node, q *ir.Node, field *types.Sym, size int64) *ir.Node {
nx = typecheck(nx, ctxExpr)
ny = typecheck(ny, ctxExpr)
fn, needsize := eqmemfunc(size, nx.Type.Elem())
fn, needsize := eqmemfunc(size, nx.Type().Elem())
call := ir.Nod(ir.OCALL, fn, nil)
call.List.Append(nx)
call.List.Append(ny)
call.PtrList().Append(nx)
call.PtrList().Append(ny)
if needsize {
call.List.Append(nodintconst(size))
call.PtrList().Append(nodintconst(size))
}
return call

View file

@ -126,11 +126,11 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
// NOTE(rsc): This comment may be stale.
// It's possible the ordering has changed and this is
// now the common case. I'm not sure.
if n.Name.Param.Stackcopy != nil {
n.Name.Param.Stackcopy.Xoffset = o
n.Xoffset = 0
if n.Name().Param.Stackcopy != nil {
n.Name().Param.Stackcopy.SetOffset(o)
n.SetOffset(0)
} else {
n.Xoffset = o
n.SetOffset(o)
}
}
@ -198,7 +198,7 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
}
*path = append(*path, t)
if p := ir.AsNode(t.Nod).Name.Param; p != nil && findTypeLoop(p.Ntype.Type, path) {
if p := ir.AsNode(t.Nod).Name().Param; p != nil && findTypeLoop(p.Ntype.Type(), path) {
return true
}
*path = (*path)[:len(*path)-1]
@ -308,7 +308,7 @@ func dowidth(t *types.Type) {
lno := base.Pos
if ir.AsNode(t.Nod) != nil {
base.Pos = ir.AsNode(t.Nod).Pos
base.Pos = ir.AsNode(t.Nod).Pos()
}
t.Width = -2

View file

@ -15,11 +15,11 @@ type exporter struct {
// markObject visits a reachable object.
func (p *exporter) markObject(n *ir.Node) {
if n.Op == ir.ONAME && n.Class() == ir.PFUNC {
if n.Op() == ir.ONAME && n.Class() == ir.PFUNC {
inlFlood(n)
}
p.markType(n.Type)
p.markType(n.Type())
}
// markType recursively visits types reachable from t to identify

View file

@ -10,7 +10,7 @@ import (
)
func npos(pos src.XPos, n *ir.Node) *ir.Node {
n.Pos = pos
n.SetPos(pos)
return n
}

View file

@ -18,14 +18,14 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node {
ntype := p.typeExpr(expr.Type)
dcl := p.nod(expr, ir.ODCLFUNC, nil, nil)
fn := dcl.Func
fn := dcl.Func()
fn.SetIsHiddenClosure(Curfn != nil)
fn.Nname = newfuncnamel(p.pos(expr), ir.BlankNode.Sym, fn) // filled in by typecheckclosure
fn.Nname.Name.Param.Ntype = xtype
fn.Nname.Name.Defn = dcl
fn.Nname = newfuncnamel(p.pos(expr), ir.BlankNode.Sym(), fn) // filled in by typecheckclosure
fn.Nname.Name().Param.Ntype = xtype
fn.Nname.Name().Defn = dcl
clo := p.nod(expr, ir.OCLOSURE, nil, nil)
clo.Func = fn
clo.SetFunc(fn)
fn.ClosureType = ntype
fn.OClosure = clo
@ -37,8 +37,8 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node {
// make the list of pointers for the closure call.
for _, v := range fn.ClosureVars.Slice() {
// Unlink from v1; see comment in syntax.go type Param for these fields.
v1 := v.Name.Defn
v1.Name.Param.Innermost = v.Name.Param.Outer
v1 := v.Name().Defn
v1.Name().Param.Innermost = v.Name().Param.Outer
// If the closure usage of v is not dense,
// we need to make it dense; now that we're out
@ -68,7 +68,7 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node {
// obtains f3's v, creating it if necessary (as it is in the example).
//
// capturevars will decide whether to use v directly or &v.
v.Name.Param.Outer = oldname(v.Sym)
v.Name().Param.Outer = oldname(v.Sym())
}
return clo
@ -79,7 +79,7 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node {
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
func typecheckclosure(clo *ir.Node, top int) {
fn := clo.Func
fn := clo.Func()
dcl := fn.Decl
// Set current associated iota value, so iota can be used inside
// function in ConstSpec, see issue #22344
@ -88,7 +88,7 @@ func typecheckclosure(clo *ir.Node, top int) {
}
fn.ClosureType = typecheck(fn.ClosureType, ctxType)
clo.Type = fn.ClosureType.Type
clo.SetType(fn.ClosureType.Type())
fn.ClosureCalled = top&ctxCallee != 0
// Do not typecheck dcl twice, otherwise, we will end up pushing
@ -99,22 +99,22 @@ func typecheckclosure(clo *ir.Node, top int) {
}
for _, ln := range fn.ClosureVars.Slice() {
n := ln.Name.Defn
if !n.Name.Captured() {
n.Name.SetCaptured(true)
if n.Name.Decldepth == 0 {
n := ln.Name().Defn
if !n.Name().Captured() {
n.Name().SetCaptured(true)
if n.Name().Decldepth == 0 {
base.Fatalf("typecheckclosure: var %S does not have decldepth assigned", n)
}
// Ignore assignments to the variable in straightline code
// preceding the first capturing by a closure.
if n.Name.Decldepth == decldepth {
n.Name.SetAssigned(false)
if n.Name().Decldepth == decldepth {
n.Name().SetAssigned(false)
}
}
}
fn.Nname.Sym = closurename(Curfn)
fn.Nname.SetSym(closurename(Curfn))
setNodeNameFunc(fn.Nname)
dcl = typecheck(dcl, ctxStmt)
@ -122,12 +122,12 @@ func typecheckclosure(clo *ir.Node, top int) {
// At top level (in a variable initialization: curfn==nil) we're not
// ready to type check code yet; we'll check it later, because the
// underlying closure function we create is added to xtop.
if Curfn != nil && clo.Type != nil {
if Curfn != nil && clo.Type() != nil {
oldfn := Curfn
Curfn = dcl
olddd := decldepth
decldepth = 1
typecheckslice(dcl.Nbody.Slice(), ctxStmt)
typecheckslice(dcl.Body().Slice(), ctxStmt)
decldepth = olddd
Curfn = oldfn
}
@ -146,7 +146,7 @@ func closurename(outerfunc *ir.Node) *types.Sym {
gen := &globClosgen
if outerfunc != nil {
if outerfunc.Func.OClosure != nil {
if outerfunc.Func().OClosure != nil {
prefix = ""
}
@ -155,8 +155,8 @@ func closurename(outerfunc *ir.Node) *types.Sym {
// There may be multiple functions named "_". In those
// cases, we can't use their individual Closgens as it
// would lead to name clashes.
if !ir.IsBlank(outerfunc.Func.Nname) {
gen = &outerfunc.Func.Closgen
if !ir.IsBlank(outerfunc.Func().Nname) {
gen = &outerfunc.Func().Closgen
}
}
@ -174,12 +174,12 @@ var capturevarscomplete bool
// after capturing (effectively constant).
func capturevars(dcl *ir.Node) {
lno := base.Pos
base.Pos = dcl.Pos
fn := dcl.Func
base.Pos = dcl.Pos()
fn := dcl.Func()
cvars := fn.ClosureVars.Slice()
out := cvars[:0]
for _, v := range cvars {
if v.Type == nil {
if v.Type() == nil {
// If v.Type is nil, it means v looked like it
// was going to be used in the closure, but
// isn't. This happens in struct literals like
@ -192,29 +192,29 @@ func capturevars(dcl *ir.Node) {
// type check the & of closed variables outside the closure,
// so that the outer frame also grabs them and knows they escape.
dowidth(v.Type)
dowidth(v.Type())
outer := v.Name.Param.Outer
outermost := v.Name.Defn
outer := v.Name().Param.Outer
outermost := v.Name().Defn
// out parameters will be assigned to implicitly upon return.
if outermost.Class() != ir.PPARAMOUT && !outermost.Name.Addrtaken() && !outermost.Name.Assigned() && v.Type.Width <= 128 {
v.Name.SetByval(true)
if outermost.Class() != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
v.Name().SetByval(true)
} else {
outermost.Name.SetAddrtaken(true)
outermost.Name().SetAddrtaken(true)
outer = ir.Nod(ir.OADDR, outer, nil)
}
if base.Flag.LowerM > 1 {
var name *types.Sym
if v.Name.Curfn != nil && v.Name.Curfn.Func.Nname != nil {
name = v.Name.Curfn.Func.Nname.Sym
if v.Name().Curfn != nil && v.Name().Curfn.Func().Nname != nil {
name = v.Name().Curfn.Func().Nname.Sym()
}
how := "ref"
if v.Name.Byval() {
if v.Name().Byval() {
how = "value"
}
base.WarnfAt(v.Pos, "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym, outermost.Name.Addrtaken(), outermost.Name.Assigned(), int32(v.Type.Width))
base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
}
outer = typecheck(outer, ctxExpr)
@ -229,8 +229,8 @@ func capturevars(dcl *ir.Node) {
// It transform closure bodies to properly reference captured variables.
func transformclosure(dcl *ir.Node) {
lno := base.Pos
base.Pos = dcl.Pos
fn := dcl.Func
base.Pos = dcl.Pos()
fn := dcl.Func()
if fn.ClosureCalled {
// If the closure is directly called, we transform it to a plain function call
@ -255,33 +255,33 @@ func transformclosure(dcl *ir.Node) {
var params []*types.Field
var decls []*ir.Node
for _, v := range fn.ClosureVars.Slice() {
if !v.Name.Byval() {
if !v.Name().Byval() {
// If v of type T is captured by reference,
// we introduce function param &v *T
// and v remains PAUTOHEAP with &v heapaddr
// (accesses will implicitly deref &v).
addr := NewName(lookup("&" + v.Sym.Name))
addr.Type = types.NewPtr(v.Type)
v.Name.Param.Heapaddr = addr
addr := NewName(lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
v.Name().Param.Heapaddr = addr
v = addr
}
v.SetClass(ir.PPARAM)
decls = append(decls, v)
fld := types.NewField(src.NoXPos, v.Sym, v.Type)
fld := types.NewField(src.NoXPos, v.Sym(), v.Type())
fld.Nname = ir.AsTypesNode(v)
params = append(params, fld)
}
if len(params) > 0 {
// Prepend params and decls.
f.Type.Params().SetFields(append(params, f.Type.Params().FieldSlice()...))
f.Type().Params().SetFields(append(params, f.Type().Params().FieldSlice()...))
fn.Dcl = append(decls, fn.Dcl...)
}
dowidth(f.Type)
dcl.Type = f.Type // update type of ODCLFUNC
dowidth(f.Type())
dcl.SetType(f.Type()) // update type of ODCLFUNC
} else {
// The closure is not called, so it is going to stay as closure.
var body []*ir.Node
@ -290,15 +290,15 @@ func transformclosure(dcl *ir.Node) {
// cv refers to the field inside of closure OSTRUCTLIT.
cv := ir.Nod(ir.OCLOSUREVAR, nil, nil)
cv.Type = v.Type
if !v.Name.Byval() {
cv.Type = types.NewPtr(v.Type)
cv.SetType(v.Type())
if !v.Name().Byval() {
cv.SetType(types.NewPtr(v.Type()))
}
offset = Rnd(offset, int64(cv.Type.Align))
cv.Xoffset = offset
offset += cv.Type.Width
offset = Rnd(offset, int64(cv.Type().Align))
cv.SetOffset(offset)
offset += cv.Type().Width
if v.Name.Byval() && v.Type.Width <= int64(2*Widthptr) {
if v.Name().Byval() && v.Type().Width <= int64(2*Widthptr) {
// If it is a small variable captured by value, downgrade it to PAUTO.
v.SetClass(ir.PAUTO)
fn.Dcl = append(fn.Dcl, v)
@ -306,14 +306,14 @@ func transformclosure(dcl *ir.Node) {
} else {
// Declare variable holding addresses taken from closure
// and initialize in entry prologue.
addr := NewName(lookup("&" + v.Sym.Name))
addr.Type = types.NewPtr(v.Type)
addr := NewName(lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
addr.SetClass(ir.PAUTO)
addr.Name.SetUsed(true)
addr.Name.Curfn = dcl
addr.Name().SetUsed(true)
addr.Name().Curfn = dcl
fn.Dcl = append(fn.Dcl, addr)
v.Name.Param.Heapaddr = addr
if v.Name.Byval() {
v.Name().Param.Heapaddr = addr
if v.Name().Byval() {
cv = ir.Nod(ir.OADDR, cv, nil)
}
body = append(body, ir.Nod(ir.OAS, addr, cv))
@ -333,21 +333,21 @@ func transformclosure(dcl *ir.Node) {
// hasemptycvars reports whether closure clo has an
// empty list of captured vars.
func hasemptycvars(clo *ir.Node) bool {
return clo.Func.ClosureVars.Len() == 0
return clo.Func().ClosureVars.Len() == 0
}
// closuredebugruntimecheck applies boilerplate checks for debug flags
// and compiling runtime
func closuredebugruntimecheck(clo *ir.Node) {
if base.Debug.Closure > 0 {
if clo.Esc == EscHeap {
base.WarnfAt(clo.Pos, "heap closure, captured vars = %v", clo.Func.ClosureVars)
if clo.Esc() == EscHeap {
base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func().ClosureVars)
} else {
base.WarnfAt(clo.Pos, "stack closure, captured vars = %v", clo.Func.ClosureVars)
base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func().ClosureVars)
}
}
if base.Flag.CompilingRuntime && clo.Esc == EscHeap {
base.ErrorfAt(clo.Pos, "heap-allocated closure, not allowed in runtime")
if base.Flag.CompilingRuntime && clo.Esc() == EscHeap {
base.ErrorfAt(clo.Pos(), "heap-allocated closure, not allowed in runtime")
}
}
@ -371,12 +371,12 @@ func closureType(clo *ir.Node) *types.Type {
fields := []*ir.Node{
namedfield(".F", types.Types[types.TUINTPTR]),
}
for _, v := range clo.Func.ClosureVars.Slice() {
typ := v.Type
if !v.Name.Byval() {
for _, v := range clo.Func().ClosureVars.Slice() {
typ := v.Type()
if !v.Name().Byval() {
typ = types.NewPtr(typ)
}
fields = append(fields, symfield(v.Sym, typ))
fields = append(fields, symfield(v.Sym(), typ))
}
typ := tostruct(fields)
typ.SetNoalg(true)
@ -384,12 +384,12 @@ func closureType(clo *ir.Node) *types.Type {
}
func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node {
fn := clo.Func
fn := clo.Func()
// If no closure vars, don't bother wrapping.
if hasemptycvars(clo) {
if base.Debug.Closure > 0 {
base.WarnfAt(clo.Pos, "closure converted to global")
base.WarnfAt(clo.Pos(), "closure converted to global")
}
return fn.Nname
}
@ -398,21 +398,21 @@ func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node {
typ := closureType(clo)
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
clos.Esc = clo.Esc
clos.List.Set(append([]*ir.Node{ir.Nod(ir.OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...))
clos.SetEsc(clo.Esc())
clos.PtrList().Set(append([]*ir.Node{ir.Nod(ir.OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...))
clos = ir.Nod(ir.OADDR, clos, nil)
clos.Esc = clo.Esc
clos.SetEsc(clo.Esc())
// Force type conversion from *struct to the func type.
clos = convnop(clos, clo.Type)
clos = convnop(clos, clo.Type())
// non-escaping temp to use, if any.
if x := prealloc[clo]; x != nil {
if !types.Identical(typ, x.Type) {
if !types.Identical(typ, x.Type()) {
panic("closure type does not match order's assigned type")
}
clos.Left.Right = x
clos.Left().SetRight(x)
delete(prealloc, clo)
}
@ -420,7 +420,7 @@ func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node {
}
func typecheckpartialcall(dot *ir.Node, sym *types.Sym) {
switch dot.Op {
switch dot.Op() {
case ir.ODOTINTER, ir.ODOTMETH:
break
@ -429,19 +429,19 @@ func typecheckpartialcall(dot *ir.Node, sym *types.Sym) {
}
// Create top-level function.
dcl := makepartialcall(dot, dot.Type, sym)
dcl.Func.SetWrapper(true)
dot.Op = ir.OCALLPART
dot.Right = NewName(sym)
dot.Type = dcl.Type
dot.Func = dcl.Func
dcl := makepartialcall(dot, dot.Type(), sym)
dcl.Func().SetWrapper(true)
dot.SetOp(ir.OCALLPART)
dot.SetRight(NewName(sym))
dot.SetType(dcl.Type())
dot.SetFunc(dcl.Func())
dot.SetOpt(nil) // clear types.Field from ODOTMETH
}
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node {
rcvrtype := dot.Left.Type
rcvrtype := dot.Left().Type()
sym := methodSymSuffix(rcvrtype, meth, "-fm")
if sym.Uniq() {
@ -465,52 +465,52 @@ func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node {
// case. See issue 29389.
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.List.Set(structargs(t0.Params(), true))
tfn.Rlist.Set(structargs(t0.Results(), false))
tfn.PtrList().Set(structargs(t0.Params(), true))
tfn.PtrRlist().Set(structargs(t0.Results(), false))
dcl := dclfunc(sym, tfn)
fn := dcl.Func
fn := dcl.Func()
fn.SetDupok(true)
fn.SetNeedctxt(true)
tfn.Type.SetPkg(t0.Pkg())
tfn.Type().SetPkg(t0.Pkg())
// Declare and initialize variable holding receiver.
cv := ir.Nod(ir.OCLOSUREVAR, nil, nil)
cv.Type = rcvrtype
cv.Xoffset = Rnd(int64(Widthptr), int64(cv.Type.Align))
cv.SetType(rcvrtype)
cv.SetOffset(Rnd(int64(Widthptr), int64(cv.Type().Align)))
ptr := NewName(lookup(".this"))
declare(ptr, ir.PAUTO)
ptr.Name.SetUsed(true)
ptr.Name().SetUsed(true)
var body []*ir.Node
if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
ptr.Type = rcvrtype
ptr.SetType(rcvrtype)
body = append(body, ir.Nod(ir.OAS, ptr, cv))
} else {
ptr.Type = types.NewPtr(rcvrtype)
ptr.SetType(types.NewPtr(rcvrtype))
body = append(body, ir.Nod(ir.OAS, ptr, ir.Nod(ir.OADDR, cv, nil)))
}
call := ir.Nod(ir.OCALL, nodSym(ir.OXDOT, ptr, meth), nil)
call.List.Set(paramNnames(tfn.Type))
call.SetIsDDD(tfn.Type.IsVariadic())
call.PtrList().Set(paramNnames(tfn.Type()))
call.SetIsDDD(tfn.Type().IsVariadic())
if t0.NumResults() != 0 {
n := ir.Nod(ir.ORETURN, nil, nil)
n.List.Set1(call)
n.PtrList().Set1(call)
call = n
}
body = append(body, call)
dcl.Nbody.Set(body)
dcl.PtrBody().Set(body)
funcbody()
dcl = typecheck(dcl, ctxStmt)
// Need to typecheck the body of the just-generated wrapper.
// typecheckslice() requires that Curfn is set when processing an ORETURN.
Curfn = dcl
typecheckslice(dcl.Nbody.Slice(), ctxStmt)
typecheckslice(dcl.Body().Slice(), ctxStmt)
sym.Def = ir.AsTypesNode(dcl)
xtop = append(xtop, dcl)
Curfn = savecurfn
@ -525,7 +525,7 @@ func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node {
func partialCallType(n *ir.Node) *types.Type {
t := tostruct([]*ir.Node{
namedfield("F", types.Types[types.TUINTPTR]),
namedfield("R", n.Left.Type),
namedfield("R", n.Left().Type()),
})
t.SetNoalg(true)
return t
@ -539,13 +539,13 @@ func walkpartialcall(n *ir.Node, init *ir.Nodes) *ir.Node {
//
// Like walkclosure above.
if n.Left.Type.IsInterface() {
if n.Left().Type().IsInterface() {
// Trigger panic for method on nil interface now.
// Otherwise it happens in the wrapper and is confusing.
n.Left = cheapexpr(n.Left, init)
n.Left = walkexpr(n.Left, nil)
n.SetLeft(cheapexpr(n.Left(), init))
n.SetLeft(walkexpr(n.Left(), nil))
tab := ir.Nod(ir.OITAB, n.Left, nil)
tab := ir.Nod(ir.OITAB, n.Left(), nil)
tab = typecheck(tab, ctxExpr)
c := ir.Nod(ir.OCHECKNIL, tab, nil)
@ -556,21 +556,21 @@ func walkpartialcall(n *ir.Node, init *ir.Nodes) *ir.Node {
typ := partialCallType(n)
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
clos.Esc = n.Esc
clos.List.Set2(ir.Nod(ir.OCFUNC, n.Func.Nname, nil), n.Left)
clos.SetEsc(n.Esc())
clos.PtrList().Set2(ir.Nod(ir.OCFUNC, n.Func().Nname, nil), n.Left())
clos = ir.Nod(ir.OADDR, clos, nil)
clos.Esc = n.Esc
clos.SetEsc(n.Esc())
// Force type conversion from *struct to the func type.
clos = convnop(clos, n.Type)
clos = convnop(clos, n.Type())
// non-escaping temp to use, if any.
if x := prealloc[n]; x != nil {
if !types.Identical(typ, x.Type) {
if !types.Identical(typ, x.Type()) {
panic("partial call type does not match order's assigned type")
}
clos.Left.Right = x
clos.Left().SetRight(x)
delete(prealloc, n)
}
@ -580,14 +580,14 @@ func walkpartialcall(n *ir.Node, init *ir.Nodes) *ir.Node {
// callpartMethod returns the *types.Field representing the method
// referenced by method value n.
func callpartMethod(n *ir.Node) *types.Field {
if n.Op != ir.OCALLPART {
if n.Op() != ir.OCALLPART {
base.Fatalf("expected OCALLPART, got %v", n)
}
// TODO(mdempsky): Optimize this. If necessary,
// makepartialcall could save m for us somewhere.
var m *types.Field
if lookdot0(n.Right.Sym, n.Left.Type, &m, false) != 1 {
if lookdot0(n.Right().Sym(), n.Left().Type(), &m, false) != 1 {
base.Fatalf("failed to find field for OCALLPART")
}

View file

@ -106,30 +106,30 @@ func convlit1(n *ir.Node, t *types.Type, explicit bool, context func() string) *
base.Fatalf("bad conversion to untyped: %v", t)
}
if n == nil || n.Type == nil {
if n == nil || n.Type() == nil {
// Allow sloppy callers.
return n
}
if !n.Type.IsUntyped() {
if !n.Type().IsUntyped() {
// Already typed; nothing to do.
return n
}
if n.Op == ir.OLITERAL || n.Op == ir.ONIL {
if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL {
// Can't always set n.Type directly on OLITERAL nodes.
// See discussion on CL 20813.
n = n.RawCopy()
}
// Nil is technically not a constant, so handle it specially.
if n.Type.Etype == types.TNIL {
if n.Op != ir.ONIL {
base.Fatalf("unexpected op: %v (%v)", n, n.Op)
if n.Type().Etype == types.TNIL {
if n.Op() != ir.ONIL {
base.Fatalf("unexpected op: %v (%v)", n, n.Op())
}
if t == nil {
base.Errorf("use of untyped nil")
n.SetDiag(true)
n.Type = nil
n.SetType(nil)
return n
}
@ -138,15 +138,15 @@ func convlit1(n *ir.Node, t *types.Type, explicit bool, context func() string) *
return n
}
n.Type = t
n.SetType(t)
return n
}
if t == nil || !ir.OKForConst[t.Etype] {
t = defaultType(n.Type)
t = defaultType(n.Type())
}
switch n.Op {
switch n.Op() {
default:
base.Fatalf("unexpected untyped expression: %v", n)
@ -155,60 +155,60 @@ func convlit1(n *ir.Node, t *types.Type, explicit bool, context func() string) *
if v.Kind() == constant.Unknown {
break
}
n.Type = t
n.SetType(t)
n.SetVal(v)
return n
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.OREAL, ir.OIMAG:
ot := operandType(n.Op, t)
ot := operandType(n.Op(), t)
if ot == nil {
n = defaultlit(n, nil)
break
}
n.Left = convlit(n.Left, ot)
if n.Left.Type == nil {
n.Type = nil
n.SetLeft(convlit(n.Left(), ot))
if n.Left().Type() == nil {
n.SetType(nil)
return n
}
n.Type = t
n.SetType(t)
return n
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND, ir.OCOMPLEX:
ot := operandType(n.Op, t)
ot := operandType(n.Op(), t)
if ot == nil {
n = defaultlit(n, nil)
break
}
n.Left = convlit(n.Left, ot)
n.Right = convlit(n.Right, ot)
if n.Left.Type == nil || n.Right.Type == nil {
n.Type = nil
n.SetLeft(convlit(n.Left(), ot))
n.SetRight(convlit(n.Right(), ot))
if n.Left().Type() == nil || n.Right().Type() == nil {
n.SetType(nil)
return n
}
if !types.Identical(n.Left.Type, n.Right.Type) {
base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, n.Left.Type, n.Right.Type)
n.Type = nil
if !types.Identical(n.Left().Type(), n.Right().Type()) {
base.Errorf("invalid operation: %v (mismatched types %v and %v)", n, n.Left().Type(), n.Right().Type())
n.SetType(nil)
return n
}
n.Type = t
n.SetType(t)
return n
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
if !t.IsBoolean() {
break
}
n.Type = t
n.SetType(t)
return n
case ir.OLSH, ir.ORSH:
n.Left = convlit1(n.Left, t, explicit, nil)
n.Type = n.Left.Type
if n.Type != nil && !n.Type.IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type)
n.Type = nil
n.SetLeft(convlit1(n.Left(), t, explicit, nil))
n.SetType(n.Left().Type())
if n.Type() != nil && !n.Type().IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type())
n.SetType(nil)
}
return n
}
@ -225,7 +225,7 @@ func convlit1(n *ir.Node, t *types.Type, explicit bool, context func() string) *
}
n.SetDiag(true)
}
n.Type = nil
n.SetType(nil)
return n
}
@ -439,75 +439,75 @@ var tokenForOp = [...]token.Token{
// Otherwise, evalConst returns a new OLITERAL with the same value as n,
// and with .Orig pointing back to n.
func evalConst(n *ir.Node) *ir.Node {
nl, nr := n.Left, n.Right
nl, nr := n.Left(), n.Right()
// Pick off just the opcodes that can be constant evaluated.
switch op := n.Op; op {
switch op := n.Op(); op {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
if nl.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL {
var prec uint
if n.Type.IsUnsigned() {
prec = uint(n.Type.Size() * 8)
if n.Type().IsUnsigned() {
prec = uint(n.Type().Size() * 8)
}
return origConst(n, constant.UnaryOp(tokenForOp[op], nl.Val(), prec))
}
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
rval := nr.Val()
// check for divisor underflow in complex division (see issue 20227)
if op == ir.ODIV && n.Type.IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
if op == ir.ODIV && n.Type().IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
base.Errorf("complex division by zero")
n.Type = nil
n.SetType(nil)
return n
}
if (op == ir.ODIV || op == ir.OMOD) && constant.Sign(rval) == 0 {
base.Errorf("division by zero")
n.Type = nil
n.SetType(nil)
return n
}
tok := tokenForOp[op]
if op == ir.ODIV && n.Type.IsInteger() {
if op == ir.ODIV && n.Type().IsInteger() {
tok = token.QUO_ASSIGN // integer division
}
return origConst(n, constant.BinaryOp(nl.Val(), tok, rval))
}
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origBoolConst(n, constant.Compare(nl.Val(), tokenForOp[op], nr.Val()))
}
case ir.OLSH, ir.ORSH:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
// shiftBound from go/types; "so we can express smallestFloat64"
const shiftBound = 1023 - 1 + 52
s, ok := constant.Uint64Val(nr.Val())
if !ok || s > shiftBound {
base.Errorf("invalid shift count %v", nr)
n.Type = nil
n.SetType(nil)
break
}
return origConst(n, constant.Shift(toint(nl.Val()), tokenForOp[op], uint(s)))
}
case ir.OCONV, ir.ORUNESTR:
if ir.OKForConst[n.Type.Etype] && nl.Op == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type, true))
if ir.OKForConst[n.Type().Etype] && nl.Op() == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type(), true))
}
case ir.OCONVNOP:
if ir.OKForConst[n.Type.Etype] && nl.Op == ir.OLITERAL {
if ir.OKForConst[n.Type().Etype] && nl.Op() == ir.OLITERAL {
// set so n.Orig gets OCONV instead of OCONVNOP
n.Op = ir.OCONV
n.SetOp(ir.OCONV)
return origConst(n, nl.Val())
}
case ir.OADDSTR:
// Merge adjacent constants in the argument list.
s := n.List.Slice()
s := n.List().Slice()
need := 0
for i := 0; i < len(s); i++ {
if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
@ -537,7 +537,7 @@ func evalConst(n *ir.Node) *ir.Node {
}
nl := origConst(s[i], constant.MakeString(strings.Join(strs, "")))
nl.Orig = nl // it's bigger than just s[i]
nl.SetOrig(nl) // it's bigger than just s[i]
newList = append(newList, nl)
i = i2 - 1
} else {
@ -546,18 +546,18 @@ func evalConst(n *ir.Node) *ir.Node {
}
n = ir.Copy(n)
n.List.Set(newList)
n.PtrList().Set(newList)
return n
case ir.OCAP, ir.OLEN:
switch nl.Type.Etype {
switch nl.Type().Etype {
case types.TSTRING:
if ir.IsConst(nl, constant.String) {
return origIntConst(n, int64(len(nl.StringVal())))
}
case types.TARRAY:
if !hascallchan(nl) {
return origIntConst(n, nl.Type.NumElem())
return origIntConst(n, nl.Type().NumElem())
}
}
@ -565,17 +565,17 @@ func evalConst(n *ir.Node) *ir.Node {
return origIntConst(n, evalunsafe(n))
case ir.OREAL:
if nl.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Real(nl.Val()))
}
case ir.OIMAG:
if nl.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Imag(nl.Val()))
}
case ir.OCOMPLEX:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, makeComplex(nl.Val(), nr.Val()))
}
}
@ -621,7 +621,7 @@ var overflowNames = [...]string{
// origConst returns an OLITERAL with orig n and value v.
func origConst(n *ir.Node, v constant.Value) *ir.Node {
lno := setlineno(n)
v = convertVal(v, n.Type, false)
v = convertVal(v, n.Type(), false)
base.Pos = lno
switch v.Kind() {
@ -631,19 +631,19 @@ func origConst(n *ir.Node, v constant.Value) *ir.Node {
}
fallthrough
case constant.Unknown:
what := overflowNames[n.Op]
what := overflowNames[n.Op()]
if what == "" {
base.Fatalf("unexpected overflow: %v", n.Op)
base.Fatalf("unexpected overflow: %v", n.Op())
}
base.ErrorfAt(n.Pos, "constant %v overflow", what)
n.Type = nil
base.ErrorfAt(n.Pos(), "constant %v overflow", what)
n.SetType(nil)
return n
}
orig := n
n = ir.NodAt(orig.Pos, ir.OLITERAL, nil, nil)
n.Orig = orig
n.Type = orig.Type
n = ir.NodAt(orig.Pos(), ir.OLITERAL, nil, nil)
n.SetOrig(orig)
n.SetType(orig.Type())
n.SetVal(v)
return n
}
@ -663,16 +663,16 @@ func origIntConst(n *ir.Node, v int64) *ir.Node {
// The results of defaultlit2 MUST be assigned back to l and r, e.g.
// n.Left, n.Right = defaultlit2(n.Left, n.Right, force)
func defaultlit2(l *ir.Node, r *ir.Node, force bool) (*ir.Node, *ir.Node) {
if l.Type == nil || r.Type == nil {
if l.Type() == nil || r.Type() == nil {
return l, r
}
if !l.Type.IsUntyped() {
r = convlit(r, l.Type)
if !l.Type().IsUntyped() {
r = convlit(r, l.Type())
return l, r
}
if !r.Type.IsUntyped() {
l = convlit(l, r.Type)
if !r.Type().IsUntyped() {
l = convlit(l, r.Type())
return l, r
}
@ -681,17 +681,17 @@ func defaultlit2(l *ir.Node, r *ir.Node, force bool) (*ir.Node, *ir.Node) {
}
// Can't mix bool with non-bool, string with non-string, or nil with anything (untyped).
if l.Type.IsBoolean() != r.Type.IsBoolean() {
if l.Type().IsBoolean() != r.Type().IsBoolean() {
return l, r
}
if l.Type.IsString() != r.Type.IsString() {
if l.Type().IsString() != r.Type().IsString() {
return l, r
}
if ir.IsNil(l) || ir.IsNil(r) {
return l, r
}
t := defaultType(mixUntyped(l.Type, r.Type))
t := defaultType(mixUntyped(l.Type(), r.Type()))
l = convlit(l, t)
r = convlit(r, t)
return l, r
@ -748,7 +748,7 @@ func defaultType(t *types.Type) *types.Type {
}
func smallintconst(n *ir.Node) bool {
if n.Op == ir.OLITERAL {
if n.Op() == ir.OLITERAL {
v, ok := constant.Int64Val(n.Val())
return ok && int64(int32(v)) == v
}
@ -761,10 +761,10 @@ func smallintconst(n *ir.Node) bool {
// integer, or negative, it returns -1. If n is too large, it
// returns -2.
func indexconst(n *ir.Node) int64 {
if n.Op != ir.OLITERAL {
if n.Op() != ir.OLITERAL {
return -1
}
if !n.Type.IsInteger() && n.Type.Etype != types.TIDEAL {
if !n.Type().IsInteger() && n.Type().Etype != types.TIDEAL {
return -1
}
@ -784,14 +784,14 @@ func indexconst(n *ir.Node) int64 {
// Expressions derived from nil, like string([]byte(nil)), while they
// may be known at compile time, are not Go language constants.
func isGoConst(n *ir.Node) bool {
return n.Op == ir.OLITERAL
return n.Op() == ir.OLITERAL
}
func hascallchan(n *ir.Node) bool {
if n == nil {
return false
}
switch n.Op {
switch n.Op() {
case ir.OAPPEND,
ir.OCALL,
ir.OCALLFUNC,
@ -815,15 +815,15 @@ func hascallchan(n *ir.Node) bool {
return true
}
if hascallchan(n.Left) || hascallchan(n.Right) {
if hascallchan(n.Left()) || hascallchan(n.Right()) {
return true
}
for _, n1 := range n.List.Slice() {
for _, n1 := range n.List().Slice() {
if hascallchan(n1) {
return true
}
}
for _, n2 := range n.Rlist.Slice() {
for _, n2 := range n.Rlist().Slice() {
if hascallchan(n2) {
return true
}
@ -852,14 +852,14 @@ type constSetKey struct {
//
// n must not be an untyped constant.
func (s *constSet) add(pos src.XPos, n *ir.Node, what, where string) {
if n.Op == ir.OCONVIFACE && n.Implicit() {
n = n.Left
if n.Op() == ir.OCONVIFACE && n.Implicit() {
n = n.Left()
}
if !isGoConst(n) {
return
}
if n.Type.IsUntyped() {
if n.Type().IsUntyped() {
base.Fatalf("%v is untyped", n)
}
@ -878,7 +878,7 @@ func (s *constSet) add(pos src.XPos, n *ir.Node, what, where string) {
// #21866 by treating all type aliases like byte/uint8 and
// rune/int32.
typ := n.Type
typ := n.Type()
switch typ {
case types.Bytetype:
typ = types.Types[types.TUINT8]
@ -888,7 +888,7 @@ func (s *constSet) add(pos src.XPos, n *ir.Node, what, where string) {
k := constSetKey{typ, ir.ConstValue(n)}
if hasUniquePos(n) {
pos = n.Pos
pos = n.Pos()
}
if s.m == nil {

View file

@ -64,78 +64,78 @@ func declare(n *ir.Node, ctxt ir.Class) {
return
}
if n.Name == nil {
if n.Name() == nil {
// named OLITERAL needs Name; most OLITERALs don't.
n.Name = new(ir.Name)
n.SetName(new(ir.Name))
}
s := n.Sym
s := n.Sym()
// kludgy: typecheckok means we're past parsing. Eg genwrapper may declare out of package names later.
if !inimport && !typecheckok && s.Pkg != ir.LocalPkg {
base.ErrorfAt(n.Pos, "cannot declare name %v", s)
base.ErrorfAt(n.Pos(), "cannot declare name %v", s)
}
gen := 0
if ctxt == ir.PEXTERN {
if s.Name == "init" {
base.ErrorfAt(n.Pos, "cannot declare init - must be func")
base.ErrorfAt(n.Pos(), "cannot declare init - must be func")
}
if s.Name == "main" && s.Pkg.Name == "main" {
base.ErrorfAt(n.Pos, "cannot declare main - must be func")
base.ErrorfAt(n.Pos(), "cannot declare main - must be func")
}
externdcl = append(externdcl, n)
} else {
if Curfn == nil && ctxt == ir.PAUTO {
base.Pos = n.Pos
base.Pos = n.Pos()
base.Fatalf("automatic outside function")
}
if Curfn != nil && ctxt != ir.PFUNC {
Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
Curfn.Func().Dcl = append(Curfn.Func().Dcl, n)
}
if n.Op == ir.OTYPE {
if n.Op() == ir.OTYPE {
declare_typegen++
gen = declare_typegen
} else if n.Op == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
} else if n.Op() == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
vargen++
gen = vargen
}
types.Pushdcl(s)
n.Name.Curfn = Curfn
n.Name().Curfn = Curfn
}
if ctxt == ir.PAUTO {
n.Xoffset = 0
n.SetOffset(0)
}
if s.Block == types.Block {
// functype will print errors about duplicate function arguments.
// Don't repeat the error here.
if ctxt != ir.PPARAM && ctxt != ir.PPARAMOUT {
redeclare(n.Pos, s, "in this block")
redeclare(n.Pos(), s, "in this block")
}
}
s.Block = types.Block
s.Lastlineno = base.Pos
s.Def = ir.AsTypesNode(n)
n.Name.Vargen = int32(gen)
n.Name().Vargen = int32(gen)
n.SetClass(ctxt)
if ctxt == ir.PFUNC {
n.Sym.SetFunc(true)
n.Sym().SetFunc(true)
}
autoexport(n, ctxt)
}
func addvar(n *ir.Node, t *types.Type, ctxt ir.Class) {
if n == nil || n.Sym == nil || (n.Op != ir.ONAME && n.Op != ir.ONONAME) || t == nil {
if n == nil || n.Sym() == nil || (n.Op() != ir.ONAME && n.Op() != ir.ONONAME) || t == nil {
base.Fatalf("addvar: n=%v t=%v nil", n, t)
}
n.Op = ir.ONAME
n.SetOp(ir.ONAME)
declare(n, ctxt)
n.Type = t
n.SetType(t)
}
// declare variables from grammar
@ -147,13 +147,13 @@ func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node {
if len(el) == 1 && len(vl) > 1 {
e := el[0]
as2 := ir.Nod(ir.OAS2, nil, nil)
as2.List.Set(vl)
as2.Rlist.Set1(e)
as2.PtrList().Set(vl)
as2.PtrRlist().Set1(e)
for _, v := range vl {
v.Op = ir.ONAME
v.SetOp(ir.ONAME)
declare(v, dclcontext)
v.Name.Param.Ntype = t
v.Name.Defn = as2
v.Name().Param.Ntype = t
v.Name().Defn = as2
if Curfn != nil {
init = append(init, ir.Nod(ir.ODCL, v, nil))
}
@ -174,9 +174,9 @@ func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node {
el = el[1:]
}
v.Op = ir.ONAME
v.SetOp(ir.ONAME)
declare(v, dclcontext)
v.Name.Param.Ntype = t
v.Name().Param.Ntype = t
if e != nil || Curfn != nil || ir.IsBlank(v) {
if Curfn != nil {
@ -184,8 +184,8 @@ func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node {
}
e = ir.Nod(ir.OAS, v, e)
init = append(init, e)
if e.Right != nil {
v.Name.Defn = e
if e.Right() != nil {
v.Name().Defn = e
}
}
}
@ -202,8 +202,8 @@ func newnoname(s *types.Sym) *ir.Node {
base.Fatalf("newnoname nil")
}
n := ir.Nod(ir.ONONAME, nil, nil)
n.Sym = s
n.Xoffset = 0
n.SetSym(s)
n.SetOffset(0)
return n
}
@ -213,7 +213,7 @@ func newfuncnamel(pos src.XPos, s *types.Sym, fn *ir.Func) *ir.Node {
base.Fatalf("newfuncnamel - already have name")
}
n := ir.NewNameAt(pos, s)
n.Func = fn
n.SetFunc(fn)
fn.Nname = n
return n
}
@ -222,7 +222,7 @@ func newfuncnamel(pos src.XPos, s *types.Sym, fn *ir.Func) *ir.Node {
// being declared.
func dclname(s *types.Sym) *ir.Node {
n := NewName(s)
n.Op = ir.ONONAME // caller will correct it
n.SetOp(ir.ONONAME) // caller will correct it
return n
}
@ -234,10 +234,10 @@ func typenodl(pos src.XPos, t *types.Type) *ir.Node {
// if we copied another type with *t = *u
// then t->nod might be out of date, so
// check t->nod->type too
if ir.AsNode(t.Nod) == nil || ir.AsNode(t.Nod).Type != t {
if ir.AsNode(t.Nod) == nil || ir.AsNode(t.Nod).Type() != t {
t.Nod = ir.AsTypesNode(ir.NodAt(pos, ir.OTYPE, nil, nil))
ir.AsNode(t.Nod).Type = t
ir.AsNode(t.Nod).Sym = t.Sym
ir.AsNode(t.Nod).SetType(t)
ir.AsNode(t.Nod).SetSym(t.Sym)
}
return ir.AsNode(t.Nod)
@ -253,7 +253,7 @@ func namedfield(s string, typ *types.Type) *ir.Node {
func symfield(s *types.Sym, typ *types.Type) *ir.Node {
n := nodSym(ir.ODCLFIELD, nil, s)
n.Type = typ
n.SetType(typ)
return n
}
@ -270,28 +270,28 @@ func oldname(s *types.Sym) *ir.Node {
return newnoname(s)
}
if Curfn != nil && n.Op == ir.ONAME && n.Name.Curfn != nil && n.Name.Curfn != Curfn {
if Curfn != nil && n.Op() == ir.ONAME && n.Name().Curfn != nil && n.Name().Curfn != Curfn {
// Inner func is referring to var in outer func.
//
// TODO(rsc): If there is an outer variable x and we
// are parsing x := 5 inside the closure, until we get to
// the := it looks like a reference to the outer x so we'll
// make x a closure variable unnecessarily.
c := n.Name.Param.Innermost
if c == nil || c.Name.Curfn != Curfn {
c := n.Name().Param.Innermost
if c == nil || c.Name().Curfn != Curfn {
// Do not have a closure var for the active closure yet; make one.
c = NewName(s)
c.SetClass(ir.PAUTOHEAP)
c.Name.SetIsClosureVar(true)
c.Name().SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
c.Name.Defn = n
c.Name().Defn = n
// Link into list of active closure variables.
// Popped from list in func funcLit.
c.Name.Param.Outer = n.Name.Param.Innermost
n.Name.Param.Innermost = c
c.Name().Param.Outer = n.Name().Param.Innermost
n.Name().Param.Innermost = c
Curfn.Func.ClosureVars.Append(c)
Curfn.Func().ClosureVars.Append(c)
}
// return ref to closure var, not original
@ -313,13 +313,13 @@ func importName(sym *types.Sym) *ir.Node {
// := declarations
func colasname(n *ir.Node) bool {
switch n.Op {
switch n.Op() {
case ir.ONAME,
ir.ONONAME,
ir.OPACK,
ir.OTYPE,
ir.OLITERAL:
return n.Sym != nil
return n.Sym() != nil
}
return false
@ -327,8 +327,8 @@ func colasname(n *ir.Node) bool {
func colasdefn(left []*ir.Node, defn *ir.Node) {
for _, n := range left {
if n.Sym != nil {
n.Sym.SetUniq(true)
if n.Sym() != nil {
n.Sym().SetUniq(true)
}
}
@ -338,44 +338,44 @@ func colasdefn(left []*ir.Node, defn *ir.Node) {
continue
}
if !colasname(n) {
base.ErrorfAt(defn.Pos, "non-name %v on left side of :=", n)
base.ErrorfAt(defn.Pos(), "non-name %v on left side of :=", n)
nerr++
continue
}
if !n.Sym.Uniq() {
base.ErrorfAt(defn.Pos, "%v repeated on left side of :=", n.Sym)
if !n.Sym().Uniq() {
base.ErrorfAt(defn.Pos(), "%v repeated on left side of :=", n.Sym())
n.SetDiag(true)
nerr++
continue
}
n.Sym.SetUniq(false)
if n.Sym.Block == types.Block {
n.Sym().SetUniq(false)
if n.Sym().Block == types.Block {
continue
}
nnew++
n = NewName(n.Sym)
n = NewName(n.Sym())
declare(n, dclcontext)
n.Name.Defn = defn
defn.Ninit.Append(ir.Nod(ir.ODCL, n, nil))
n.Name().Defn = defn
defn.PtrInit().Append(ir.Nod(ir.ODCL, n, nil))
left[i] = n
}
if nnew == 0 && nerr == 0 {
base.ErrorfAt(defn.Pos, "no new variables on left side of :=")
base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
}
}
// declare the arguments in an
// interface field declaration.
func ifacedcl(n *ir.Node) {
if n.Op != ir.ODCLFIELD || n.Left == nil {
if n.Op() != ir.ODCLFIELD || n.Left() == nil {
base.Fatalf("ifacedcl")
}
if n.Sym.IsBlank() {
if n.Sym().IsBlank() {
base.Errorf("methods must have a unique non-blank name")
}
}
@ -392,16 +392,16 @@ func funchdr(n *ir.Node) {
types.Markdcl()
if n.Func.Nname != nil && n.Func.Nname.Name.Param.Ntype != nil {
funcargs(n.Func.Nname.Name.Param.Ntype)
if n.Func().Nname != nil && n.Func().Nname.Name().Param.Ntype != nil {
funcargs(n.Func().Nname.Name().Param.Ntype)
} else {
funcargs2(n.Type)
funcargs2(n.Type())
}
}
func funcargs(nt *ir.Node) {
if nt.Op != ir.OTFUNC {
base.Fatalf("funcargs %v", nt.Op)
if nt.Op() != ir.OTFUNC {
base.Fatalf("funcargs %v", nt.Op())
}
// re-start the variable generation number
@ -411,13 +411,13 @@ func funcargs(nt *ir.Node) {
// TODO(mdempsky): This is ugly, and only necessary because
// esc.go uses Vargen to figure out result parameters' index
// within the result tuple.
vargen = nt.Rlist.Len()
vargen = nt.Rlist().Len()
// declare the receiver and in arguments.
if nt.Left != nil {
funcarg(nt.Left, ir.PPARAM)
if nt.Left() != nil {
funcarg(nt.Left(), ir.PPARAM)
}
for _, n := range nt.List.Slice() {
for _, n := range nt.List().Slice() {
funcarg(n, ir.PPARAM)
}
@ -425,21 +425,21 @@ func funcargs(nt *ir.Node) {
vargen = 0
// declare the out arguments.
gen := nt.List.Len()
for _, n := range nt.Rlist.Slice() {
if n.Sym == nil {
gen := nt.List().Len()
for _, n := range nt.Rlist().Slice() {
if n.Sym() == nil {
// Name so that escape analysis can track it. ~r stands for 'result'.
n.Sym = lookupN("~r", gen)
n.SetSym(lookupN("~r", gen))
gen++
}
if n.Sym.IsBlank() {
if n.Sym().IsBlank() {
// Give it a name so we can assign to it during return. ~b stands for 'blank'.
// The name must be different from ~r above because if you have
// func f() (_ int)
// func g() int
// f is allowed to use a plain 'return' with no arguments, while g is not.
// So the two cases must be distinguished.
n.Sym = lookupN("~b", gen)
n.SetSym(lookupN("~b", gen))
gen++
}
@ -450,20 +450,20 @@ func funcargs(nt *ir.Node) {
}
func funcarg(n *ir.Node, ctxt ir.Class) {
if n.Op != ir.ODCLFIELD {
base.Fatalf("funcarg %v", n.Op)
if n.Op() != ir.ODCLFIELD {
base.Fatalf("funcarg %v", n.Op())
}
if n.Sym == nil {
if n.Sym() == nil {
return
}
n.Right = ir.NewNameAt(n.Pos, n.Sym)
n.Right.Name.Param.Ntype = n.Left
n.Right.SetIsDDD(n.IsDDD())
declare(n.Right, ctxt)
n.SetRight(ir.NewNameAt(n.Pos(), n.Sym()))
n.Right().Name().Param.Ntype = n.Left()
n.Right().SetIsDDD(n.IsDDD())
declare(n.Right(), ctxt)
vargen++
n.Right.Name.Vargen = int32(vargen)
n.Right().Name().Vargen = int32(vargen)
}
// Same as funcargs, except run over an already constructed TFUNC.
@ -491,7 +491,7 @@ func funcarg2(f *types.Field, ctxt ir.Class) {
}
n := ir.NewNameAt(f.Pos, f.Sym)
f.Nname = ir.AsTypesNode(n)
n.Type = f.Type
n.SetType(f.Type)
n.SetIsDDD(f.IsDDD())
declare(n, ctxt)
}
@ -537,21 +537,21 @@ func checkembeddedtype(t *types.Type) {
func structfield(n *ir.Node) *types.Field {
lno := base.Pos
base.Pos = n.Pos
base.Pos = n.Pos()
if n.Op != ir.ODCLFIELD {
if n.Op() != ir.ODCLFIELD {
base.Fatalf("structfield: oops %v\n", n)
}
if n.Left != nil {
n.Left = typecheck(n.Left, ctxType)
n.Type = n.Left.Type
n.Left = nil
if n.Left() != nil {
n.SetLeft(typecheck(n.Left(), ctxType))
n.SetType(n.Left().Type())
n.SetLeft(nil)
}
f := types.NewField(n.Pos, n.Sym, n.Type)
f := types.NewField(n.Pos(), n.Sym(), n.Type())
if n.Embedded() {
checkembeddedtype(n.Type)
checkembeddedtype(n.Type())
f.Embedded = 1
}
if n.HasVal() {
@ -612,9 +612,9 @@ func tofunargs(l []*ir.Node, funarg types.Funarg) *types.Type {
for i, n := range l {
f := structfield(n)
f.SetIsDDD(n.IsDDD())
if n.Right != nil {
n.Right.Type = f.Type
f.Nname = ir.AsTypesNode(n.Right)
if n.Right() != nil {
n.Right().SetType(f.Type)
f.Nname = ir.AsTypesNode(n.Right())
}
if f.Broke() {
t.SetBroke(true)
@ -634,9 +634,9 @@ func tofunargsfield(fields []*types.Field, funarg types.Funarg) *types.Type {
func interfacefield(n *ir.Node) *types.Field {
lno := base.Pos
base.Pos = n.Pos
base.Pos = n.Pos()
if n.Op != ir.ODCLFIELD {
if n.Op() != ir.ODCLFIELD {
base.Fatalf("interfacefield: oops %v\n", n)
}
@ -649,13 +649,13 @@ func interfacefield(n *ir.Node) *types.Field {
// If Sym != nil, then Sym is MethodName and Left is Signature.
// Otherwise, Left is InterfaceTypeName.
if n.Left != nil {
n.Left = typecheck(n.Left, ctxType)
n.Type = n.Left.Type
n.Left = nil
if n.Left() != nil {
n.SetLeft(typecheck(n.Left(), ctxType))
n.SetType(n.Left().Type())
n.SetLeft(nil)
}
f := types.NewField(n.Pos, n.Sym, n.Type)
f := types.NewField(n.Pos(), n.Sym(), n.Type())
base.Pos = lno
return f
@ -872,7 +872,7 @@ func addmethod(n *ir.Node, msym *types.Sym, t *types.Type, local, nointerface bo
}
f := types.NewField(base.Pos, msym, t)
f.Nname = ir.AsTypesNode(n.Func.Nname)
f.Nname = ir.AsTypesNode(n.Func().Nname)
f.SetNointerface(nointerface)
mt.Methods().Append(f)
@ -936,26 +936,26 @@ func makefuncsym(s *types.Sym) {
// setNodeNameFunc marks a node as a function.
func setNodeNameFunc(n *ir.Node) {
if n.Op != ir.ONAME || n.Class() != ir.Pxxx {
if n.Op() != ir.ONAME || n.Class() != ir.Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
n.SetClass(ir.PFUNC)
n.Sym.SetFunc(true)
n.Sym().SetFunc(true)
}
func dclfunc(sym *types.Sym, tfn *ir.Node) *ir.Node {
if tfn.Op != ir.OTFUNC {
if tfn.Op() != ir.OTFUNC {
base.Fatalf("expected OTFUNC node, got %v", tfn)
}
fn := ir.Nod(ir.ODCLFUNC, nil, nil)
fn.Func.Nname = newfuncnamel(base.Pos, sym, fn.Func)
fn.Func.Nname.Name.Defn = fn
fn.Func.Nname.Name.Param.Ntype = tfn
setNodeNameFunc(fn.Func.Nname)
fn.Func().Nname = newfuncnamel(base.Pos, sym, fn.Func())
fn.Func().Nname.Name().Defn = fn
fn.Func().Nname.Name().Param.Ntype = tfn
setNodeNameFunc(fn.Func().Nname)
funchdr(fn)
fn.Func.Nname.Name.Param.Ntype = typecheck(fn.Func.Nname.Name.Param.Ntype, ctxType)
fn.Func().Nname.Name().Param.Ntype = typecheck(fn.Func().Nname.Name().Param.Ntype, ctxType)
return fn
}
@ -987,7 +987,7 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
// directly. This has to happen before transformclosure since
// it's a lot harder to work out the argument after.
for _, n := range xtop {
if n.Op != ir.ODCLFUNC {
if n.Op() != ir.ODCLFUNC {
continue
}
c.curfn = n
@ -998,31 +998,31 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
}
func (c *nowritebarrierrecChecker) findExtraCalls(n *ir.Node) bool {
if n.Op != ir.OCALLFUNC {
if n.Op() != ir.OCALLFUNC {
return true
}
fn := n.Left
if fn == nil || fn.Op != ir.ONAME || fn.Class() != ir.PFUNC || fn.Name.Defn == nil {
fn := n.Left()
if fn == nil || fn.Op() != ir.ONAME || fn.Class() != ir.PFUNC || fn.Name().Defn == nil {
return true
}
if !isRuntimePkg(fn.Sym.Pkg) || fn.Sym.Name != "systemstack" {
if !isRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" {
return true
}
var callee *ir.Node
arg := n.List.First()
switch arg.Op {
arg := n.List().First()
switch arg.Op() {
case ir.ONAME:
callee = arg.Name.Defn
callee = arg.Name().Defn
case ir.OCLOSURE:
callee = arg.Func.Decl
callee = arg.Func().Decl
default:
base.Fatalf("expected ONAME or OCLOSURE node, got %+v", arg)
}
if callee.Op != ir.ODCLFUNC {
if callee.Op() != ir.ODCLFUNC {
base.Fatalf("expected ODCLFUNC node, got %+v", callee)
}
c.extraCalls[c.curfn] = append(c.extraCalls[c.curfn], nowritebarrierrecCall{callee, n.Pos})
c.extraCalls[c.curfn] = append(c.extraCalls[c.curfn], nowritebarrierrecCall{callee, n.Pos()})
return true
}
@ -1035,12 +1035,12 @@ func (c *nowritebarrierrecChecker) findExtraCalls(n *ir.Node) bool {
//
// This can be called concurrently for different from Nodes.
func (c *nowritebarrierrecChecker) recordCall(from *ir.Node, to *obj.LSym, pos src.XPos) {
if from.Op != ir.ODCLFUNC {
if from.Op() != ir.ODCLFUNC {
base.Fatalf("expected ODCLFUNC, got %v", from)
}
// We record this information on the *Func so this is
// concurrent-safe.
fn := from.Func
fn := from.Func()
if fn.NWBRCalls == nil {
fn.NWBRCalls = new([]ir.SymAndPos)
}
@ -1064,27 +1064,27 @@ func (c *nowritebarrierrecChecker) check() {
var q ir.NodeQueue
for _, n := range xtop {
if n.Op != ir.ODCLFUNC {
if n.Op() != ir.ODCLFUNC {
continue
}
symToFunc[n.Func.LSym] = n
symToFunc[n.Func().LSym] = n
// Make nowritebarrierrec functions BFS roots.
if n.Func.Pragma&ir.Nowritebarrierrec != 0 {
if n.Func().Pragma&ir.Nowritebarrierrec != 0 {
funcs[n] = nowritebarrierrecCall{}
q.PushRight(n)
}
// Check go:nowritebarrier functions.
if n.Func.Pragma&ir.Nowritebarrier != 0 && n.Func.WBPos.IsKnown() {
base.ErrorfAt(n.Func.WBPos, "write barrier prohibited")
if n.Func().Pragma&ir.Nowritebarrier != 0 && n.Func().WBPos.IsKnown() {
base.ErrorfAt(n.Func().WBPos, "write barrier prohibited")
}
}
// Perform a BFS of the call graph from all
// go:nowritebarrierrec functions.
enqueue := func(src, target *ir.Node, pos src.XPos) {
if target.Func.Pragma&ir.Yeswritebarrierrec != 0 {
if target.Func().Pragma&ir.Yeswritebarrierrec != 0 {
// Don't flow into this function.
return
}
@ -1101,14 +1101,14 @@ func (c *nowritebarrierrecChecker) check() {
fn := q.PopLeft()
// Check fn.
if fn.Func.WBPos.IsKnown() {
if fn.Func().WBPos.IsKnown() {
var err bytes.Buffer
call := funcs[fn]
for call.target != nil {
fmt.Fprintf(&err, "\n\t%v: called by %v", base.FmtPos(call.lineno), call.target.Func.Nname)
fmt.Fprintf(&err, "\n\t%v: called by %v", base.FmtPos(call.lineno), call.target.Func().Nname)
call = funcs[call.target]
}
base.ErrorfAt(fn.Func.WBPos, "write barrier prohibited by caller; %v%s", fn.Func.Nname, err.String())
base.ErrorfAt(fn.Func().WBPos, "write barrier prohibited by caller; %v%s", fn.Func().Nname, err.String())
continue
}
@ -1116,10 +1116,10 @@ func (c *nowritebarrierrecChecker) check() {
for _, callee := range c.extraCalls[fn] {
enqueue(fn, callee.target, callee.lineno)
}
if fn.Func.NWBRCalls == nil {
if fn.Func().NWBRCalls == nil {
continue
}
for _, callee := range *fn.Func.NWBRCalls {
for _, callee := range *fn.Func().NWBRCalls {
target := symToFunc[callee.Sym]
if target != nil {
enqueue(fn, target, callee.Pos)

View file

@ -236,15 +236,15 @@ func makePreinlineDclMap(fnsym *obj.LSym) map[varPos]int {
dcl := preInliningDcls(fnsym)
m := make(map[varPos]int)
for i, n := range dcl {
pos := base.Ctxt.InnermostPos(n.Pos)
pos := base.Ctxt.InnermostPos(n.Pos())
vp := varPos{
DeclName: unversion(n.Sym.Name),
DeclName: unversion(n.Sym().Name),
DeclFile: pos.RelFilename(),
DeclLine: pos.RelLine(),
DeclCol: pos.Col(),
}
if _, found := m[vp]; found {
base.Fatalf("child dcl collision on symbol %s within %v\n", n.Sym.Name, fnsym.Name)
base.Fatalf("child dcl collision on symbol %s within %v\n", n.Sym().Name, fnsym.Name)
}
m[vp] = i
}

View file

@ -113,15 +113,15 @@ func varEmbed(p *noder, names []*ir.Node, typ *ir.Node, exprs []*ir.Node, embeds
v := names[0]
if dclcontext != ir.PEXTERN {
numLocalEmbed++
v = ir.NewNameAt(v.Pos, lookupN("embed.", numLocalEmbed))
v.Sym.Def = ir.AsTypesNode(v)
v.Name.Param.Ntype = typ
v = ir.NewNameAt(v.Pos(), lookupN("embed.", numLocalEmbed))
v.Sym().Def = ir.AsTypesNode(v)
v.Name().Param.Ntype = typ
v.SetClass(ir.PEXTERN)
externdcl = append(externdcl, v)
exprs = []*ir.Node{v}
}
v.Name.Param.SetEmbedFiles(list)
v.Name().Param.SetEmbedFiles(list)
embedlist = append(embedlist, v)
return exprs
}
@ -131,17 +131,17 @@ func varEmbed(p *noder, names []*ir.Node, typ *ir.Node, exprs []*ir.Node, embeds
// can't tell whether "string" and "byte" really mean "string" and "byte".
// The result must be confirmed later, after type checking, using embedKind.
func embedKindApprox(typ *ir.Node) int {
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
if typ.Sym() != nil && typ.Sym().Name == "FS" && (typ.Sym().Pkg.Path == "embed" || (typ.Sym().Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles
}
// These are not guaranteed to match only string and []byte -
// maybe the local package has redefined one of those words.
// But it's the best we can do now during the noder.
// The stricter check happens later, in initEmbed calling embedKind.
if typ.Sym != nil && typ.Sym.Name == "string" && typ.Sym.Pkg == ir.LocalPkg {
if typ.Sym() != nil && typ.Sym().Name == "string" && typ.Sym().Pkg == ir.LocalPkg {
return embedString
}
if typ.Op == ir.OTARRAY && typ.Left == nil && typ.Right.Sym != nil && typ.Right.Sym.Name == "byte" && typ.Right.Sym.Pkg == ir.LocalPkg {
if typ.Op() == ir.OTARRAY && typ.Left() == nil && typ.Right().Sym() != nil && typ.Right().Sym().Name == "byte" && typ.Right().Sym().Pkg == ir.LocalPkg {
return embedBytes
}
return embedUnknown
@ -193,18 +193,18 @@ func dumpembeds() {
// initEmbed emits the init data for a //go:embed variable,
// which is either a string, a []byte, or an embed.FS.
func initEmbed(v *ir.Node) {
files := v.Name.Param.EmbedFiles()
switch kind := embedKind(v.Type); kind {
files := v.Name().Param.EmbedFiles()
switch kind := embedKind(v.Type()); kind {
case embedUnknown:
base.ErrorfAt(v.Pos, "go:embed cannot apply to var of type %v", v.Type)
base.ErrorfAt(v.Pos(), "go:embed cannot apply to var of type %v", v.Type())
case embedString, embedBytes:
file := files[0]
fsym, size, err := fileStringSym(v.Pos, base.Flag.Cfg.Embed.Files[file], kind == embedString, nil)
fsym, size, err := fileStringSym(v.Pos(), base.Flag.Cfg.Embed.Files[file], kind == embedString, nil)
if err != nil {
base.ErrorfAt(v.Pos, "embed %s: %v", file, err)
base.ErrorfAt(v.Pos(), "embed %s: %v", file, err)
}
sym := v.Sym.Linksym()
sym := v.Sym().Linksym()
off := 0
off = dsymptr(sym, off, fsym, 0) // data string
off = duintptr(sym, off, uint64(size)) // len
@ -213,7 +213,7 @@ func initEmbed(v *ir.Node) {
}
case embedFiles:
slicedata := base.Ctxt.Lookup(`"".` + v.Sym.Name + `.files`)
slicedata := base.Ctxt.Lookup(`"".` + v.Sym().Name + `.files`)
off := 0
// []files pointed at by Files
off = dsymptr(slicedata, off, slicedata, 3*Widthptr) // []file, pointing just past slice
@ -228,7 +228,7 @@ func initEmbed(v *ir.Node) {
const hashSize = 16
hash := make([]byte, hashSize)
for _, file := range files {
off = dsymptr(slicedata, off, stringsym(v.Pos, file), 0) // file string
off = dsymptr(slicedata, off, stringsym(v.Pos(), file), 0) // file string
off = duintptr(slicedata, off, uint64(len(file)))
if strings.HasSuffix(file, "/") {
// entry for directory - no data
@ -236,9 +236,9 @@ func initEmbed(v *ir.Node) {
off = duintptr(slicedata, off, 0)
off += hashSize
} else {
fsym, size, err := fileStringSym(v.Pos, base.Flag.Cfg.Embed.Files[file], true, hash)
fsym, size, err := fileStringSym(v.Pos(), base.Flag.Cfg.Embed.Files[file], true, hash)
if err != nil {
base.ErrorfAt(v.Pos, "embed %s: %v", file, err)
base.ErrorfAt(v.Pos(), "embed %s: %v", file, err)
}
off = dsymptr(slicedata, off, fsym, 0) // data string
off = duintptr(slicedata, off, uint64(size))
@ -246,7 +246,7 @@ func initEmbed(v *ir.Node) {
}
}
ggloblsym(slicedata, int32(off), obj.RODATA|obj.LOCAL)
sym := v.Sym.Linksym()
sym := v.Sym().Linksym()
dsymptr(sym, 0, slicedata, 0)
}
}

File diff suppressed because it is too large Load diff

View file

@ -25,13 +25,13 @@ var asmlist []*ir.Node
// exportsym marks n for export (or reexport).
func exportsym(n *ir.Node) {
if n.Sym.OnExportList() {
if n.Sym().OnExportList() {
return
}
n.Sym.SetOnExportList(true)
n.Sym().SetOnExportList(true)
if base.Flag.E != 0 {
fmt.Printf("export symbol %v\n", n.Sym)
fmt.Printf("export symbol %v\n", n.Sym())
}
exportlist = append(exportlist, n)
@ -42,21 +42,21 @@ func initname(s string) bool {
}
func autoexport(n *ir.Node, ctxt ir.Class) {
if n.Sym.Pkg != ir.LocalPkg {
if n.Sym().Pkg != ir.LocalPkg {
return
}
if (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) || dclcontext != ir.PEXTERN {
return
}
if n.Type != nil && n.Type.IsKind(types.TFUNC) && ir.IsMethod(n) {
if n.Type() != nil && n.Type().IsKind(types.TFUNC) && ir.IsMethod(n) {
return
}
if types.IsExported(n.Sym.Name) || initname(n.Sym.Name) {
if types.IsExported(n.Sym().Name) || initname(n.Sym().Name) {
exportsym(n)
}
if base.Flag.AsmHdr != "" && !n.Sym.Asm() {
n.Sym.SetAsm(true)
if base.Flag.AsmHdr != "" && !n.Sym().Asm() {
n.Sym().SetAsm(true)
asmlist = append(asmlist, n)
}
}
@ -89,7 +89,7 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op ir.Op) *ir.Node {
s.SetPkgDef(ir.AsTypesNode(n))
s.Importdef = ipkg
}
if n.Op != ir.ONONAME && n.Op != op {
if n.Op() != ir.ONONAME && n.Op() != op {
redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
}
return n
@ -100,18 +100,18 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op ir.Op) *ir.Node {
// ipkg is the package being imported
func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
n := importsym(ipkg, s, ir.OTYPE)
if n.Op != ir.OTYPE {
if n.Op() != ir.OTYPE {
t := types.New(types.TFORW)
t.Sym = s
t.Nod = ir.AsTypesNode(n)
n.Op = ir.OTYPE
n.Pos = pos
n.Type = t
n.SetOp(ir.OTYPE)
n.SetPos(pos)
n.SetType(t)
n.SetClass(ir.PEXTERN)
}
t := n.Type
t := n.Type()
if t == nil {
base.Fatalf("importtype %v", s)
}
@ -122,20 +122,20 @@ func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
// ipkg is the package being imported
func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Node {
n := importsym(ipkg, s, op)
if n.Op != ir.ONONAME {
if n.Op == op && (n.Class() != ctxt || !types.Identical(n.Type, t)) {
if n.Op() != ir.ONONAME {
if n.Op() == op && (n.Class() != ctxt || !types.Identical(n.Type(), t)) {
redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
}
return nil
}
n.Op = op
n.Pos = pos
n.SetOp(op)
n.SetPos(pos)
n.SetClass(ctxt)
if ctxt == ir.PFUNC {
n.Sym.SetFunc(true)
n.Sym().SetFunc(true)
}
n.Type = t
n.SetType(t)
return n
}
@ -162,7 +162,7 @@ func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
return
}
n.Func = new(ir.Func)
n.SetFunc(new(ir.Func))
if base.Flag.E != 0 {
fmt.Printf("import func %v%S\n", s, t)
@ -202,26 +202,26 @@ func dumpasmhdr() {
}
fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", ir.LocalPkg.Name)
for _, n := range asmlist {
if n.Sym.IsBlank() {
if n.Sym().IsBlank() {
continue
}
switch n.Op {
switch n.Op() {
case ir.OLITERAL:
t := n.Val().Kind()
if t == constant.Float || t == constant.Complex {
break
}
fmt.Fprintf(b, "#define const_%s %#v\n", n.Sym.Name, n.Val())
fmt.Fprintf(b, "#define const_%s %#v\n", n.Sym().Name, n.Val())
case ir.OTYPE:
t := n.Type
t := n.Type()
if !t.IsStruct() || t.StructType().Map != nil || t.IsFuncArgStruct() {
break
}
fmt.Fprintf(b, "#define %s__size %d\n", n.Sym.Name, int(t.Width))
fmt.Fprintf(b, "#define %s__size %d\n", n.Sym().Name, int(t.Width))
for _, f := range t.Fields().Slice() {
if !f.Sym.IsBlank() {
fmt.Fprintf(b, "#define %s_%s %d\n", n.Sym.Name, f.Sym.Name, int(f.Offset))
fmt.Fprintf(b, "#define %s_%s %d\n", n.Sym().Name, f.Sym.Name, int(f.Offset))
}
}
}

View file

@ -31,13 +31,13 @@ func sysvar(name string) *obj.LSym {
// isParamStackCopy reports whether this is the on-stack copy of a
// function parameter that moved to the heap.
func isParamStackCopy(n *ir.Node) bool {
return n.Op == ir.ONAME && (n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Name.Param.Heapaddr != nil
return n.Op() == ir.ONAME && (n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Name().Param.Heapaddr != nil
}
// isParamHeapCopy reports whether this is the on-heap copy of
// a function parameter that moved to the heap.
func isParamHeapCopy(n *ir.Node) bool {
return n.Op == ir.ONAME && n.Class() == ir.PAUTOHEAP && n.Name.Param.Stackcopy != nil
return n.Op() == ir.ONAME && n.Class() == ir.PAUTOHEAP && n.Name().Param.Stackcopy != nil
}
// autotmpname returns the name for an autotmp variable numbered n.
@ -56,7 +56,7 @@ func tempAt(pos src.XPos, curfn *ir.Node, t *types.Type) *ir.Node {
if curfn == nil {
base.Fatalf("no curfn for tempAt")
}
if curfn.Op == ir.OCLOSURE {
if curfn.Op() == ir.OCLOSURE {
ir.Dump("tempAt", curfn)
base.Fatalf("adding tempAt to wrong closure function")
}
@ -65,22 +65,22 @@ func tempAt(pos src.XPos, curfn *ir.Node, t *types.Type) *ir.Node {
}
s := &types.Sym{
Name: autotmpname(len(curfn.Func.Dcl)),
Name: autotmpname(len(curfn.Func().Dcl)),
Pkg: ir.LocalPkg,
}
n := ir.NewNameAt(pos, s)
s.Def = ir.AsTypesNode(n)
n.Type = t
n.SetType(t)
n.SetClass(ir.PAUTO)
n.Esc = EscNever
n.Name.Curfn = curfn
n.Name.SetUsed(true)
n.Name.SetAutoTemp(true)
curfn.Func.Dcl = append(curfn.Func.Dcl, n)
n.SetEsc(EscNever)
n.Name().Curfn = curfn
n.Name().SetUsed(true)
n.Name().SetAutoTemp(true)
curfn.Func().Dcl = append(curfn.Func().Dcl, n)
dowidth(t)
return n.Orig
return n.Orig()
}
func temp(t *types.Type) *ir.Node {

View file

@ -69,7 +69,7 @@ func newProgs(fn *ir.Node, worker int) *Progs {
pp.next = pp.NewProg()
pp.clearp(pp.next)
pp.pos = fn.Pos
pp.pos = fn.Pos()
pp.settext(fn)
// PCDATA tables implicitly start with index -1.
pp.prevLive = LivenessIndex{-1, false}
@ -181,10 +181,10 @@ func (pp *Progs) settext(fn *ir.Node) {
ptxt := pp.Prog(obj.ATEXT)
pp.Text = ptxt
fn.Func.LSym.Func().Text = ptxt
fn.Func().LSym.Func().Text = ptxt
ptxt.From.Type = obj.TYPE_MEM
ptxt.From.Name = obj.NAME_EXTERN
ptxt.From.Sym = fn.Func.LSym
ptxt.From.Sym = fn.Func().LSym
}
// initLSym defines f's obj.LSym and initializes it based on the
@ -199,7 +199,7 @@ func initLSym(f *ir.Func, hasBody bool) {
}
if nam := f.Nname; !ir.IsBlank(nam) {
f.LSym = nam.Sym.Linksym()
f.LSym = nam.Sym().Linksym()
if f.Pragma&ir.Systemstack != 0 {
f.LSym.Set(obj.AttrCFunc, true)
}
@ -221,7 +221,7 @@ func initLSym(f *ir.Func, hasBody bool) {
}
}
isLinknameExported := nam.Sym.Linkname != "" && (hasBody || hasDefABI)
isLinknameExported := nam.Sym().Linkname != "" && (hasBody || hasDefABI)
if abi, ok := symabiRefs[f.LSym.Name]; (ok && abi == obj.ABI0) || isLinknameExported {
// Either 1) this symbol is definitely
// referenced as ABI0 from this package; or 2)
@ -281,7 +281,7 @@ func initLSym(f *ir.Func, hasBody bool) {
// See test/recover.go for test cases and src/reflect/value.go
// for the actual functions being considered.
if base.Ctxt.Pkgpath == "reflect" {
switch f.Nname.Sym.Name {
switch f.Nname.Sym().Name {
case "callReflect", "callMethod":
flag |= obj.WRAPPER
}
@ -291,20 +291,20 @@ func initLSym(f *ir.Func, hasBody bool) {
}
func ggloblnod(nam *ir.Node) {
s := nam.Sym.Linksym()
s := nam.Sym().Linksym()
s.Gotype = ngotype(nam).Linksym()
flags := 0
if nam.Name.Readonly() {
if nam.Name().Readonly() {
flags = obj.RODATA
}
if nam.Type != nil && !nam.Type.HasPointers() {
if nam.Type() != nil && !nam.Type().HasPointers() {
flags |= obj.NOPTR
}
base.Ctxt.Globl(s, nam.Type.Width, flags)
if nam.Name.LibfuzzerExtraCounter() {
base.Ctxt.Globl(s, nam.Type().Width, flags)
if nam.Name().LibfuzzerExtraCounter() {
s.Type = objabi.SLIBFUZZER_EXTRA_COUNTER
}
if nam.Sym.Linkname != "" {
if nam.Sym().Linkname != "" {
// Make sure linkname'd symbol is non-package. When a symbol is
// both imported and linkname'd, s.Pkg may not set to "_" in
// types.Sym.Linksym because LSym already exists. Set it here.

View file

@ -329,7 +329,7 @@ func (w *exportWriter) writeIndex(index map[*ir.Node]uint64, mainIndex bool) {
}
for n := range index {
pkgObjs[n.Sym.Pkg] = append(pkgObjs[n.Sym.Pkg], n)
pkgObjs[n.Sym().Pkg] = append(pkgObjs[n.Sym().Pkg], n)
}
var pkgs []*types.Pkg
@ -337,7 +337,7 @@ func (w *exportWriter) writeIndex(index map[*ir.Node]uint64, mainIndex bool) {
pkgs = append(pkgs, pkg)
sort.Slice(objs, func(i, j int) bool {
return objs[i].Sym.Name < objs[j].Sym.Name
return objs[i].Sym().Name < objs[j].Sym().Name
})
}
@ -356,7 +356,7 @@ func (w *exportWriter) writeIndex(index map[*ir.Node]uint64, mainIndex bool) {
objs := pkgObjs[pkg]
w.uint64(uint64(len(objs)))
for _, n := range objs {
w.string(n.Sym.Name)
w.string(n.Sym().Name)
w.uint64(index[n])
}
}
@ -395,12 +395,12 @@ func (p *iexporter) stringOff(s string) uint64 {
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(n *ir.Node) {
if n.Sym == nil || ir.AsNode(n.Sym.Def) != n && n.Op != ir.OTYPE {
base.Fatalf("weird Sym: %v, %v", n, n.Sym)
if n.Sym() == nil || ir.AsNode(n.Sym().Def) != n && n.Op() != ir.OTYPE {
base.Fatalf("weird Sym: %v, %v", n, n.Sym())
}
// Don't export predeclared declarations.
if n.Sym.Pkg == ir.BuiltinPkg || n.Sym.Pkg == unsafepkg {
if n.Sym().Pkg == ir.BuiltinPkg || n.Sym().Pkg == unsafepkg {
return
}
@ -425,16 +425,16 @@ type exportWriter struct {
func (p *iexporter) doDecl(n *ir.Node) {
w := p.newWriter()
w.setPkg(n.Sym.Pkg, false)
w.setPkg(n.Sym().Pkg, false)
switch n.Op {
switch n.Op() {
case ir.ONAME:
switch n.Class() {
case ir.PEXTERN:
// Variable.
w.tag('V')
w.pos(n.Pos)
w.typ(n.Type)
w.pos(n.Pos())
w.typ(n.Type())
w.varExt(n)
case ir.PFUNC:
@ -444,8 +444,8 @@ func (p *iexporter) doDecl(n *ir.Node) {
// Function.
w.tag('F')
w.pos(n.Pos)
w.signature(n.Type)
w.pos(n.Pos())
w.signature(n.Type())
w.funcExt(n)
default:
@ -456,23 +456,23 @@ func (p *iexporter) doDecl(n *ir.Node) {
// Constant.
n = typecheck(n, ctxExpr)
w.tag('C')
w.pos(n.Pos)
w.value(n.Type, n.Val())
w.pos(n.Pos())
w.value(n.Type(), n.Val())
case ir.OTYPE:
if IsAlias(n.Sym) {
if IsAlias(n.Sym()) {
// Alias.
w.tag('A')
w.pos(n.Pos)
w.typ(n.Type)
w.pos(n.Pos())
w.typ(n.Type())
break
}
// Defined type.
w.tag('T')
w.pos(n.Pos)
w.pos(n.Pos())
underlying := n.Type.Orig
underlying := n.Type().Orig
if underlying == types.Errortype.Orig {
// For "type T error", use error as the
// underlying type instead of error's own
@ -484,7 +484,7 @@ func (p *iexporter) doDecl(n *ir.Node) {
}
w.typ(underlying)
t := n.Type
t := n.Type()
if t.IsInterface() {
w.typeExt(t)
break
@ -519,7 +519,7 @@ func (p *iexporter) doInline(f *ir.Node) {
w := p.newWriter()
w.setPkg(fnpkg(f), false)
w.stmtList(ir.AsNodes(f.Func.Inl.Body))
w.stmtList(ir.AsNodes(f.Func().Inl.Body))
p.inlineIndex[f] = w.flush()
}
@ -574,7 +574,7 @@ func (w *exportWriter) qualifiedIdent(n *ir.Node) {
// Ensure any referenced declarations are written out too.
w.p.pushDecl(n)
s := n.Sym
s := n.Sym()
w.string(s.Name)
w.pkg(s.Pkg)
}
@ -956,36 +956,36 @@ func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
// Compiler-specific extensions.
func (w *exportWriter) varExt(n *ir.Node) {
w.linkname(n.Sym)
w.symIdx(n.Sym)
w.linkname(n.Sym())
w.symIdx(n.Sym())
}
func (w *exportWriter) funcExt(n *ir.Node) {
w.linkname(n.Sym)
w.symIdx(n.Sym)
w.linkname(n.Sym())
w.symIdx(n.Sym())
// Escape analysis.
for _, fs := range &types.RecvsParams {
for _, f := range fs(n.Type).FieldSlice() {
for _, f := range fs(n.Type()).FieldSlice() {
w.string(f.Note)
}
}
// Inline body.
if n.Func.Inl != nil {
w.uint64(1 + uint64(n.Func.Inl.Cost))
if n.Func.ExportInline() {
if n.Func().Inl != nil {
w.uint64(1 + uint64(n.Func().Inl.Cost))
if n.Func().ExportInline() {
w.p.doInline(n)
}
// Endlineno for inlined function.
if n.Name.Defn != nil {
w.pos(n.Name.Defn.Func.Endlineno)
if n.Name().Defn != nil {
w.pos(n.Name().Defn.Func().Endlineno)
} else {
// When the exported node was defined externally,
// e.g. io exports atomic.(*Value).Load or bytes exports errors.New.
// Keep it as we don't distinguish this case in iimport.go.
w.pos(n.Func.Endlineno)
w.pos(n.Func().Endlineno)
}
} else {
w.uint64(0)
@ -1038,7 +1038,7 @@ func (w *exportWriter) stmtList(list ir.Nodes) {
}
func (w *exportWriter) node(n *ir.Node) {
if ir.OpPrec[n.Op] < 0 {
if ir.OpPrec[n.Op()] < 0 {
w.stmt(n)
} else {
w.expr(n)
@ -1048,19 +1048,19 @@ func (w *exportWriter) node(n *ir.Node) {
// Caution: stmt will emit more than one node for statement nodes n that have a non-empty
// n.Ninit and where n cannot have a natural init section (such as in "if", "for", etc.).
func (w *exportWriter) stmt(n *ir.Node) {
if n.Ninit.Len() > 0 && !ir.StmtWithInit(n.Op) {
if n.Init().Len() > 0 && !ir.StmtWithInit(n.Op()) {
// can't use stmtList here since we don't want the final OEND
for _, n := range n.Ninit.Slice() {
for _, n := range n.Init().Slice() {
w.stmt(n)
}
}
switch op := n.Op; op {
switch op := n.Op(); op {
case ir.ODCL:
w.op(ir.ODCL)
w.pos(n.Left.Pos)
w.localName(n.Left)
w.typ(n.Left.Type)
w.pos(n.Left().Pos())
w.localName(n.Left())
w.typ(n.Left().Type())
// case ODCLFIELD:
// unimplemented - handled by default case
@ -1069,74 +1069,74 @@ func (w *exportWriter) stmt(n *ir.Node) {
// Don't export "v = <N>" initializing statements, hope they're always
// preceded by the DCL which will be re-parsed and typecheck to reproduce
// the "v = <N>" again.
if n.Right != nil {
if n.Right() != nil {
w.op(ir.OAS)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
}
case ir.OASOP:
w.op(ir.OASOP)
w.pos(n.Pos)
w.pos(n.Pos())
w.op(n.SubOp())
w.expr(n.Left)
w.expr(n.Left())
if w.bool(!n.Implicit()) {
w.expr(n.Right)
w.expr(n.Right())
}
case ir.OAS2:
w.op(ir.OAS2)
w.pos(n.Pos)
w.exprList(n.List)
w.exprList(n.Rlist)
w.pos(n.Pos())
w.exprList(n.List())
w.exprList(n.Rlist())
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
w.op(ir.OAS2)
w.pos(n.Pos)
w.exprList(n.List)
w.exprList(ir.AsNodes([]*ir.Node{n.Right}))
w.pos(n.Pos())
w.exprList(n.List())
w.exprList(ir.AsNodes([]*ir.Node{n.Right()}))
case ir.ORETURN:
w.op(ir.ORETURN)
w.pos(n.Pos)
w.exprList(n.List)
w.pos(n.Pos())
w.exprList(n.List())
// case ORETJMP:
// unreachable - generated by compiler for trampolin routines
case ir.OGO, ir.ODEFER:
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
w.pos(n.Pos())
w.expr(n.Left())
case ir.OIF:
w.op(ir.OIF)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.expr(n.Left)
w.stmtList(n.Nbody)
w.stmtList(n.Rlist)
w.pos(n.Pos())
w.stmtList(n.Init())
w.expr(n.Left())
w.stmtList(n.Body())
w.stmtList(n.Rlist())
case ir.OFOR:
w.op(ir.OFOR)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.exprsOrNil(n.Left, n.Right)
w.stmtList(n.Nbody)
w.pos(n.Pos())
w.stmtList(n.Init())
w.exprsOrNil(n.Left(), n.Right())
w.stmtList(n.Body())
case ir.ORANGE:
w.op(ir.ORANGE)
w.pos(n.Pos)
w.stmtList(n.List)
w.expr(n.Right)
w.stmtList(n.Nbody)
w.pos(n.Pos())
w.stmtList(n.List())
w.expr(n.Right())
w.stmtList(n.Body())
case ir.OSELECT, ir.OSWITCH:
w.op(op)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.exprsOrNil(n.Left, nil)
w.pos(n.Pos())
w.stmtList(n.Init())
w.exprsOrNil(n.Left(), nil)
w.caseList(n)
// case OCASE:
@ -1144,41 +1144,41 @@ func (w *exportWriter) stmt(n *ir.Node) {
case ir.OFALL:
w.op(ir.OFALL)
w.pos(n.Pos)
w.pos(n.Pos())
case ir.OBREAK, ir.OCONTINUE:
w.op(op)
w.pos(n.Pos)
w.exprsOrNil(n.Left, nil)
w.pos(n.Pos())
w.exprsOrNil(n.Left(), nil)
case ir.OEMPTY:
// nothing to emit
case ir.OGOTO, ir.OLABEL:
w.op(op)
w.pos(n.Pos)
w.string(n.Sym.Name)
w.pos(n.Pos())
w.string(n.Sym().Name)
default:
base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op)
base.Fatalf("exporter: CANNOT EXPORT: %v\nPlease notify gri@\n", n.Op())
}
}
func (w *exportWriter) caseList(sw *ir.Node) {
namedTypeSwitch := sw.Op == ir.OSWITCH && sw.Left != nil && sw.Left.Op == ir.OTYPESW && sw.Left.Left != nil
namedTypeSwitch := sw.Op() == ir.OSWITCH && sw.Left() != nil && sw.Left().Op() == ir.OTYPESW && sw.Left().Left() != nil
cases := sw.List.Slice()
cases := sw.List().Slice()
w.uint64(uint64(len(cases)))
for _, cas := range cases {
if cas.Op != ir.OCASE {
if cas.Op() != ir.OCASE {
base.Fatalf("expected OCASE, got %v", cas)
}
w.pos(cas.Pos)
w.stmtList(cas.List)
w.pos(cas.Pos())
w.stmtList(cas.List())
if namedTypeSwitch {
w.localName(cas.Rlist.First())
w.localName(cas.Rlist().First())
}
w.stmtList(cas.Nbody)
w.stmtList(cas.Body())
}
}
@ -1200,38 +1200,38 @@ func (w *exportWriter) expr(n *ir.Node) {
// }
// from exprfmt (fmt.go)
for n.Op == ir.OPAREN || n.Implicit() && (n.Op == ir.ODEREF || n.Op == ir.OADDR || n.Op == ir.ODOT || n.Op == ir.ODOTPTR) {
n = n.Left
for n.Op() == ir.OPAREN || n.Implicit() && (n.Op() == ir.ODEREF || n.Op() == ir.OADDR || n.Op() == ir.ODOT || n.Op() == ir.ODOTPTR) {
n = n.Left()
}
switch op := n.Op; op {
switch op := n.Op(); op {
// expressions
// (somewhat closely following the structure of exprfmt in fmt.go)
case ir.ONIL:
if !n.Type.HasNil() {
base.Fatalf("unexpected type for nil: %v", n.Type)
if !n.Type().HasNil() {
base.Fatalf("unexpected type for nil: %v", n.Type())
}
if n.Orig != nil && n.Orig != n {
w.expr(n.Orig)
if n.Orig() != nil && n.Orig() != n {
w.expr(n.Orig())
break
}
w.op(ir.OLITERAL)
w.pos(n.Pos)
w.typ(n.Type)
w.pos(n.Pos())
w.typ(n.Type())
case ir.OLITERAL:
w.op(ir.OLITERAL)
w.pos(n.Pos)
w.value(n.Type, n.Val())
w.pos(n.Pos())
w.value(n.Type(), n.Val())
case ir.OMETHEXPR:
// Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
// but for export, this should be rendered as (*pkg.T).meth.
// These nodes have the special property that they are names with a left OTYPE and a right ONAME.
w.op(ir.OXDOT)
w.pos(n.Pos)
w.expr(n.Left) // n.Left.Op == OTYPE
w.selector(n.Right.Sym)
w.pos(n.Pos())
w.expr(n.Left()) // n.Left.Op == OTYPE
w.selector(n.Right().Sym())
case ir.ONAME:
// Package scope name.
@ -1250,20 +1250,20 @@ func (w *exportWriter) expr(n *ir.Node) {
case ir.OTYPE:
w.op(ir.OTYPE)
w.typ(n.Type)
w.typ(n.Type())
case ir.OTYPESW:
w.op(ir.OTYPESW)
w.pos(n.Pos)
w.pos(n.Pos())
var s *types.Sym
if n.Left != nil {
if n.Left.Op != ir.ONONAME {
base.Fatalf("expected ONONAME, got %v", n.Left)
if n.Left() != nil {
if n.Left().Op() != ir.ONONAME {
base.Fatalf("expected ONONAME, got %v", n.Left())
}
s = n.Left.Sym
s = n.Left().Sym()
}
w.localIdent(s, 0) // declared pseudo-variable, if any
w.exprsOrNil(n.Right, nil)
w.exprsOrNil(n.Right(), nil)
// case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
// should have been resolved by typechecking - handled by default case
@ -1276,25 +1276,25 @@ func (w *exportWriter) expr(n *ir.Node) {
case ir.OPTRLIT:
w.op(ir.OADDR)
w.pos(n.Pos)
w.expr(n.Left)
w.pos(n.Pos())
w.expr(n.Left())
case ir.OSTRUCTLIT:
w.op(ir.OSTRUCTLIT)
w.pos(n.Pos)
w.typ(n.Type)
w.elemList(n.List) // special handling of field names
w.pos(n.Pos())
w.typ(n.Type())
w.elemList(n.List()) // special handling of field names
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
w.op(ir.OCOMPLIT)
w.pos(n.Pos)
w.typ(n.Type)
w.exprList(n.List)
w.pos(n.Pos())
w.typ(n.Type())
w.exprList(n.List())
case ir.OKEY:
w.op(ir.OKEY)
w.pos(n.Pos)
w.exprsOrNil(n.Left, n.Right)
w.pos(n.Pos())
w.exprsOrNil(n.Left(), n.Right())
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
@ -1302,40 +1302,40 @@ func (w *exportWriter) expr(n *ir.Node) {
case ir.OCALLPART:
// An OCALLPART is an OXDOT before type checking.
w.op(ir.OXDOT)
w.pos(n.Pos)
w.expr(n.Left)
w.pos(n.Pos())
w.expr(n.Left())
// Right node should be ONAME
w.selector(n.Right.Sym)
w.selector(n.Right().Sym())
case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
w.op(ir.OXDOT)
w.pos(n.Pos)
w.expr(n.Left)
w.selector(n.Sym)
w.pos(n.Pos())
w.expr(n.Left())
w.selector(n.Sym())
case ir.ODOTTYPE, ir.ODOTTYPE2:
w.op(ir.ODOTTYPE)
w.pos(n.Pos)
w.expr(n.Left)
w.typ(n.Type)
w.pos(n.Pos())
w.expr(n.Left())
w.typ(n.Type())
case ir.OINDEX, ir.OINDEXMAP:
w.op(ir.OINDEX)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
w.op(ir.OSLICE)
w.pos(n.Pos)
w.expr(n.Left)
w.pos(n.Pos())
w.expr(n.Left())
low, high, _ := n.SliceBounds()
w.exprsOrNil(low, high)
case ir.OSLICE3, ir.OSLICE3ARR:
w.op(ir.OSLICE3)
w.pos(n.Pos)
w.expr(n.Left)
w.pos(n.Pos())
w.expr(n.Left())
low, high, max := n.SliceBounds()
w.exprsOrNil(low, high)
w.expr(max)
@ -1343,25 +1343,25 @@ func (w *exportWriter) expr(n *ir.Node) {
case ir.OCOPY, ir.OCOMPLEX:
// treated like other builtin calls (see e.g., OREAL)
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.op(ir.OEND)
case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
w.op(ir.OCONV)
w.pos(n.Pos)
w.expr(n.Left)
w.typ(n.Type)
w.pos(n.Pos())
w.expr(n.Left())
w.typ(n.Type())
case ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
w.op(op)
w.pos(n.Pos)
if n.Left != nil {
w.expr(n.Left)
w.pos(n.Pos())
if n.Left() != nil {
w.expr(n.Left())
w.op(ir.OEND)
} else {
w.exprList(n.List) // emits terminating OEND
w.exprList(n.List()) // emits terminating OEND
}
// only append() calls may contain '...' arguments
if op == ir.OAPPEND {
@ -1372,49 +1372,49 @@ func (w *exportWriter) expr(n *ir.Node) {
case ir.OCALL, ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OGETG:
w.op(ir.OCALL)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.expr(n.Left)
w.exprList(n.List)
w.pos(n.Pos())
w.stmtList(n.Init())
w.expr(n.Left())
w.exprList(n.List())
w.bool(n.IsDDD())
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
w.op(op) // must keep separate from OMAKE for importer
w.pos(n.Pos)
w.typ(n.Type)
w.pos(n.Pos())
w.typ(n.Type())
switch {
default:
// empty list
w.op(ir.OEND)
case n.List.Len() != 0: // pre-typecheck
w.exprList(n.List) // emits terminating OEND
case n.Right != nil:
w.expr(n.Left)
w.expr(n.Right)
case n.List().Len() != 0: // pre-typecheck
w.exprList(n.List()) // emits terminating OEND
case n.Right() != nil:
w.expr(n.Left())
w.expr(n.Right())
w.op(ir.OEND)
case n.Left != nil && (n.Op == ir.OMAKESLICE || !n.Left.Type.IsUntyped()):
w.expr(n.Left)
case n.Left() != nil && (n.Op() == ir.OMAKESLICE || !n.Left().Type().IsUntyped()):
w.expr(n.Left())
w.op(ir.OEND)
}
// unary expressions
case ir.OPLUS, ir.ONEG, ir.OADDR, ir.OBITNOT, ir.ODEREF, ir.ONOT, ir.ORECV:
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
w.pos(n.Pos())
w.expr(n.Left())
// binary expressions
case ir.OADD, ir.OAND, ir.OANDAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.OOROR, ir.ORSH, ir.OSEND, ir.OSUB, ir.OXOR:
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
case ir.OADDSTR:
w.op(ir.OADDSTR)
w.pos(n.Pos)
w.exprList(n.List)
w.pos(n.Pos())
w.exprList(n.List())
case ir.ODCLCONST:
// if exporting, DCLCONST should just be removed as its usage
@ -1422,7 +1422,7 @@ func (w *exportWriter) expr(n *ir.Node) {
default:
base.Fatalf("cannot export %v (%d) node\n"+
"\t==> please file an issue and assign to gri@", n.Op, int(n.Op))
"\t==> please file an issue and assign to gri@", n.Op(), int(n.Op()))
}
}
@ -1450,8 +1450,8 @@ func (w *exportWriter) exprsOrNil(a, b *ir.Node) {
func (w *exportWriter) elemList(list ir.Nodes) {
w.uint64(uint64(list.Len()))
for _, n := range list.Slice() {
w.selector(n.Sym)
w.expr(n.Left)
w.selector(n.Sym())
w.expr(n.Left())
}
}
@ -1464,11 +1464,11 @@ func (w *exportWriter) localName(n *ir.Node) {
// PPARAM/PPARAMOUT, because we only want to include vargen in
// non-param names.
var v int32
if n.Class() == ir.PAUTO || (n.Class() == ir.PAUTOHEAP && n.Name.Param.Stackcopy == nil) {
v = n.Name.Vargen
if n.Class() == ir.PAUTO || (n.Class() == ir.PAUTOHEAP && n.Name().Param.Stackcopy == nil) {
v = n.Name().Vargen
}
w.localIdent(n.Sym, v)
w.localIdent(n.Sym(), v)
}
func (w *exportWriter) localIdent(s *types.Sym, v int32) {

View file

@ -42,7 +42,7 @@ var (
)
func expandDecl(n *ir.Node) {
if n.Op != ir.ONONAME {
if n.Op() != ir.ONONAME {
return
}
@ -56,7 +56,7 @@ func expandDecl(n *ir.Node) {
}
func expandInline(fn *ir.Node) {
if fn.Func.Inl.Body != nil {
if fn.Func().Inl.Body != nil {
return
}
@ -69,12 +69,12 @@ func expandInline(fn *ir.Node) {
}
func importReaderFor(n *ir.Node, importers map[*types.Sym]iimporterAndOffset) *importReader {
x, ok := importers[n.Sym]
x, ok := importers[n.Sym()]
if !ok {
return nil
}
return x.p.newReader(x.off, n.Sym.Pkg)
return x.p.newReader(x.off, n.Sym().Pkg)
}
type intReader struct {
@ -282,8 +282,8 @@ func (r *importReader) setPkg() {
}
func (r *importReader) doDecl(n *ir.Node) {
if n.Op != ir.ONONAME {
base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym, n.Op)
if n.Op() != ir.ONONAME {
base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym(), n.Op())
}
tag := r.byte()
@ -293,24 +293,24 @@ func (r *importReader) doDecl(n *ir.Node) {
case 'A':
typ := r.typ()
importalias(r.p.ipkg, pos, n.Sym, typ)
importalias(r.p.ipkg, pos, n.Sym(), typ)
case 'C':
typ := r.typ()
val := r.value(typ)
importconst(r.p.ipkg, pos, n.Sym, typ, val)
importconst(r.p.ipkg, pos, n.Sym(), typ, val)
case 'F':
typ := r.signature(nil)
importfunc(r.p.ipkg, pos, n.Sym, typ)
importfunc(r.p.ipkg, pos, n.Sym(), typ)
r.funcExt(n)
case 'T':
// Types can be recursive. We need to setup a stub
// declaration before recursing.
t := importtype(r.p.ipkg, pos, n.Sym)
t := importtype(r.p.ipkg, pos, n.Sym())
// We also need to defer width calculations until
// after the underlying type has been assigned.
@ -332,7 +332,7 @@ func (r *importReader) doDecl(n *ir.Node) {
mtyp := r.signature(recv)
m := newfuncnamel(mpos, methodSym(recv.Type, msym), new(ir.Func))
m.Type = mtyp
m.SetType(mtyp)
m.SetClass(ir.PFUNC)
// methodSym already marked m.Sym as a function.
@ -350,7 +350,7 @@ func (r *importReader) doDecl(n *ir.Node) {
case 'V':
typ := r.typ()
importvar(r.p.ipkg, pos, n.Sym, typ)
importvar(r.p.ipkg, pos, n.Sym(), typ)
r.varExt(n)
default:
@ -500,13 +500,13 @@ func (r *importReader) typ1() *types.Type {
// types. Therefore, this must be a package-scope
// type.
n := ir.AsNode(r.qualifiedIdent().PkgDef())
if n.Op == ir.ONONAME {
if n.Op() == ir.ONONAME {
expandDecl(n)
}
if n.Op != ir.OTYPE {
base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op, n.Sym, n)
if n.Op() != ir.OTYPE {
base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op(), n.Sym(), n)
}
return n.Type
return n.Type()
case pointerType:
return types.NewPtr(r.typ())
case sliceType:
@ -636,27 +636,27 @@ func (r *importReader) byte() byte {
// Compiler-specific extensions.
func (r *importReader) varExt(n *ir.Node) {
r.linkname(n.Sym)
r.symIdx(n.Sym)
r.linkname(n.Sym())
r.symIdx(n.Sym())
}
func (r *importReader) funcExt(n *ir.Node) {
r.linkname(n.Sym)
r.symIdx(n.Sym)
r.linkname(n.Sym())
r.symIdx(n.Sym())
// Escape analysis.
for _, fs := range &types.RecvsParams {
for _, f := range fs(n.Type).FieldSlice() {
for _, f := range fs(n.Type()).FieldSlice() {
f.Note = r.string()
}
}
// Inline body.
if u := r.uint64(); u > 0 {
n.Func.Inl = &ir.Inline{
n.Func().Inl = &ir.Inline{
Cost: int32(u - 1),
}
n.Func.Endlineno = r.pos()
n.Func().Endlineno = r.pos()
}
}
@ -696,7 +696,7 @@ func (r *importReader) typeExt(t *types.Type) {
var typeSymIdx = make(map[*types.Type][2]int64)
func (r *importReader) doInline(n *ir.Node) {
if len(n.Func.Inl.Body) != 0 {
if len(n.Func().Inl.Body) != 0 {
base.Fatalf("%v already has inline body", n)
}
@ -712,15 +712,15 @@ func (r *importReader) doInline(n *ir.Node) {
// functions).
body = []*ir.Node{}
}
n.Func.Inl.Body = body
n.Func().Inl.Body = body
importlist = append(importlist, n)
if base.Flag.E > 0 && base.Flag.LowerM > 2 {
if base.Flag.LowerM > 3 {
fmt.Printf("inl body for %v %#v: %+v\n", n, n.Type, ir.AsNodes(n.Func.Inl.Body))
fmt.Printf("inl body for %v %#v: %+v\n", n, n.Type(), ir.AsNodes(n.Func().Inl.Body))
} else {
fmt.Printf("inl body for %v %#v: %v\n", n, n.Type, ir.AsNodes(n.Func.Inl.Body))
fmt.Printf("inl body for %v %#v: %v\n", n, n.Type(), ir.AsNodes(n.Func().Inl.Body))
}
}
}
@ -748,8 +748,8 @@ func (r *importReader) stmtList() []*ir.Node {
break
}
// OBLOCK nodes may be created when importing ODCL nodes - unpack them
if n.Op == ir.OBLOCK {
list = append(list, n.List.Slice()...)
if n.Op() == ir.OBLOCK {
list = append(list, n.List().Slice()...)
} else {
list = append(list, n)
}
@ -759,22 +759,22 @@ func (r *importReader) stmtList() []*ir.Node {
}
func (r *importReader) caseList(sw *ir.Node) []*ir.Node {
namedTypeSwitch := sw.Op == ir.OSWITCH && sw.Left != nil && sw.Left.Op == ir.OTYPESW && sw.Left.Left != nil
namedTypeSwitch := sw.Op() == ir.OSWITCH && sw.Left() != nil && sw.Left().Op() == ir.OTYPESW && sw.Left().Left() != nil
cases := make([]*ir.Node, r.uint64())
for i := range cases {
cas := ir.NodAt(r.pos(), ir.OCASE, nil, nil)
cas.List.Set(r.stmtList())
cas.PtrList().Set(r.stmtList())
if namedTypeSwitch {
// Note: per-case variables will have distinct, dotted
// names after import. That's okay: swt.go only needs
// Sym for diagnostics anyway.
caseVar := ir.NewNameAt(cas.Pos, r.ident())
caseVar := ir.NewNameAt(cas.Pos(), r.ident())
declare(caseVar, dclcontext)
cas.Rlist.Set1(caseVar)
caseVar.Name.Defn = sw.Left
cas.PtrRlist().Set1(caseVar)
caseVar.Name().Defn = sw.Left()
}
cas.Nbody.Set(r.stmtList())
cas.PtrBody().Set(r.stmtList())
cases[i] = cas
}
return cases
@ -794,7 +794,7 @@ func (r *importReader) exprList() []*ir.Node {
func (r *importReader) expr() *ir.Node {
n := r.node()
if n != nil && n.Op == ir.OBLOCK {
if n != nil && n.Op() == ir.OBLOCK {
base.Fatalf("unexpected block node: %v", n)
}
return n
@ -821,7 +821,7 @@ func (r *importReader) node() *ir.Node {
n = ir.NewLiteral(r.value(typ))
}
n = npos(pos, n)
n.Type = typ
n.SetType(typ)
return n
case ir.ONONAME:
@ -839,10 +839,10 @@ func (r *importReader) node() *ir.Node {
case ir.OTYPESW:
n := ir.NodAt(r.pos(), ir.OTYPESW, nil, nil)
if s := r.ident(); s != nil {
n.Left = npos(n.Pos, newnoname(s))
n.SetLeft(npos(n.Pos(), newnoname(s)))
}
right, _ := r.exprsOrNil()
n.Right = right
n.SetRight(right)
return n
// case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
@ -859,7 +859,7 @@ func (r *importReader) node() *ir.Node {
savedlineno := base.Pos
base.Pos = r.pos()
n := ir.NodAt(base.Pos, ir.OCOMPLIT, nil, typenod(r.typ()))
n.List.Set(r.elemList()) // special handling of field names
n.PtrList().Set(r.elemList()) // special handling of field names
base.Pos = savedlineno
return n
@ -868,7 +868,7 @@ func (r *importReader) node() *ir.Node {
case ir.OCOMPLIT:
n := ir.NodAt(r.pos(), ir.OCOMPLIT, nil, typenod(r.typ()))
n.List.Set(r.exprList())
n.PtrList().Set(r.exprList())
return n
case ir.OKEY:
@ -894,7 +894,7 @@ func (r *importReader) node() *ir.Node {
case ir.ODOTTYPE:
n := ir.NodAt(r.pos(), ir.ODOTTYPE, r.expr(), nil)
n.Type = r.typ()
n.SetType(r.typ())
return n
// case OINDEX, OINDEXMAP, OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
@ -907,7 +907,7 @@ func (r *importReader) node() *ir.Node {
n := ir.NodAt(r.pos(), op, r.expr(), nil)
low, high := r.exprsOrNil()
var max *ir.Node
if n.Op.IsSlice3() {
if n.Op().IsSlice3() {
max = r.expr()
}
n.SetSliceBounds(low, high, max)
@ -918,12 +918,12 @@ func (r *importReader) node() *ir.Node {
case ir.OCONV:
n := ir.NodAt(r.pos(), ir.OCONV, r.expr(), nil)
n.Type = r.typ()
n.SetType(r.typ())
return n
case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := npos(r.pos(), builtinCall(op))
n.List.Set(r.exprList())
n.PtrList().Set(r.exprList())
if op == ir.OAPPEND {
n.SetIsDDD(r.bool())
}
@ -934,16 +934,16 @@ func (r *importReader) node() *ir.Node {
case ir.OCALL:
n := ir.NodAt(r.pos(), ir.OCALL, nil, nil)
n.Ninit.Set(r.stmtList())
n.Left = r.expr()
n.List.Set(r.exprList())
n.PtrInit().Set(r.stmtList())
n.SetLeft(r.expr())
n.PtrList().Set(r.exprList())
n.SetIsDDD(r.bool())
return n
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
n := npos(r.pos(), builtinCall(ir.OMAKE))
n.List.Append(typenod(r.typ()))
n.List.Append(r.exprList()...)
n.PtrList().Append(typenod(r.typ()))
n.PtrList().Append(r.exprList()...)
return n
// unary expressions
@ -984,12 +984,12 @@ func (r *importReader) node() *ir.Node {
case ir.OASOP:
n := ir.NodAt(r.pos(), ir.OASOP, nil, nil)
n.SetSubOp(r.op())
n.Left = r.expr()
n.SetLeft(r.expr())
if !r.bool() {
n.Right = nodintconst(1)
n.SetRight(nodintconst(1))
n.SetImplicit(true)
} else {
n.Right = r.expr()
n.SetRight(r.expr())
}
return n
@ -998,13 +998,13 @@ func (r *importReader) node() *ir.Node {
case ir.OAS2:
n := ir.NodAt(r.pos(), ir.OAS2, nil, nil)
n.List.Set(r.exprList())
n.Rlist.Set(r.exprList())
n.PtrList().Set(r.exprList())
n.PtrRlist().Set(r.exprList())
return n
case ir.ORETURN:
n := ir.NodAt(r.pos(), ir.ORETURN, nil, nil)
n.List.Set(r.exprList())
n.PtrList().Set(r.exprList())
return n
// case ORETJMP:
@ -1015,34 +1015,34 @@ func (r *importReader) node() *ir.Node {
case ir.OIF:
n := ir.NodAt(r.pos(), ir.OIF, nil, nil)
n.Ninit.Set(r.stmtList())
n.Left = r.expr()
n.Nbody.Set(r.stmtList())
n.Rlist.Set(r.stmtList())
n.PtrInit().Set(r.stmtList())
n.SetLeft(r.expr())
n.PtrBody().Set(r.stmtList())
n.PtrRlist().Set(r.stmtList())
return n
case ir.OFOR:
n := ir.NodAt(r.pos(), ir.OFOR, nil, nil)
n.Ninit.Set(r.stmtList())
n.PtrInit().Set(r.stmtList())
left, right := r.exprsOrNil()
n.Left = left
n.Right = right
n.Nbody.Set(r.stmtList())
n.SetLeft(left)
n.SetRight(right)
n.PtrBody().Set(r.stmtList())
return n
case ir.ORANGE:
n := ir.NodAt(r.pos(), ir.ORANGE, nil, nil)
n.List.Set(r.stmtList())
n.Right = r.expr()
n.Nbody.Set(r.stmtList())
n.PtrList().Set(r.stmtList())
n.SetRight(r.expr())
n.PtrBody().Set(r.stmtList())
return n
case ir.OSELECT, ir.OSWITCH:
n := ir.NodAt(r.pos(), op, nil, nil)
n.Ninit.Set(r.stmtList())
n.PtrInit().Set(r.stmtList())
left, _ := r.exprsOrNil()
n.Left = left
n.List.Set(r.caseList(n))
n.SetLeft(left)
n.PtrList().Set(r.caseList(n))
return n
// case OCASE:
@ -1056,7 +1056,7 @@ func (r *importReader) node() *ir.Node {
pos := r.pos()
left, _ := r.exprsOrNil()
if left != nil {
left = NewName(left.Sym)
left = NewName(left.Sym())
}
return ir.NodAt(pos, op, left, nil)
@ -1065,7 +1065,7 @@ func (r *importReader) node() *ir.Node {
case ir.OGOTO, ir.OLABEL:
n := ir.NodAt(r.pos(), op, nil, nil)
n.Sym = lookup(r.string())
n.SetSym(lookup(r.string()))
return n
case ir.OEND:

View file

@ -46,16 +46,16 @@ func fninit(n []*ir.Node) {
// Make a function that contains all the initialization statements.
if len(nf) > 0 {
base.Pos = nf[0].Pos // prolog/epilog gets line number of first init stmt
base.Pos = nf[0].Pos() // prolog/epilog gets line number of first init stmt
initializers := lookup("init")
fn := dclfunc(initializers, ir.Nod(ir.OTFUNC, nil, nil))
for _, dcl := range initTodo.Func.Dcl {
dcl.Name.Curfn = fn
for _, dcl := range initTodo.Func().Dcl {
dcl.Name().Curfn = fn
}
fn.Func.Dcl = append(fn.Func.Dcl, initTodo.Func.Dcl...)
initTodo.Func.Dcl = nil
fn.Func().Dcl = append(fn.Func().Dcl, initTodo.Func().Dcl...)
initTodo.Func().Dcl = nil
fn.Nbody.Set(nf)
fn.PtrBody().Set(nf)
funcbody()
fn = typecheck(fn, ctxStmt)
@ -65,7 +65,7 @@ func fninit(n []*ir.Node) {
xtop = append(xtop, fn)
fns = append(fns, initializers.Linksym())
}
if initTodo.Func.Dcl != nil {
if initTodo.Func().Dcl != nil {
// We only generate temps using initTodo if there
// are package-scope initialization statements, so
// something's weird if we get here.
@ -76,9 +76,9 @@ func fninit(n []*ir.Node) {
// Record user init functions.
for i := 0; i < renameinitgen; i++ {
s := lookupN("init.", i)
fn := ir.AsNode(s.Def).Name.Defn
fn := ir.AsNode(s.Def).Name().Defn
// Skip init functions with empty bodies.
if fn.Nbody.Len() == 1 && fn.Nbody.First().Op == ir.OEMPTY {
if fn.Body().Len() == 1 && fn.Body().First().Op() == ir.OEMPTY {
continue
}
fns = append(fns, s.Linksym())
@ -91,7 +91,7 @@ func fninit(n []*ir.Node) {
// Make an .inittask structure.
sym := lookup(".inittask")
nn := NewName(sym)
nn.Type = types.Types[types.TUINT8] // fake type
nn.SetType(types.Types[types.TUINT8]) // fake type
nn.SetClass(ir.PEXTERN)
sym.Def = ir.AsTypesNode(nn)
exportsym(nn)

View file

@ -86,7 +86,7 @@ func initOrder(l []*ir.Node) []*ir.Node {
// Process all package-level assignment in declaration order.
for _, n := range l {
switch n.Op {
switch n.Op() {
case ir.OAS, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
o.processAssign(n)
o.flushReady(s.staticInit)
@ -100,7 +100,7 @@ func initOrder(l []*ir.Node) []*ir.Node {
// Check that all assignments are now Done; if not, there must
// have been a dependency cycle.
for _, n := range l {
switch n.Op {
switch n.Op() {
case ir.OAS, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
if n.Initorder() != InitDone {
// If there have already been errors
@ -126,27 +126,27 @@ func initOrder(l []*ir.Node) []*ir.Node {
}
func (o *InitOrder) processAssign(n *ir.Node) {
if n.Initorder() != InitNotStarted || n.Xoffset != types.BADWIDTH {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
if n.Initorder() != InitNotStarted || n.Offset() != types.BADWIDTH {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset())
}
n.SetInitorder(InitPending)
n.Xoffset = 0
n.SetOffset(0)
// Compute number of variable dependencies and build the
// inverse dependency ("blocking") graph.
for dep := range collectDeps(n, true) {
defn := dep.Name.Defn
defn := dep.Name().Defn
// Skip dependencies on functions (PFUNC) and
// variables already initialized (InitDone).
if dep.Class() != ir.PEXTERN || defn.Initorder() == InitDone {
continue
}
n.Xoffset = n.Xoffset + 1
n.SetOffset(n.Offset() + 1)
o.blocking[defn] = append(o.blocking[defn], n)
}
if n.Xoffset == 0 {
if n.Offset() == 0 {
heap.Push(&o.ready, n)
}
}
@ -157,20 +157,20 @@ func (o *InitOrder) processAssign(n *ir.Node) {
func (o *InitOrder) flushReady(initialize func(*ir.Node)) {
for o.ready.Len() != 0 {
n := heap.Pop(&o.ready).(*ir.Node)
if n.Initorder() != InitPending || n.Xoffset != 0 {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
if n.Initorder() != InitPending || n.Offset() != 0 {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Offset())
}
initialize(n)
n.SetInitorder(InitDone)
n.Xoffset = types.BADWIDTH
n.SetOffset(types.BADWIDTH)
blocked := o.blocking[n]
delete(o.blocking, n)
for _, m := range blocked {
m.Xoffset = m.Xoffset - 1
if m.Xoffset == 0 {
m.SetOffset(m.Offset() - 1)
if m.Offset() == 0 {
heap.Push(&o.ready, m)
}
}
@ -196,14 +196,14 @@ func findInitLoopAndExit(n *ir.Node, path *[]*ir.Node) {
// There might be multiple loops involving n; by sorting
// references, we deterministically pick the one reported.
refers := collectDeps(n.Name.Defn, false).Sorted(func(ni, nj *ir.Node) bool {
return ni.Pos.Before(nj.Pos)
refers := collectDeps(n.Name().Defn, false).Sorted(func(ni, nj *ir.Node) bool {
return ni.Pos().Before(nj.Pos())
})
*path = append(*path, n)
for _, ref := range refers {
// Short-circuit variables that were initialized.
if ref.Class() == ir.PEXTERN && ref.Name.Defn.Initorder() == InitDone {
if ref.Class() == ir.PEXTERN && ref.Name().Defn.Initorder() == InitDone {
continue
}
@ -220,7 +220,7 @@ func reportInitLoopAndExit(l []*ir.Node) {
// the start.
i := -1
for j, n := range l {
if n.Class() == ir.PEXTERN && (i == -1 || n.Pos.Before(l[i].Pos)) {
if n.Class() == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
i = j
}
}
@ -242,7 +242,7 @@ func reportInitLoopAndExit(l []*ir.Node) {
}
fmt.Fprintf(&msg, "\t%v: %v", ir.Line(l[0]), l[0])
base.ErrorfAt(l[0].Pos, msg.String())
base.ErrorfAt(l[0].Pos(), msg.String())
base.ErrorExit()
}
@ -252,15 +252,15 @@ func reportInitLoopAndExit(l []*ir.Node) {
// upon functions (but not variables).
func collectDeps(n *ir.Node, transitive bool) ir.NodeSet {
d := initDeps{transitive: transitive}
switch n.Op {
switch n.Op() {
case ir.OAS:
d.inspect(n.Right)
d.inspect(n.Right())
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
d.inspect(n.Right)
d.inspect(n.Right())
case ir.ODCLFUNC:
d.inspectList(n.Nbody)
d.inspectList(n.Body())
default:
base.Fatalf("unexpected Op: %v", n.Op)
base.Fatalf("unexpected Op: %v", n.Op())
}
return d.seen
}
@ -276,7 +276,7 @@ func (d *initDeps) inspectList(l ir.Nodes) { ir.InspectList(l, d.visit) }
// visit calls foundDep on any package-level functions or variables
// referenced by n, if any.
func (d *initDeps) visit(n *ir.Node) bool {
switch n.Op {
switch n.Op() {
case ir.OMETHEXPR:
d.foundDep(methodExprName(n))
return false
@ -288,7 +288,7 @@ func (d *initDeps) visit(n *ir.Node) bool {
}
case ir.OCLOSURE:
d.inspectList(n.Func.Decl.Nbody)
d.inspectList(n.Func().Decl.Body())
case ir.ODOTMETH, ir.OCALLPART:
d.foundDep(methodExprName(n))
@ -308,7 +308,7 @@ func (d *initDeps) foundDep(n *ir.Node) {
// Names without definitions aren't interesting as far as
// initialization ordering goes.
if n.Name.Defn == nil {
if n.Name().Defn == nil {
return
}
@ -317,7 +317,7 @@ func (d *initDeps) foundDep(n *ir.Node) {
}
d.seen.Add(n)
if d.transitive && n.Class() == ir.PFUNC {
d.inspectList(n.Name.Defn.Nbody)
d.inspectList(n.Name().Defn.Body())
}
}
@ -331,7 +331,9 @@ func (d *initDeps) foundDep(n *ir.Node) {
type declOrder []*ir.Node
func (s declOrder) Len() int { return len(s) }
func (s declOrder) Less(i, j int) bool { return firstLHS(s[i]).Pos.Before(firstLHS(s[j]).Pos) }
func (s declOrder) Less(i, j int) bool {
return firstLHS(s[i]).Pos().Before(firstLHS(s[j]).Pos())
}
func (s declOrder) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(*ir.Node)) }
@ -344,13 +346,13 @@ func (s *declOrder) Pop() interface{} {
// firstLHS returns the first expression on the left-hand side of
// assignment n.
func firstLHS(n *ir.Node) *ir.Node {
switch n.Op {
switch n.Op() {
case ir.OAS:
return n.Left
return n.Left()
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2RECV, ir.OAS2MAPR:
return n.List.First()
return n.List().First()
}
base.Fatalf("unexpected Op: %v", n.Op)
base.Fatalf("unexpected Op: %v", n.Op())
return nil
}

File diff suppressed because it is too large Load diff

View file

@ -253,7 +253,7 @@ func Main(archInit func(*Arch)) {
timings.Start("fe", "typecheck", "top1")
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if op := n.Op; op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.Left.Name.Param.Alias()) {
if op := n.Op(); op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.Left().Name().Param.Alias()) {
xtop[i] = typecheck(n, ctxStmt)
}
}
@ -265,7 +265,7 @@ func Main(archInit func(*Arch)) {
timings.Start("fe", "typecheck", "top2")
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if op := n.Op; op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.Left.Name.Param.Alias() {
if op := n.Op(); op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.Left().Name().Param.Alias() {
xtop[i] = typecheck(n, ctxStmt)
}
}
@ -276,14 +276,14 @@ func Main(archInit func(*Arch)) {
var fcount int64
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if n.Op == ir.ODCLFUNC {
if n.Op() == ir.ODCLFUNC {
Curfn = n
decldepth = 1
errorsBefore := base.Errors()
typecheckslice(Curfn.Nbody.Slice(), ctxStmt)
typecheckslice(Curfn.Body().Slice(), ctxStmt)
checkreturn(Curfn)
if base.Errors() > errorsBefore {
Curfn.Nbody.Set(nil) // type errors; do not compile
Curfn.PtrBody().Set(nil) // type errors; do not compile
}
// Now that we've checked whether n terminates,
// we can eliminate some obviously dead code.
@ -306,7 +306,7 @@ func Main(archInit func(*Arch)) {
// because variables captured by value do not escape.
timings.Start("fe", "capturevars")
for _, n := range xtop {
if n.Op == ir.ODCLFUNC && n.Func.OClosure != nil {
if n.Op() == ir.ODCLFUNC && n.Func().OClosure != nil {
Curfn = n
capturevars(n)
}
@ -321,7 +321,7 @@ func Main(archInit func(*Arch)) {
// Typecheck imported function bodies if Debug.l > 1,
// otherwise lazily when used or re-exported.
for _, n := range importlist {
if n.Func.Inl != nil {
if n.Func().Inl != nil {
typecheckinl(n)
}
}
@ -340,7 +340,7 @@ func Main(archInit func(*Arch)) {
caninl(n)
} else {
if base.Flag.LowerM > 1 {
fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(n), n.Func.Nname)
fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(n), n.Func().Nname)
}
}
inlcalls(n)
@ -349,7 +349,7 @@ func Main(archInit func(*Arch)) {
}
for _, n := range xtop {
if n.Op == ir.ODCLFUNC {
if n.Op() == ir.ODCLFUNC {
devirtualize(n)
}
}
@ -379,7 +379,7 @@ func Main(archInit func(*Arch)) {
// before walk reaches a call of a closure.
timings.Start("fe", "xclosures")
for _, n := range xtop {
if n.Op == ir.ODCLFUNC && n.Func.OClosure != nil {
if n.Op() == ir.ODCLFUNC && n.Func().OClosure != nil {
Curfn = n
transformclosure(n)
}
@ -402,7 +402,7 @@ func Main(archInit func(*Arch)) {
fcount = 0
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if n.Op == ir.ODCLFUNC {
if n.Op() == ir.ODCLFUNC {
funccompile(n)
fcount++
}
@ -430,7 +430,7 @@ func Main(archInit func(*Arch)) {
// Phase 9: Check external declarations.
timings.Start("be", "externaldcls")
for i, n := range externdcl {
if n.Op == ir.ONAME {
if n.Op() == ir.ONAME {
externdcl[i] = typecheck(externdcl[i], ctxExpr)
}
}
@ -484,7 +484,7 @@ func Main(archInit func(*Arch)) {
func numNonClosures(list []*ir.Node) int {
count := 0
for _, n := range list {
if n.Func.OClosure == nil {
if n.Func().OClosure == nil {
count++
}
}
@ -949,14 +949,14 @@ func clearImports() {
if n == nil {
continue
}
if n.Op == ir.OPACK {
if n.Op() == ir.OPACK {
// throw away top-level package name left over
// from previous file.
// leave s->block set to cause redeclaration
// errors if a conflicting top-level name is
// introduced by a different file.
if !n.Name.Used() && base.SyntaxErrors() == 0 {
unused = append(unused, importedPkg{n.Pos, n.Name.Pkg.Path, s.Name})
if !n.Name().Used() && base.SyntaxErrors() == 0 {
unused = append(unused, importedPkg{n.Pos(), n.Name().Pkg.Path, s.Name})
}
s.Def = nil
continue
@ -964,9 +964,9 @@ func clearImports() {
if IsAlias(s) {
// throw away top-level name left over
// from previous import . "x"
if n.Name != nil && n.Name.Pack != nil && !n.Name.Pack.Name.Used() && base.SyntaxErrors() == 0 {
unused = append(unused, importedPkg{n.Name.Pack.Pos, n.Name.Pack.Name.Pkg.Path, ""})
n.Name.Pack.Name.SetUsed(true)
if n.Name() != nil && n.Name().Pack != nil && !n.Name().Pack.Name().Used() && base.SyntaxErrors() == 0 {
unused = append(unused, importedPkg{n.Name().Pack.Pos(), n.Name().Pack.Name().Pkg.Path, ""})
n.Name().Pack.Name().SetUsed(true)
}
s.Def = nil
continue
@ -980,7 +980,7 @@ func clearImports() {
}
func IsAlias(sym *types.Sym) bool {
return sym.Def != nil && ir.AsNode(sym.Def).Sym != sym
return sym.Def != nil && ir.AsNode(sym.Def).Sym() != sym
}
// recordFlags records the specified command-line flags to be placed

View file

@ -162,10 +162,10 @@ func (p *noder) funcBody(fn *ir.Node, block *syntax.BlockStmt) {
if body == nil {
body = []*ir.Node{ir.Nod(ir.OEMPTY, nil, nil)}
}
fn.Nbody.Set(body)
fn.PtrBody().Set(body)
base.Pos = p.makeXPos(block.Rbrace)
fn.Func.Endlineno = base.Pos
fn.Func().Endlineno = base.Pos
}
funcbody()
@ -176,9 +176,9 @@ func (p *noder) openScope(pos syntax.Pos) {
types.Markdcl()
if trackScopes {
Curfn.Func.Parents = append(Curfn.Func.Parents, p.scope)
p.scopeVars = append(p.scopeVars, len(Curfn.Func.Dcl))
p.scope = ir.ScopeID(len(Curfn.Func.Parents))
Curfn.Func().Parents = append(Curfn.Func().Parents, p.scope)
p.scopeVars = append(p.scopeVars, len(Curfn.Func().Dcl))
p.scope = ir.ScopeID(len(Curfn.Func().Parents))
p.markScope(pos)
}
@ -191,29 +191,29 @@ func (p *noder) closeScope(pos syntax.Pos) {
if trackScopes {
scopeVars := p.scopeVars[len(p.scopeVars)-1]
p.scopeVars = p.scopeVars[:len(p.scopeVars)-1]
if scopeVars == len(Curfn.Func.Dcl) {
if scopeVars == len(Curfn.Func().Dcl) {
// no variables were declared in this scope, so we can retract it.
if int(p.scope) != len(Curfn.Func.Parents) {
if int(p.scope) != len(Curfn.Func().Parents) {
base.Fatalf("scope tracking inconsistency, no variables declared but scopes were not retracted")
}
p.scope = Curfn.Func.Parents[p.scope-1]
Curfn.Func.Parents = Curfn.Func.Parents[:len(Curfn.Func.Parents)-1]
p.scope = Curfn.Func().Parents[p.scope-1]
Curfn.Func().Parents = Curfn.Func().Parents[:len(Curfn.Func().Parents)-1]
nmarks := len(Curfn.Func.Marks)
Curfn.Func.Marks[nmarks-1].Scope = p.scope
nmarks := len(Curfn.Func().Marks)
Curfn.Func().Marks[nmarks-1].Scope = p.scope
prevScope := ir.ScopeID(0)
if nmarks >= 2 {
prevScope = Curfn.Func.Marks[nmarks-2].Scope
prevScope = Curfn.Func().Marks[nmarks-2].Scope
}
if Curfn.Func.Marks[nmarks-1].Scope == prevScope {
Curfn.Func.Marks = Curfn.Func.Marks[:nmarks-1]
if Curfn.Func().Marks[nmarks-1].Scope == prevScope {
Curfn.Func().Marks = Curfn.Func().Marks[:nmarks-1]
}
return
}
p.scope = Curfn.Func.Parents[p.scope-1]
p.scope = Curfn.Func().Parents[p.scope-1]
p.markScope(pos)
}
@ -221,10 +221,10 @@ func (p *noder) closeScope(pos syntax.Pos) {
func (p *noder) markScope(pos syntax.Pos) {
xpos := p.makeXPos(pos)
if i := len(Curfn.Func.Marks); i > 0 && Curfn.Func.Marks[i-1].Pos == xpos {
Curfn.Func.Marks[i-1].Scope = p.scope
if i := len(Curfn.Func().Marks); i > 0 && Curfn.Func().Marks[i-1].Pos == xpos {
Curfn.Func().Marks[i-1].Scope = p.scope
} else {
Curfn.Func.Marks = append(Curfn.Func.Marks, ir.Mark{Pos: xpos, Scope: p.scope})
Curfn.Func().Marks = append(Curfn.Func().Marks, ir.Mark{Pos: xpos, Scope: p.scope})
}
}
@ -357,24 +357,24 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
}
pack := p.nod(imp, ir.OPACK, nil, nil)
pack.Sym = my
pack.Name.Pkg = ipkg
pack.SetSym(my)
pack.Name().Pkg = ipkg
switch my.Name {
case ".":
importdot(ipkg, pack)
return
case "init":
base.ErrorfAt(pack.Pos, "cannot import package as init - init must be a func")
base.ErrorfAt(pack.Pos(), "cannot import package as init - init must be a func")
return
case "_":
return
}
if my.Def != nil {
redeclare(pack.Pos, my, "as imported package name")
redeclare(pack.Pos(), my, "as imported package name")
}
my.Def = ir.AsTypesNode(pack)
my.Lastlineno = pack.Pos
my.Lastlineno = pack.Pos()
my.Block = 1 // at top level
}
@ -452,14 +452,14 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*ir.Node {
}
v := values[i]
if decl.Values == nil {
v = treecopy(v, n.Pos)
v = treecopy(v, n.Pos())
}
n.Op = ir.OLITERAL
n.SetOp(ir.OLITERAL)
declare(n, dclcontext)
n.Name.Param.Ntype = typ
n.Name.Defn = v
n.Name().Param.Ntype = typ
n.Name().Defn = v
n.SetIota(cs.iota)
nn = append(nn, p.nod(decl, ir.ODCLCONST, n, nil))
@ -476,13 +476,13 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []*ir.Node {
func (p *noder) typeDecl(decl *syntax.TypeDecl) *ir.Node {
n := p.declName(decl.Name)
n.Op = ir.OTYPE
n.SetOp(ir.OTYPE)
declare(n, dclcontext)
// decl.Type may be nil but in that case we got a syntax error during parsing
typ := p.typeExprOrNil(decl.Type)
param := n.Name.Param
param := n.Name().Param
param.Ntype = typ
param.SetAlias(decl.Alias)
if pragma, ok := decl.Pragma.(*Pragma); ok {
@ -495,7 +495,7 @@ func (p *noder) typeDecl(decl *syntax.TypeDecl) *ir.Node {
nod := p.nod(decl, ir.ODCLTYPE, n, nil)
if param.Alias() && !langSupported(1, 9, ir.LocalPkg) {
base.ErrorfAt(nod.Pos, "type aliases only supported as of -lang=go1.9")
base.ErrorfAt(nod.Pos(), "type aliases only supported as of -lang=go1.9")
}
return nod
}
@ -510,7 +510,7 @@ func (p *noder) declNames(names []*syntax.Name) []*ir.Node {
func (p *noder) declName(name *syntax.Name) *ir.Node {
n := dclname(p.name(name))
n.Pos = p.pos(name)
n.SetPos(p.pos(name))
return n
}
@ -522,43 +522,43 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *ir.Node {
if fun.Recv == nil {
if name.Name == "init" {
name = renameinit()
if t.List.Len() > 0 || t.Rlist.Len() > 0 {
base.ErrorfAt(f.Pos, "func init must have no arguments and no return values")
if t.List().Len() > 0 || t.Rlist().Len() > 0 {
base.ErrorfAt(f.Pos(), "func init must have no arguments and no return values")
}
}
if ir.LocalPkg.Name == "main" && name.Name == "main" {
if t.List.Len() > 0 || t.Rlist.Len() > 0 {
base.ErrorfAt(f.Pos, "func main must have no arguments and no return values")
if t.List().Len() > 0 || t.Rlist().Len() > 0 {
base.ErrorfAt(f.Pos(), "func main must have no arguments and no return values")
}
}
} else {
f.Func.Shortname = name
name = ir.BlankNode.Sym // filled in by typecheckfunc
f.Func().Shortname = name
name = ir.BlankNode.Sym() // filled in by typecheckfunc
}
f.Func.Nname = newfuncnamel(p.pos(fun.Name), name, f.Func)
f.Func.Nname.Name.Defn = f
f.Func.Nname.Name.Param.Ntype = t
f.Func().Nname = newfuncnamel(p.pos(fun.Name), name, f.Func())
f.Func().Nname.Name().Defn = f
f.Func().Nname.Name().Param.Ntype = t
if pragma, ok := fun.Pragma.(*Pragma); ok {
f.Func.Pragma = pragma.Flag & FuncPragmas
f.Func().Pragma = pragma.Flag & FuncPragmas
if pragma.Flag&ir.Systemstack != 0 && pragma.Flag&ir.Nosplit != 0 {
base.ErrorfAt(f.Pos, "go:nosplit and go:systemstack cannot be combined")
base.ErrorfAt(f.Pos(), "go:nosplit and go:systemstack cannot be combined")
}
pragma.Flag &^= FuncPragmas
p.checkUnused(pragma)
}
if fun.Recv == nil {
declare(f.Func.Nname, ir.PFUNC)
declare(f.Func().Nname, ir.PFUNC)
}
p.funcBody(f, fun.Body)
if fun.Body != nil {
if f.Func.Pragma&ir.Noescape != 0 {
base.ErrorfAt(f.Pos, "can only use //go:noescape with external func implementations")
if f.Func().Pragma&ir.Noescape != 0 {
base.ErrorfAt(f.Pos(), "can only use //go:noescape with external func implementations")
}
} else {
if base.Flag.Complete || strings.HasPrefix(ir.FuncName(f), "init.") {
@ -572,7 +572,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *ir.Node {
}
}
if !isLinknamed {
base.ErrorfAt(f.Pos, "missing function body")
base.ErrorfAt(f.Pos(), "missing function body")
}
}
}
@ -583,10 +583,10 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) *ir.Node {
func (p *noder) signature(recv *syntax.Field, typ *syntax.FuncType) *ir.Node {
n := p.nod(typ, ir.OTFUNC, nil, nil)
if recv != nil {
n.Left = p.param(recv, false, false)
n.SetLeft(p.param(recv, false, false))
}
n.List.Set(p.params(typ.ParamList, true))
n.Rlist.Set(p.params(typ.ResultList, false))
n.PtrList().Set(p.params(typ.ParamList, true))
n.PtrRlist().Set(p.params(typ.ResultList, false))
return n
}
@ -609,7 +609,7 @@ func (p *noder) param(param *syntax.Field, dddOk, final bool) *ir.Node {
n := p.nodSym(param, ir.ODCLFIELD, typ, name)
// rewrite ...T parameter
if typ.Op == ir.ODDD {
if typ.Op() == ir.ODDD {
if !dddOk {
// We mark these as syntax errors to get automatic elimination
// of multiple such errors per line (see ErrorfAt in subr.go).
@ -621,12 +621,12 @@ func (p *noder) param(param *syntax.Field, dddOk, final bool) *ir.Node {
p.errorAt(param.Name.Pos(), "syntax error: cannot use ... with non-final parameter %s", param.Name.Value)
}
}
typ.Op = ir.OTARRAY
typ.Right = typ.Left
typ.Left = nil
typ.SetOp(ir.OTARRAY)
typ.SetRight(typ.Left())
typ.SetLeft(nil)
n.SetIsDDD(true)
if n.Left != nil {
n.Left.SetIsDDD(true)
if n.Left() != nil {
n.Left().SetIsDDD(true)
}
}
@ -658,20 +658,20 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
case *syntax.BasicLit:
n := ir.NewLiteral(p.basicLit(expr))
if expr.Kind == syntax.RuneLit {
n.Type = types.UntypedRune
n.SetType(types.UntypedRune)
}
n.SetDiag(expr.Bad) // avoid follow-on errors if there was a syntax error
return n
case *syntax.CompositeLit:
n := p.nod(expr, ir.OCOMPLIT, nil, nil)
if expr.Type != nil {
n.Right = p.expr(expr.Type)
n.SetRight(p.expr(expr.Type))
}
l := p.exprs(expr.ElemList)
for i, e := range l {
l[i] = p.wrapname(expr.ElemList[i], e)
}
n.List.Set(l)
n.PtrList().Set(l)
base.Pos = p.makeXPos(expr.Rbrace)
return n
case *syntax.KeyValueExpr:
@ -684,12 +684,12 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
case *syntax.SelectorExpr:
// parser.new_dotname
obj := p.expr(expr.X)
if obj.Op == ir.OPACK {
obj.Name.SetUsed(true)
return importName(obj.Name.Pkg.Lookup(expr.Sel.Value))
if obj.Op() == ir.OPACK {
obj.Name().SetUsed(true)
return importName(obj.Name().Pkg.Lookup(expr.Sel.Value))
}
n := nodSym(ir.OXDOT, obj, p.name(expr.Sel))
n.Pos = p.pos(expr) // lineno may have been changed by p.expr(expr.X)
n.SetPos(p.pos(expr)) // lineno may have been changed by p.expr(expr.X)
return n
case *syntax.IndexExpr:
return p.nod(expr, ir.OINDEX, p.expr(expr.X), p.expr(expr.Index))
@ -720,7 +720,7 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
return p.nod(expr, p.binOp(expr.Op), x, p.expr(expr.Y))
case *syntax.CallExpr:
n := p.nod(expr, ir.OCALL, p.expr(expr.Fun), nil)
n.List.Set(p.exprs(expr.ArgList))
n.PtrList().Set(p.exprs(expr.ArgList))
n.SetIsDDD(expr.HasDots)
return n
@ -752,9 +752,9 @@ func (p *noder) expr(expr syntax.Expr) *ir.Node {
case *syntax.TypeSwitchGuard:
n := p.nod(expr, ir.OTYPESW, nil, p.expr(expr.X))
if expr.Lhs != nil {
n.Left = p.declName(expr.Lhs)
if ir.IsBlank(n.Left) {
base.Errorf("invalid variable name %v in type switch", n.Left)
n.SetLeft(p.declName(expr.Lhs))
if ir.IsBlank(n.Left()) {
base.Errorf("invalid variable name %v in type switch", n.Left())
}
}
return n
@ -804,7 +804,7 @@ func (p *noder) sum(x syntax.Expr) *ir.Node {
chunks := make([]string, 0, 1)
n := p.expr(x)
if ir.IsConst(n, constant.String) && n.Sym == nil {
if ir.IsConst(n, constant.String) && n.Sym() == nil {
nstr = n
chunks = append(chunks, nstr.StringVal())
}
@ -813,7 +813,7 @@ func (p *noder) sum(x syntax.Expr) *ir.Node {
add := adds[i]
r := p.expr(add.Y)
if ir.IsConst(r, constant.String) && r.Sym == nil {
if ir.IsConst(r, constant.String) && r.Sym() == nil {
if nstr != nil {
// Collapse r into nstr instead of adding to n.
chunks = append(chunks, r.StringVal())
@ -880,7 +880,7 @@ func (p *noder) structType(expr *syntax.StructType) *ir.Node {
p.setlineno(expr)
n := p.nod(expr, ir.OTSTRUCT, nil, nil)
n.List.Set(l)
n.PtrList().Set(l)
return n
}
@ -894,7 +894,7 @@ func (p *noder) interfaceType(expr *syntax.InterfaceType) *ir.Node {
} else {
mname := p.name(method.Name)
sig := p.typeExpr(method.Type)
sig.Left = fakeRecv()
sig.SetLeft(fakeRecv())
n = p.nodSym(method, ir.ODCLFIELD, sig, mname)
ifacedcl(n)
}
@ -902,7 +902,7 @@ func (p *noder) interfaceType(expr *syntax.InterfaceType) *ir.Node {
}
n := p.nod(expr, ir.OTINTER, nil, nil)
n.List.Set(l)
n.PtrList().Set(l)
return n
}
@ -910,8 +910,8 @@ func (p *noder) packname(expr syntax.Expr) *types.Sym {
switch expr := expr.(type) {
case *syntax.Name:
name := p.name(expr)
if n := oldname(name); n.Name != nil && n.Name.Pack != nil {
n.Name.Pack.Name.SetUsed(true)
if n := oldname(name); n.Name() != nil && n.Name().Pack != nil {
n.Name().Pack.Name().SetUsed(true)
}
return name
case *syntax.SelectorExpr:
@ -922,12 +922,12 @@ func (p *noder) packname(expr syntax.Expr) *types.Sym {
return name
}
var pkg *types.Pkg
if def.Op != ir.OPACK {
if def.Op() != ir.OPACK {
base.Errorf("%v is not a package", name)
pkg = ir.LocalPkg
} else {
def.Name.SetUsed(true)
pkg = def.Name.Pkg
def.Name().SetUsed(true)
pkg = def.Name().Pkg
}
return pkg.Lookup(expr.Sel.Value)
}
@ -948,7 +948,7 @@ func (p *noder) embedded(typ syntax.Expr) *ir.Node {
n.SetEmbedded(true)
if isStar {
n.Left = p.nod(op, ir.ODEREF, n.Left, nil)
n.SetLeft(p.nod(op, ir.ODEREF, n.Left(), nil))
}
return n
}
@ -962,8 +962,8 @@ func (p *noder) stmtsFall(stmts []syntax.Stmt, fallOK bool) []*ir.Node {
for i, stmt := range stmts {
s := p.stmtFall(stmt, fallOK && i+1 == len(stmts))
if s == nil {
} else if s.Op == ir.OBLOCK && s.Ninit.Len() == 0 {
nodes = append(nodes, s.List.Slice()...)
} else if s.Op() == ir.OBLOCK && s.Init().Len() == 0 {
nodes = append(nodes, s.List().Slice()...)
} else {
nodes = append(nodes, s)
}
@ -1010,12 +1010,12 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *ir.Node {
if len(lhs) == 1 && len(rhs) == 1 {
// common case
n.Left = lhs[0]
n.Right = rhs[0]
n.SetLeft(lhs[0])
n.SetRight(rhs[0])
} else {
n.Op = ir.OAS2
n.List.Set(lhs)
n.Rlist.Set(rhs)
n.SetOp(ir.OAS2)
n.PtrList().Set(lhs)
n.PtrRlist().Set(rhs)
}
return n
@ -1038,7 +1038,7 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *ir.Node {
}
n := p.nod(stmt, op, nil, nil)
if stmt.Label != nil {
n.Sym = p.name(stmt.Label)
n.SetSym(p.name(stmt.Label))
}
return n
case *syntax.CallStmt:
@ -1058,17 +1058,17 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) *ir.Node {
results = p.exprList(stmt.Results)
}
n := p.nod(stmt, ir.ORETURN, nil, nil)
n.List.Set(results)
if n.List.Len() == 0 && Curfn != nil {
for _, ln := range Curfn.Func.Dcl {
n.PtrList().Set(results)
if n.List().Len() == 0 && Curfn != nil {
for _, ln := range Curfn.Func().Dcl {
if ln.Class() == ir.PPARAM {
continue
}
if ln.Class() != ir.PPARAMOUT {
break
}
if ir.AsNode(ln.Sym.Def) != ln {
base.Errorf("%s is shadowed during return", ln.Sym.Name)
if ir.AsNode(ln.Sym().Def) != ln {
base.Errorf("%s is shadowed during return", ln.Sym().Name)
}
}
}
@ -1134,13 +1134,13 @@ func (p *noder) assignList(expr syntax.Expr, defn *ir.Node, colas bool) []*ir.No
newOrErr = true
n := NewName(sym)
declare(n, dclcontext)
n.Name.Defn = defn
defn.Ninit.Append(ir.Nod(ir.ODCL, n, nil))
n.Name().Defn = defn
defn.PtrInit().Append(ir.Nod(ir.ODCL, n, nil))
res[i] = n
}
if !newOrErr {
base.ErrorfAt(defn.Pos, "no new variables on left side of :=")
base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
}
return res
}
@ -1156,18 +1156,18 @@ func (p *noder) ifStmt(stmt *syntax.IfStmt) *ir.Node {
p.openScope(stmt.Pos())
n := p.nod(stmt, ir.OIF, nil, nil)
if stmt.Init != nil {
n.Ninit.Set1(p.stmt(stmt.Init))
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Cond != nil {
n.Left = p.expr(stmt.Cond)
n.SetLeft(p.expr(stmt.Cond))
}
n.Nbody.Set(p.blockStmt(stmt.Then))
n.PtrBody().Set(p.blockStmt(stmt.Then))
if stmt.Else != nil {
e := p.stmt(stmt.Else)
if e.Op == ir.OBLOCK && e.Ninit.Len() == 0 {
n.Rlist.Set(e.List.Slice())
if e.Op() == ir.OBLOCK && e.Init().Len() == 0 {
n.PtrRlist().Set(e.List().Slice())
} else {
n.Rlist.Set1(e)
n.PtrRlist().Set1(e)
}
}
p.closeAnotherScope()
@ -1184,21 +1184,21 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) *ir.Node {
n = p.nod(r, ir.ORANGE, nil, p.expr(r.X))
if r.Lhs != nil {
n.List.Set(p.assignList(r.Lhs, n, r.Def))
n.PtrList().Set(p.assignList(r.Lhs, n, r.Def))
}
} else {
n = p.nod(stmt, ir.OFOR, nil, nil)
if stmt.Init != nil {
n.Ninit.Set1(p.stmt(stmt.Init))
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Cond != nil {
n.Left = p.expr(stmt.Cond)
n.SetLeft(p.expr(stmt.Cond))
}
if stmt.Post != nil {
n.Right = p.stmt(stmt.Post)
n.SetRight(p.stmt(stmt.Post))
}
}
n.Nbody.Set(p.blockStmt(stmt.Body))
n.PtrBody().Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
@ -1207,17 +1207,17 @@ func (p *noder) switchStmt(stmt *syntax.SwitchStmt) *ir.Node {
p.openScope(stmt.Pos())
n := p.nod(stmt, ir.OSWITCH, nil, nil)
if stmt.Init != nil {
n.Ninit.Set1(p.stmt(stmt.Init))
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Tag != nil {
n.Left = p.expr(stmt.Tag)
n.SetLeft(p.expr(stmt.Tag))
}
tswitch := n.Left
if tswitch != nil && tswitch.Op != ir.OTYPESW {
tswitch := n.Left()
if tswitch != nil && tswitch.Op() != ir.OTYPESW {
tswitch = nil
}
n.List.Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
n.PtrList().Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
p.closeScope(stmt.Rbrace)
return n
@ -1234,14 +1234,14 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.Node, rbra
n := p.nod(clause, ir.OCASE, nil, nil)
if clause.Cases != nil {
n.List.Set(p.exprList(clause.Cases))
n.PtrList().Set(p.exprList(clause.Cases))
}
if tswitch != nil && tswitch.Left != nil {
nn := NewName(tswitch.Left.Sym)
if tswitch != nil && tswitch.Left() != nil {
nn := NewName(tswitch.Left().Sym())
declare(nn, dclcontext)
n.Rlist.Set1(nn)
n.PtrRlist().Set1(nn)
// keep track of the instances for reporting unused
nn.Name.Defn = tswitch
nn.Name().Defn = tswitch
}
// Trim trailing empty statements. We omit them from
@ -1255,8 +1255,8 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.Node, rbra
body = body[:len(body)-1]
}
n.Nbody.Set(p.stmtsFall(body, true))
if l := n.Nbody.Len(); l > 0 && n.Nbody.Index(l-1).Op == ir.OFALL {
n.PtrBody().Set(p.stmtsFall(body, true))
if l := n.Body().Len(); l > 0 && n.Body().Index(l-1).Op() == ir.OFALL {
if tswitch != nil {
base.Errorf("cannot fallthrough in type switch")
}
@ -1275,7 +1275,7 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.Node, rbra
func (p *noder) selectStmt(stmt *syntax.SelectStmt) *ir.Node {
n := p.nod(stmt, ir.OSELECT, nil, nil)
n.List.Set(p.commClauses(stmt.Body, stmt.Rbrace))
n.PtrList().Set(p.commClauses(stmt.Body, stmt.Rbrace))
return n
}
@ -1290,9 +1290,9 @@ func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []*
n := p.nod(clause, ir.OCASE, nil, nil)
if clause.Comm != nil {
n.List.Set1(p.stmt(clause.Comm))
n.PtrList().Set1(p.stmt(clause.Comm))
}
n.Nbody.Set(p.stmts(clause.Body))
n.PtrBody().Set(p.stmts(clause.Body))
nodes = append(nodes, n)
}
if len(clauses) > 0 {
@ -1309,11 +1309,11 @@ func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) *ir.Node {
ls = p.stmtFall(label.Stmt, fallOK)
}
lhs.Name.Defn = ls
lhs.Name().Defn = ls
l := []*ir.Node{lhs}
if ls != nil {
if ls.Op == ir.OBLOCK && ls.Ninit.Len() == 0 {
l = append(l, ls.List.Slice()...)
if ls.Op() == ir.OBLOCK && ls.Init().Len() == 0 {
l = append(l, ls.List().Slice()...)
} else {
l = append(l, ls)
}
@ -1451,9 +1451,9 @@ func (p *noder) mkname(name *syntax.Name) *ir.Node {
func (p *noder) wrapname(n syntax.Node, x *ir.Node) *ir.Node {
// These nodes do not carry line numbers.
// Introduce a wrapper node to give them the correct line.
switch x.Op {
switch x.Op() {
case ir.OTYPE, ir.OLITERAL:
if x.Sym == nil {
if x.Sym() == nil {
break
}
fallthrough
@ -1470,7 +1470,7 @@ func (p *noder) nod(orig syntax.Node, op ir.Op, left, right *ir.Node) *ir.Node {
func (p *noder) nodSym(orig syntax.Node, op ir.Op, left *ir.Node, sym *types.Sym) *ir.Node {
n := nodSym(op, left, sym)
n.Pos = p.pos(orig)
n.SetPos(p.pos(orig))
return n
}
@ -1670,8 +1670,8 @@ func safeArg(name string) bool {
func mkname(sym *types.Sym) *ir.Node {
n := oldname(sym)
if n.Name != nil && n.Name.Pack != nil {
n.Name.Pack.Name.SetUsed(true)
if n.Name() != nil && n.Name().Pack != nil {
n.Name().Pack.Name().SetUsed(true)
}
return n
}

View file

@ -142,7 +142,7 @@ func dumpdata() {
for {
for i := xtops; i < len(xtop); i++ {
n := xtop[i]
if n.Op == ir.ODCLFUNC {
if n.Op() == ir.ODCLFUNC {
funccompile(n)
}
}
@ -204,12 +204,12 @@ func addptabs() {
return
}
for _, exportn := range exportlist {
s := exportn.Sym
s := exportn.Sym()
n := ir.AsNode(s.Def)
if n == nil {
continue
}
if n.Op != ir.ONAME {
if n.Op() != ir.ONAME {
continue
}
if !types.IsExported(s.Name) {
@ -218,37 +218,37 @@ func addptabs() {
if s.Pkg.Name != "main" {
continue
}
if n.Type.Etype == types.TFUNC && n.Class() == ir.PFUNC {
if n.Type().Etype == types.TFUNC && n.Class() == ir.PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: ir.AsNode(s.Def).Type})
ptabs = append(ptabs, ptabEntry{s: s, t: ir.AsNode(s.Def).Type()})
} else {
// variable
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(ir.AsNode(s.Def).Type)})
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(ir.AsNode(s.Def).Type())})
}
}
}
func dumpGlobal(n *ir.Node) {
if n.Type == nil {
if n.Type() == nil {
base.Fatalf("external %v nil type\n", n)
}
if n.Class() == ir.PFUNC {
return
}
if n.Sym.Pkg != ir.LocalPkg {
if n.Sym().Pkg != ir.LocalPkg {
return
}
dowidth(n.Type)
dowidth(n.Type())
ggloblnod(n)
}
func dumpGlobalConst(n *ir.Node) {
// only export typed constants
t := n.Type
t := n.Type()
if t == nil {
return
}
if n.Sym.Pkg != ir.LocalPkg {
if n.Sym().Pkg != ir.LocalPkg {
return
}
// only export integer constants for now
@ -263,13 +263,13 @@ func dumpGlobalConst(n *ir.Node) {
return
}
}
base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym.Name, typesymname(t), ir.Int64Val(t, v))
base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym().Name, typesymname(t), ir.Int64Val(t, v))
}
func dumpglobls() {
// add globals
for _, n := range externdcl {
switch n.Op {
switch n.Op() {
case ir.ONAME:
dumpGlobal(n)
case ir.OLITERAL:
@ -414,7 +414,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
if readonly {
sym = stringsym(pos, string(data))
} else {
sym = slicedata(pos, string(data)).Sym.Linksym()
sym = slicedata(pos, string(data)).Sym().Linksym()
}
if len(hash) > 0 {
sum := sha256.Sum256(data)
@ -462,7 +462,7 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
} else {
// Emit a zero-length data symbol
// and then fix up length and content to use file.
symdata = slicedata(pos, "").Sym.Linksym()
symdata = slicedata(pos, "").Sym().Linksym()
symdata.Size = size
symdata.Type = objabi.SNOPTRDATA
info := symdata.NewFileInfo()
@ -490,10 +490,10 @@ func slicedata(pos src.XPos, s string) *ir.Node {
}
func slicebytes(nam *ir.Node, s string) {
if nam.Op != ir.ONAME {
if nam.Op() != ir.ONAME {
base.Fatalf("slicebytes %v", nam)
}
slicesym(nam, slicedata(nam.Pos, s), int64(len(s)))
slicesym(nam, slicedata(nam.Pos(), s), int64(len(s)))
}
func dstringdata(s *obj.LSym, off int, t string, pos src.XPos, what string) int {
@ -531,12 +531,12 @@ func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int {
// slicesym writes a static slice symbol {&arr, lencap, lencap} to n.
// arr must be an ONAME. slicesym does not modify n.
func slicesym(n, arr *ir.Node, lencap int64) {
s := n.Sym.Linksym()
off := n.Xoffset
if arr.Op != ir.ONAME {
s := n.Sym().Linksym()
off := n.Offset()
if arr.Op() != ir.ONAME {
base.Fatalf("slicesym non-name arr %v", arr)
}
s.WriteAddr(base.Ctxt, off, Widthptr, arr.Sym.Linksym(), arr.Xoffset)
s.WriteAddr(base.Ctxt, off, Widthptr, arr.Sym().Linksym(), arr.Offset())
s.WriteInt(base.Ctxt, off+sliceLenOffset, Widthptr, lencap)
s.WriteInt(base.Ctxt, off+sliceCapOffset, Widthptr, lencap)
}
@ -544,88 +544,88 @@ func slicesym(n, arr *ir.Node, lencap int64) {
// addrsym writes the static address of a to n. a must be an ONAME.
// Neither n nor a is modified.
func addrsym(n, a *ir.Node) {
if n.Op != ir.ONAME {
base.Fatalf("addrsym n op %v", n.Op)
if n.Op() != ir.ONAME {
base.Fatalf("addrsym n op %v", n.Op())
}
if n.Sym == nil {
if n.Sym() == nil {
base.Fatalf("addrsym nil n sym")
}
if a.Op != ir.ONAME {
base.Fatalf("addrsym a op %v", a.Op)
if a.Op() != ir.ONAME {
base.Fatalf("addrsym a op %v", a.Op())
}
s := n.Sym.Linksym()
s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, a.Sym.Linksym(), a.Xoffset)
s := n.Sym().Linksym()
s.WriteAddr(base.Ctxt, n.Offset(), Widthptr, a.Sym().Linksym(), a.Offset())
}
// pfuncsym writes the static address of f to n. f must be a global function.
// Neither n nor f is modified.
func pfuncsym(n, f *ir.Node) {
if n.Op != ir.ONAME {
base.Fatalf("pfuncsym n op %v", n.Op)
if n.Op() != ir.ONAME {
base.Fatalf("pfuncsym n op %v", n.Op())
}
if n.Sym == nil {
if n.Sym() == nil {
base.Fatalf("pfuncsym nil n sym")
}
if f.Class() != ir.PFUNC {
base.Fatalf("pfuncsym class not PFUNC %d", f.Class())
}
s := n.Sym.Linksym()
s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, funcsym(f.Sym).Linksym(), f.Xoffset)
s := n.Sym().Linksym()
s.WriteAddr(base.Ctxt, n.Offset(), Widthptr, funcsym(f.Sym()).Linksym(), f.Offset())
}
// litsym writes the static literal c to n.
// Neither n nor c is modified.
func litsym(n, c *ir.Node, wid int) {
if n.Op != ir.ONAME {
base.Fatalf("litsym n op %v", n.Op)
if n.Op() != ir.ONAME {
base.Fatalf("litsym n op %v", n.Op())
}
if n.Sym == nil {
if n.Sym() == nil {
base.Fatalf("litsym nil n sym")
}
if !types.Identical(n.Type, c.Type) {
base.Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type, c, c.Type)
if !types.Identical(n.Type(), c.Type()) {
base.Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type(), c, c.Type())
}
if c.Op == ir.ONIL {
if c.Op() == ir.ONIL {
return
}
if c.Op != ir.OLITERAL {
base.Fatalf("litsym c op %v", c.Op)
if c.Op() != ir.OLITERAL {
base.Fatalf("litsym c op %v", c.Op())
}
s := n.Sym.Linksym()
s := n.Sym().Linksym()
switch u := c.Val(); u.Kind() {
case constant.Bool:
i := int64(obj.Bool2int(constant.BoolVal(u)))
s.WriteInt(base.Ctxt, n.Xoffset, wid, i)
s.WriteInt(base.Ctxt, n.Offset(), wid, i)
case constant.Int:
s.WriteInt(base.Ctxt, n.Xoffset, wid, ir.Int64Val(n.Type, u))
s.WriteInt(base.Ctxt, n.Offset(), wid, ir.Int64Val(n.Type(), u))
case constant.Float:
f, _ := constant.Float64Val(u)
switch n.Type.Etype {
switch n.Type().Etype {
case types.TFLOAT32:
s.WriteFloat32(base.Ctxt, n.Xoffset, float32(f))
s.WriteFloat32(base.Ctxt, n.Offset(), float32(f))
case types.TFLOAT64:
s.WriteFloat64(base.Ctxt, n.Xoffset, f)
s.WriteFloat64(base.Ctxt, n.Offset(), f)
}
case constant.Complex:
re, _ := constant.Float64Val(constant.Real(u))
im, _ := constant.Float64Val(constant.Imag(u))
switch n.Type.Etype {
switch n.Type().Etype {
case types.TCOMPLEX64:
s.WriteFloat32(base.Ctxt, n.Xoffset, float32(re))
s.WriteFloat32(base.Ctxt, n.Xoffset+4, float32(im))
s.WriteFloat32(base.Ctxt, n.Offset(), float32(re))
s.WriteFloat32(base.Ctxt, n.Offset()+4, float32(im))
case types.TCOMPLEX128:
s.WriteFloat64(base.Ctxt, n.Xoffset, re)
s.WriteFloat64(base.Ctxt, n.Xoffset+8, im)
s.WriteFloat64(base.Ctxt, n.Offset(), re)
s.WriteFloat64(base.Ctxt, n.Offset()+8, im)
}
case constant.String:
i := constant.StringVal(u)
symdata := stringsym(n.Pos, i)
s.WriteAddr(base.Ctxt, n.Xoffset, Widthptr, symdata, 0)
s.WriteInt(base.Ctxt, n.Xoffset+int64(Widthptr), Widthptr, int64(len(i)))
symdata := stringsym(n.Pos(), i)
s.WriteAddr(base.Ctxt, n.Offset(), Widthptr, symdata, 0)
s.WriteInt(base.Ctxt, n.Offset()+int64(Widthptr), Widthptr, int64(len(i)))
default:
base.Fatalf("litsym unhandled OLITERAL %v", c)

File diff suppressed because it is too large Load diff

View file

@ -28,30 +28,30 @@ var (
)
func emitptrargsmap(fn *ir.Node) {
if ir.FuncName(fn) == "_" || fn.Func.Nname.Sym.Linkname != "" {
if ir.FuncName(fn) == "_" || fn.Func().Nname.Sym().Linkname != "" {
return
}
lsym := base.Ctxt.Lookup(fn.Func.LSym.Name + ".args_stackmap")
lsym := base.Ctxt.Lookup(fn.Func().LSym.Name + ".args_stackmap")
nptr := int(fn.Type.ArgWidth() / int64(Widthptr))
nptr := int(fn.Type().ArgWidth() / int64(Widthptr))
bv := bvalloc(int32(nptr) * 2)
nbitmap := 1
if fn.Type.NumResults() > 0 {
if fn.Type().NumResults() > 0 {
nbitmap = 2
}
off := duint32(lsym, 0, uint32(nbitmap))
off = duint32(lsym, off, uint32(bv.n))
if ir.IsMethod(fn) {
onebitwalktype1(fn.Type.Recvs(), 0, bv)
onebitwalktype1(fn.Type().Recvs(), 0, bv)
}
if fn.Type.NumParams() > 0 {
onebitwalktype1(fn.Type.Params(), 0, bv)
if fn.Type().NumParams() > 0 {
onebitwalktype1(fn.Type().Params(), 0, bv)
}
off = dbvec(lsym, off, bv)
if fn.Type.NumResults() > 0 {
onebitwalktype1(fn.Type.Results(), 0, bv)
if fn.Type().NumResults() > 0 {
onebitwalktype1(fn.Type().Results(), 0, bv)
off = dbvec(lsym, off, bv)
}
@ -74,30 +74,30 @@ func cmpstackvarlt(a, b *ir.Node) bool {
}
if a.Class() != ir.PAUTO {
return a.Xoffset < b.Xoffset
return a.Offset() < b.Offset()
}
if a.Name.Used() != b.Name.Used() {
return a.Name.Used()
if a.Name().Used() != b.Name().Used() {
return a.Name().Used()
}
ap := a.Type.HasPointers()
bp := b.Type.HasPointers()
ap := a.Type().HasPointers()
bp := b.Type().HasPointers()
if ap != bp {
return ap
}
ap = a.Name.Needzero()
bp = b.Name.Needzero()
ap = a.Name().Needzero()
bp = b.Name().Needzero()
if ap != bp {
return ap
}
if a.Type.Width != b.Type.Width {
return a.Type.Width > b.Type.Width
if a.Type().Width != b.Type().Width {
return a.Type().Width > b.Type().Width
}
return a.Sym.Name < b.Sym.Name
return a.Sym().Name < b.Sym().Name
}
// byStackvar implements sort.Interface for []*Node using cmpstackvarlt.
@ -110,18 +110,18 @@ func (s byStackVar) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s *ssafn) AllocFrame(f *ssa.Func) {
s.stksize = 0
s.stkptrsize = 0
fn := s.curfn.Func
fn := s.curfn.Func()
// Mark the PAUTO's unused.
for _, ln := range fn.Dcl {
if ln.Class() == ir.PAUTO {
ln.Name.SetUsed(false)
ln.Name().SetUsed(false)
}
}
for _, l := range f.RegAlloc {
if ls, ok := l.(ssa.LocalSlot); ok {
ls.N.Name.SetUsed(true)
ls.N.Name().SetUsed(true)
}
}
@ -133,10 +133,10 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global.
if n != nodfp {
n.Name.SetUsed(true)
n.Name().SetUsed(true)
}
case ir.PAUTO:
n.Name.SetUsed(true)
n.Name().SetUsed(true)
}
}
if !scratchUsed {
@ -155,16 +155,16 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
for i, n := range fn.Dcl {
if n.Op != ir.ONAME || n.Class() != ir.PAUTO {
if n.Op() != ir.ONAME || n.Class() != ir.PAUTO {
continue
}
if !n.Name.Used() {
if !n.Name().Used() {
fn.Dcl = fn.Dcl[:i]
break
}
dowidth(n.Type)
w := n.Type.Width
dowidth(n.Type())
w := n.Type().Width
if w >= thearch.MAXWIDTH || w < 0 {
base.Fatalf("bad width")
}
@ -176,8 +176,8 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
w = 1
}
s.stksize += w
s.stksize = Rnd(s.stksize, int64(n.Type.Align))
if n.Type.HasPointers() {
s.stksize = Rnd(s.stksize, int64(n.Type().Align))
if n.Type().HasPointers() {
s.stkptrsize = s.stksize
lastHasPtr = true
} else {
@ -186,7 +186,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
if thearch.LinkArch.InFamily(sys.MIPS, sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) {
s.stksize = Rnd(s.stksize, int64(Widthptr))
}
n.Xoffset = -s.stksize
n.SetOffset(-s.stksize)
}
s.stksize = Rnd(s.stksize, int64(Widthreg))
@ -195,10 +195,10 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
func funccompile(fn *ir.Node) {
if Curfn != nil {
base.Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym)
base.Fatalf("funccompile %v inside %v", fn.Func().Nname.Sym(), Curfn.Func().Nname.Sym())
}
if fn.Type == nil {
if fn.Type() == nil {
if base.Errors() == 0 {
base.Fatalf("funccompile missing type")
}
@ -206,11 +206,11 @@ func funccompile(fn *ir.Node) {
}
// assign parameter offsets
dowidth(fn.Type)
dowidth(fn.Type())
if fn.Nbody.Len() == 0 {
if fn.Body().Len() == 0 {
// Initialize ABI wrappers if necessary.
initLSym(fn.Func, false)
initLSym(fn.Func(), false)
emitptrargsmap(fn)
return
}
@ -234,7 +234,7 @@ func compile(fn *ir.Node) {
// Set up the function's LSym early to avoid data races with the assemblers.
// Do this before walk, as walk needs the LSym to set attributes/relocations
// (e.g. in markTypeUsedInInterface).
initLSym(fn.Func, true)
initLSym(fn.Func(), true)
walk(fn)
if base.Errors() > errorsBefore {
@ -259,15 +259,15 @@ func compile(fn *ir.Node) {
// be types of stack objects. We need to do this here
// because symbols must be allocated before the parallel
// phase of the compiler.
for _, n := range fn.Func.Dcl {
for _, n := range fn.Func().Dcl {
switch n.Class() {
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
if livenessShouldTrack(n) && n.Name.Addrtaken() {
dtypesym(n.Type)
if livenessShouldTrack(n) && n.Name().Addrtaken() {
dtypesym(n.Type())
// Also make sure we allocate a linker symbol
// for the stack object data, for the same reason.
if fn.Func.LSym.Func().StackObjects == nil {
fn.Func.LSym.Func().StackObjects = base.Ctxt.Lookup(fn.Func.LSym.Name + ".stkobj")
if fn.Func().LSym.Func().StackObjects == nil {
fn.Func().LSym.Func().StackObjects = base.Ctxt.Lookup(fn.Func().LSym.Name + ".stkobj")
}
}
}
@ -300,13 +300,13 @@ func compilenow(fn *ir.Node) bool {
// inline candidate but then never inlined (presumably because we
// found no call sites).
func isInlinableButNotInlined(fn *ir.Node) bool {
if fn.Func.Nname.Func.Inl == nil {
if fn.Func().Nname.Func().Inl == nil {
return false
}
if fn.Sym == nil {
if fn.Sym() == nil {
return true
}
return !fn.Sym.Linksym().WasInlined()
return !fn.Sym().Linksym().WasInlined()
}
const maxStackSize = 1 << 30
@ -318,9 +318,9 @@ const maxStackSize = 1 << 30
func compileSSA(fn *ir.Node, worker int) {
f := buildssa(fn, worker)
// Note: check arg size to fix issue 25507.
if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type.ArgWidth() >= maxStackSize {
if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type().ArgWidth() >= maxStackSize {
largeStackFramesMu.Lock()
largeStackFrames = append(largeStackFrames, largeStack{locals: f.Frontend().(*ssafn).stksize, args: fn.Type.ArgWidth(), pos: fn.Pos})
largeStackFrames = append(largeStackFrames, largeStack{locals: f.Frontend().(*ssafn).stksize, args: fn.Type().ArgWidth(), pos: fn.Pos()})
largeStackFramesMu.Unlock()
return
}
@ -336,14 +336,14 @@ func compileSSA(fn *ir.Node, worker int) {
if pp.Text.To.Offset >= maxStackSize {
largeStackFramesMu.Lock()
locals := f.Frontend().(*ssafn).stksize
largeStackFrames = append(largeStackFrames, largeStack{locals: locals, args: fn.Type.ArgWidth(), callee: pp.Text.To.Offset - locals, pos: fn.Pos})
largeStackFrames = append(largeStackFrames, largeStack{locals: locals, args: fn.Type().ArgWidth(), callee: pp.Text.To.Offset - locals, pos: fn.Pos()})
largeStackFramesMu.Unlock()
return
}
pp.Flush() // assemble, fill in boilerplate, etc.
// fieldtrack must be called after pp.Flush. See issue 20014.
fieldtrack(pp.Text.From.Sym, fn.Func.FieldTrack)
fieldtrack(pp.Text.From.Sym, fn.Func().FieldTrack)
}
func init() {
@ -371,7 +371,7 @@ func compileFunctions() {
// since they're most likely to be the slowest.
// This helps avoid stragglers.
sort.Slice(compilequeue, func(i, j int) bool {
return compilequeue[i].Nbody.Len() > compilequeue[j].Nbody.Len()
return compilequeue[i].Body().Len() > compilequeue[j].Body().Len()
})
}
var wg sync.WaitGroup
@ -399,8 +399,8 @@ func compileFunctions() {
func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, dwarf.InlCalls) {
fn := curfn.(*ir.Node)
if fn.Func.Nname != nil {
if expect := fn.Func.Nname.Sym.Linksym(); fnsym != expect {
if fn.Func().Nname != nil {
if expect := fn.Func().Nname.Sym().Linksym(); fnsym != expect {
base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
}
}
@ -430,18 +430,18 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
//
// These two adjustments keep toolstash -cmp working for now.
// Deciding the right answer is, as they say, future work.
isODCLFUNC := fn.Op == ir.ODCLFUNC
isODCLFUNC := fn.Op() == ir.ODCLFUNC
var apdecls []*ir.Node
// Populate decls for fn.
if isODCLFUNC {
for _, n := range fn.Func.Dcl {
if n.Op != ir.ONAME { // might be OTYPE or OLITERAL
for _, n := range fn.Func().Dcl {
if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL
continue
}
switch n.Class() {
case ir.PAUTO:
if !n.Name.Used() {
if !n.Name().Used() {
// Text == nil -> generating abstract function
if fnsym.Func().Text != nil {
base.Fatalf("debuginfo unused node (AllocFrame should truncate fn.Func.Dcl)")
@ -457,7 +457,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
}
}
decls, dwarfVars := createDwarfVars(fnsym, isODCLFUNC, fn.Func, apdecls)
decls, dwarfVars := createDwarfVars(fnsym, isODCLFUNC, fn.Func(), apdecls)
// For each type referenced by the functions auto vars but not
// already referenced by a dwarf var, attach an R_USETYPE relocation to
@ -478,7 +478,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
var varScopes []ir.ScopeID
for _, decl := range decls {
pos := declPos(decl)
varScopes = append(varScopes, findScope(fn.Func.Marks, pos))
varScopes = append(varScopes, findScope(fn.Func().Marks, pos))
}
scopes := assembleScopes(fnsym, fn, dwarfVars, varScopes)
@ -490,7 +490,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
}
func declPos(decl *ir.Node) src.XPos {
if decl.Name.Defn != nil && (decl.Name.Captured() || decl.Name.Byval()) {
if decl.Name().Defn != nil && (decl.Name().Captured() || decl.Name().Byval()) {
// It's not clear which position is correct for captured variables here:
// * decl.Pos is the wrong position for captured variables, in the inner
// function, but it is the right position in the outer function.
@ -505,9 +505,9 @@ func declPos(decl *ir.Node) src.XPos {
// case statement.
// This code is probably wrong for type switch variables that are also
// captured.
return decl.Name.Defn.Pos
return decl.Name().Defn.Pos()
}
return decl.Pos
return decl.Pos()
}
// createSimpleVars creates a DWARF entry for every variable declared in the
@ -530,7 +530,7 @@ func createSimpleVars(fnsym *obj.LSym, apDecls []*ir.Node) ([]*ir.Node, []*dwarf
func createSimpleVar(fnsym *obj.LSym, n *ir.Node) *dwarf.Var {
var abbrev int
offs := n.Xoffset
offs := n.Offset()
switch n.Class() {
case ir.PAUTO:
@ -550,22 +550,22 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Node) *dwarf.Var {
base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
}
typename := dwarf.InfoPrefix + typesymname(n.Type)
typename := dwarf.InfoPrefix + typesymname(n.Type())
delete(fnsym.Func().Autot, ngotype(n).Linksym())
inlIndex := 0
if base.Flag.GenDwarfInl > 1 {
if n.Name.InlFormal() || n.Name.InlLocal() {
inlIndex = posInlIndex(n.Pos) + 1
if n.Name.InlFormal() {
if n.Name().InlFormal() || n.Name().InlLocal() {
inlIndex = posInlIndex(n.Pos()) + 1
if n.Name().InlFormal() {
abbrev = dwarf.DW_ABRV_PARAM
}
}
}
declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym.Name,
Name: n.Sym().Name,
IsReturnValue: n.Class() == ir.PPARAMOUT,
IsInlFormal: n.Name.InlFormal(),
IsInlFormal: n.Name().InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
Type: base.Ctxt.Lookup(typename),
@ -637,11 +637,11 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
if _, found := selected[n]; found {
continue
}
c := n.Sym.Name[0]
if c == '.' || n.Type.IsUntyped() {
c := n.Sym().Name[0]
if c == '.' || n.Type().IsUntyped() {
continue
}
if n.Class() == ir.PPARAM && !canSSAType(n.Type) {
if n.Class() == ir.PPARAM && !canSSAType(n.Type()) {
// SSA-able args get location lists, and may move in and
// out of registers, so those are handled elsewhere.
// Autos and named output params seem to get handled
@ -653,7 +653,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
decls = append(decls, n)
continue
}
typename := dwarf.InfoPrefix + typesymname(n.Type)
typename := dwarf.InfoPrefix + typesymname(n.Type())
decls = append(decls, n)
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
isReturnValue := (n.Class() == ir.PPARAMOUT)
@ -667,7 +667,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
// misleading location for the param (we want pointer-to-heap
// and not stack).
// TODO(thanm): generate a better location expression
stackcopy := n.Name.Param.Stackcopy
stackcopy := n.Name().Param.Stackcopy
if stackcopy != nil && (stackcopy.Class() == ir.PPARAM || stackcopy.Class() == ir.PPARAMOUT) {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
isReturnValue = (stackcopy.Class() == ir.PPARAMOUT)
@ -675,19 +675,19 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
}
inlIndex := 0
if base.Flag.GenDwarfInl > 1 {
if n.Name.InlFormal() || n.Name.InlLocal() {
inlIndex = posInlIndex(n.Pos) + 1
if n.Name.InlFormal() {
if n.Name().InlFormal() || n.Name().InlLocal() {
inlIndex = posInlIndex(n.Pos()) + 1
if n.Name().InlFormal() {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
}
}
}
declpos := base.Ctxt.InnermostPos(n.Pos)
declpos := base.Ctxt.InnermostPos(n.Pos())
vars = append(vars, &dwarf.Var{
Name: n.Sym.Name,
Name: n.Sym().Name,
IsReturnValue: isReturnValue,
Abbrev: abbrev,
StackOffset: int32(n.Xoffset),
StackOffset: int32(n.Offset()),
Type: base.Ctxt.Lookup(typename),
DeclFile: declpos.RelFilename(),
DeclLine: declpos.RelLine(),
@ -711,11 +711,11 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
func preInliningDcls(fnsym *obj.LSym) []*ir.Node {
fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*ir.Node)
var rdcl []*ir.Node
for _, n := range fn.Func.Inl.Dcl {
c := n.Sym.Name[0]
for _, n := range fn.Func().Inl.Dcl {
c := n.Sym().Name[0]
// Avoid reporting "_" parameters, since if there are more than
// one, it can result in a collision later on, as in #23179.
if unversion(n.Sym.Name) == "_" || c == '.' || n.Type.IsUntyped() {
if unversion(n.Sym().Name) == "_" || c == '.' || n.Type().IsUntyped() {
continue
}
rdcl = append(rdcl, n)
@ -741,7 +741,7 @@ func stackOffset(slot ssa.LocalSlot) int32 {
case ir.PPARAM, ir.PPARAMOUT:
off += base.Ctxt.FixedFrameSize()
}
return int32(off + n.Xoffset + slot.Off)
return int32(off + n.Offset() + slot.Off)
}
// createComplexVar builds a single DWARF variable entry and location list.
@ -764,18 +764,18 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
typename := dwarf.InfoPrefix + gotype.Name[len("type."):]
inlIndex := 0
if base.Flag.GenDwarfInl > 1 {
if n.Name.InlFormal() || n.Name.InlLocal() {
inlIndex = posInlIndex(n.Pos) + 1
if n.Name.InlFormal() {
if n.Name().InlFormal() || n.Name().InlLocal() {
inlIndex = posInlIndex(n.Pos()) + 1
if n.Name().InlFormal() {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
}
}
}
declpos := base.Ctxt.InnermostPos(n.Pos)
declpos := base.Ctxt.InnermostPos(n.Pos())
dvar := &dwarf.Var{
Name: n.Sym.Name,
Name: n.Sym().Name,
IsReturnValue: n.Class() == ir.PPARAMOUT,
IsInlFormal: n.Name.InlFormal(),
IsInlFormal: n.Name().InlFormal(),
Abbrev: abbrev,
Type: base.Ctxt.Lookup(typename),
// The stack offset is used as a sorting key, so for decomposed

View file

@ -27,12 +27,12 @@ func typeWithPointers() *types.Type {
}
func markUsed(n *ir.Node) *ir.Node {
n.Name.SetUsed(true)
n.Name().SetUsed(true)
return n
}
func markNeedZero(n *ir.Node) *ir.Node {
n.Name.SetNeedzero(true)
n.Name().SetNeedzero(true)
return n
}
@ -43,8 +43,8 @@ func TestCmpstackvar(t *testing.T) {
s = &types.Sym{Name: "."}
}
n := NewName(s)
n.Type = t
n.Xoffset = xoffset
n.SetType(t)
n.SetOffset(xoffset)
n.SetClass(cl)
return n
}
@ -158,8 +158,8 @@ func TestCmpstackvar(t *testing.T) {
func TestStackvarSort(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Node {
n := NewName(s)
n.Type = t
n.Xoffset = xoffset
n.SetType(t)
n.SetOffset(xoffset)
n.SetClass(cl)
return n
}

View file

@ -207,14 +207,14 @@ type progeffectscache struct {
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func livenessShouldTrack(n *ir.Node) bool {
return n.Op == ir.ONAME && (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type.HasPointers()
return n.Op() == ir.ONAME && (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type().HasPointers()
}
// getvariables returns the list of on-stack variables that we need to track
// and a map for looking up indices by *Node.
func getvariables(fn *ir.Node) ([]*ir.Node, map[*ir.Node]int32) {
var vars []*ir.Node
for _, n := range fn.Func.Dcl {
for _, n := range fn.Func().Dcl {
if livenessShouldTrack(n) {
vars = append(vars, n)
}
@ -272,7 +272,7 @@ const (
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
n, e := affectedNode(v)
if e == 0 || n == nil || n.Op != ir.ONAME { // cheapest checks first
if e == 0 || n == nil || n.Op() != ir.ONAME { // cheapest checks first
return -1, 0
}
@ -282,7 +282,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
// variable" ICEs (issue 19632).
switch v.Op {
case ssa.OpVarDef, ssa.OpVarKill, ssa.OpVarLive, ssa.OpKeepAlive:
if !n.Name.Used() {
if !n.Name().Used() {
return -1, 0
}
}
@ -297,7 +297,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
if e&(ssa.SymRead|ssa.SymAddr) != 0 {
effect |= uevar
}
if e&ssa.SymWrite != 0 && (!isfat(n.Type) || v.Op == ssa.OpVarDef) {
if e&ssa.SymWrite != 0 && (!isfat(n.Type()) || v.Op == ssa.OpVarDef) {
effect |= varkill
}
@ -491,10 +491,10 @@ func (lv *Liveness) pointerMap(liveout bvec, vars []*ir.Node, args, locals bvec)
node := vars[i]
switch node.Class() {
case ir.PAUTO:
onebitwalktype1(node.Type, node.Xoffset+lv.stkptrsize, locals)
onebitwalktype1(node.Type(), node.Offset()+lv.stkptrsize, locals)
case ir.PPARAM, ir.PPARAMOUT:
onebitwalktype1(node.Type, node.Xoffset, args)
onebitwalktype1(node.Type(), node.Offset(), args)
}
}
}
@ -788,14 +788,14 @@ func (lv *Liveness) epilogue() {
// pointers to copy values back to the stack).
// TODO: if the output parameter is heap-allocated, then we
// don't need to keep the stack copy live?
if lv.fn.Func.HasDefer() {
if lv.fn.Func().HasDefer() {
for i, n := range lv.vars {
if n.Class() == ir.PPARAMOUT {
if n.Name.IsOutputParamHeapAddr() {
if n.Name().IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
base.Fatalf("variable %v both output param and heap output param", n)
}
if n.Name.Param.Heapaddr != nil {
if n.Name().Param.Heapaddr != nil {
// If this variable moved to the heap, then
// its stack copy is not live.
continue
@ -803,21 +803,21 @@ func (lv *Liveness) epilogue() {
// Note: zeroing is handled by zeroResults in walk.go.
livedefer.Set(int32(i))
}
if n.Name.IsOutputParamHeapAddr() {
if n.Name().IsOutputParamHeapAddr() {
// This variable will be overwritten early in the function
// prologue (from the result of a mallocgc) but we need to
// zero it in case that malloc causes a stack scan.
n.Name.SetNeedzero(true)
n.Name().SetNeedzero(true)
livedefer.Set(int32(i))
}
if n.Name.OpenDeferSlot() {
if n.Name().OpenDeferSlot() {
// Open-coded defer args slots must be live
// everywhere in a function, since a panic can
// occur (almost) anywhere. Because it is live
// everywhere, it must be zeroed on entry.
livedefer.Set(int32(i))
// It was already marked as Needzero when created.
if !n.Name.Needzero() {
if !n.Name().Needzero() {
base.Fatalf("all pointer-containing defer arg slots should have Needzero set")
}
}
@ -891,7 +891,7 @@ func (lv *Liveness) epilogue() {
if n.Class() == ir.PPARAM {
continue // ok
}
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n)
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Func().Nname, n)
}
// Record live variables.
@ -904,7 +904,7 @@ func (lv *Liveness) epilogue() {
}
// If we have an open-coded deferreturn call, make a liveness map for it.
if lv.fn.Func.OpenCodedDeferDisallowed() {
if lv.fn.Func().OpenCodedDeferDisallowed() {
lv.livenessMap.deferreturn = LivenessDontCare
} else {
lv.livenessMap.deferreturn = LivenessIndex{
@ -922,7 +922,7 @@ func (lv *Liveness) epilogue() {
// input parameters.
for j, n := range lv.vars {
if n.Class() != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Func.Nname, n)
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Func().Nname, n)
}
}
}
@ -980,7 +980,7 @@ func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
return
}
pos := lv.fn.Func.Nname.Pos
pos := lv.fn.Func().Nname.Pos()
if v != nil {
pos = v.Pos
}
@ -1024,7 +1024,7 @@ func (lv *Liveness) printbvec(printed bool, name string, live bvec) bool {
if !live.Get(int32(i)) {
continue
}
fmt.Printf("%s%s", comma, n.Sym.Name)
fmt.Printf("%s%s", comma, n.Sym().Name)
comma = ","
}
return true
@ -1042,7 +1042,7 @@ func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bo
}
fmt.Printf("%s=", name)
if x {
fmt.Printf("%s", lv.vars[pos].Sym.Name)
fmt.Printf("%s", lv.vars[pos].Sym().Name)
}
return true
@ -1090,7 +1090,7 @@ func (lv *Liveness) printDebug() {
if b == lv.f.Entry {
live := lv.stackMaps[0]
fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Func.Nname.Pos))
fmt.Printf("(%s) function entry\n", base.FmtPos(lv.fn.Func().Nname.Pos()))
fmt.Printf("\tlive=")
printed = false
for j, n := range lv.vars {
@ -1168,7 +1168,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
for _, n := range lv.vars {
switch n.Class() {
case ir.PPARAM, ir.PPARAMOUT:
if maxArgNode == nil || n.Xoffset > maxArgNode.Xoffset {
if maxArgNode == nil || n.Offset() > maxArgNode.Offset() {
maxArgNode = n
}
}
@ -1176,7 +1176,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
// Next, find the offset of the largest pointer in the largest node.
var maxArgs int64
if maxArgNode != nil {
maxArgs = maxArgNode.Xoffset + typeptrdata(maxArgNode.Type)
maxArgs = maxArgNode.Offset() + typeptrdata(maxArgNode.Type())
}
// Size locals bitmaps to be stkptrsize sized.
@ -1266,7 +1266,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
}
// Emit the live pointer map data structures
ls := e.curfn.Func.LSym
ls := e.curfn.Func().LSym
fninfo := ls.Func()
fninfo.GCArgs, fninfo.GCLocals = lv.emit()

View file

@ -61,12 +61,12 @@ func ispkgin(pkgs []string) bool {
}
func instrument(fn *ir.Node) {
if fn.Func.Pragma&ir.Norace != 0 {
if fn.Func().Pragma&ir.Norace != 0 {
return
}
if !base.Flag.Race || !ispkgin(norace_inst_pkgs) {
fn.Func.SetInstrumentBody(true)
fn.Func().SetInstrumentBody(true)
}
if base.Flag.Race {
@ -74,8 +74,8 @@ func instrument(fn *ir.Node) {
base.Pos = src.NoXPos
if thearch.LinkArch.Arch.Family != sys.AMD64 {
fn.Func.Enter.Prepend(mkcall("racefuncenterfp", nil, nil))
fn.Func.Exit.Append(mkcall("racefuncexit", nil, nil))
fn.Func().Enter.Prepend(mkcall("racefuncenterfp", nil, nil))
fn.Func().Exit.Append(mkcall("racefuncexit", nil, nil))
} else {
// nodpc is the PC of the caller as extracted by
@ -84,11 +84,11 @@ func instrument(fn *ir.Node) {
// work on arm or others that might support
// race in the future.
nodpc := ir.Copy(nodfp)
nodpc.Type = types.Types[types.TUINTPTR]
nodpc.Xoffset = int64(-Widthptr)
fn.Func.Dcl = append(fn.Func.Dcl, nodpc)
fn.Func.Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc))
fn.Func.Exit.Append(mkcall("racefuncexit", nil, nil))
nodpc.SetType(types.Types[types.TUINTPTR])
nodpc.SetOffset(int64(-Widthptr))
fn.Func().Dcl = append(fn.Func().Dcl, nodpc)
fn.Func().Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc))
fn.Func().Exit.Append(mkcall("racefuncexit", nil, nil))
}
base.Pos = lno
}

View file

@ -27,7 +27,7 @@ func typecheckrange(n *ir.Node) {
// second half of dance, the first half being typecheckrangeExpr
n.SetTypecheck(1)
ls := n.List.Slice()
ls := n.List().Slice()
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
@ -35,21 +35,21 @@ func typecheckrange(n *ir.Node) {
}
decldepth++
typecheckslice(n.Nbody.Slice(), ctxStmt)
typecheckslice(n.Body().Slice(), ctxStmt)
decldepth--
}
func typecheckrangeExpr(n *ir.Node) {
n.Right = typecheck(n.Right, ctxExpr)
n.SetRight(typecheck(n.Right(), ctxExpr))
t := n.Right.Type
t := n.Right().Type()
if t == nil {
return
}
// delicate little dance. see typecheckas2
ls := n.List.Slice()
ls := n.List().Slice()
for i1, n1 := range ls {
if n1.Name == nil || n1.Name.Defn != n {
if n1.Name() == nil || n1.Name().Defn != n {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
}
}
@ -57,13 +57,13 @@ func typecheckrangeExpr(n *ir.Node) {
if t.IsPtr() && t.Elem().IsArray() {
t = t.Elem()
}
n.Type = t
n.SetType(t)
var t1, t2 *types.Type
toomany := false
switch t.Etype {
default:
base.ErrorfAt(n.Pos, "cannot range over %L", n.Right)
base.ErrorfAt(n.Pos(), "cannot range over %L", n.Right())
return
case types.TARRAY, types.TSLICE:
@ -76,13 +76,13 @@ func typecheckrangeExpr(n *ir.Node) {
case types.TCHAN:
if !t.ChanDir().CanRecv() {
base.ErrorfAt(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type)
base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.Right(), n.Right().Type())
return
}
t1 = t.Elem()
t2 = nil
if n.List.Len() == 2 {
if n.List().Len() == 2 {
toomany = true
}
@ -91,16 +91,16 @@ func typecheckrangeExpr(n *ir.Node) {
t2 = types.Runetype
}
if n.List.Len() > 2 || toomany {
base.ErrorfAt(n.Pos, "too many variables in range")
if n.List().Len() > 2 || toomany {
base.ErrorfAt(n.Pos(), "too many variables in range")
}
var v1, v2 *ir.Node
if n.List.Len() != 0 {
v1 = n.List.First()
if n.List().Len() != 0 {
v1 = n.List().First()
}
if n.List.Len() > 1 {
v2 = n.List.Second()
if n.List().Len() > 1 {
v2 = n.List().Second()
}
// this is not only an optimization but also a requirement in the spec.
@ -109,28 +109,28 @@ func typecheckrangeExpr(n *ir.Node) {
// present."
if ir.IsBlank(v2) {
if v1 != nil {
n.List.Set1(v1)
n.PtrList().Set1(v1)
}
v2 = nil
}
if v1 != nil {
if v1.Name != nil && v1.Name.Defn == n {
v1.Type = t1
} else if v1.Type != nil {
if op, why := assignop(t1, v1.Type); op == ir.OXXX {
base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
if v1.Name() != nil && v1.Name().Defn == n {
v1.SetType(t1)
} else if v1.Type() != nil {
if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
}
}
checkassign(n, v1)
}
if v2 != nil {
if v2.Name != nil && v2.Name.Defn == n {
v2.Type = t2
} else if v2.Type != nil {
if op, why := assignop(t2, v2.Type); op == ir.OXXX {
base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
if v2.Name() != nil && v2.Name().Defn == n {
v2.SetType(t2)
} else if v2.Type() != nil {
if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
}
}
checkassign(n, v2)
@ -159,7 +159,7 @@ func cheapComputableIndex(width int64) bool {
// the returned node.
func walkrange(n *ir.Node) *ir.Node {
if isMapClear(n) {
m := n.Right
m := n.Right()
lno := setlineno(m)
n = mapClear(m)
base.Pos = lno
@ -173,20 +173,20 @@ func walkrange(n *ir.Node) *ir.Node {
// hb: hidden bool
// a, v1, v2: not hidden aggregate, val 1, 2
t := n.Type
t := n.Type()
a := n.Right
a := n.Right()
lno := setlineno(a)
n.Right = nil
n.SetRight(nil)
var v1, v2 *ir.Node
l := n.List.Len()
l := n.List().Len()
if l > 0 {
v1 = n.List.First()
v1 = n.List().First()
}
if l > 1 {
v2 = n.List.Second()
v2 = n.List().Second()
}
if ir.IsBlank(v2) {
@ -203,7 +203,7 @@ func walkrange(n *ir.Node) *ir.Node {
// n.List has no meaning anymore, clear it
// to avoid erroneous processing by racewalk.
n.List.Set(nil)
n.PtrList().Set(nil)
var ifGuard *ir.Node
@ -230,8 +230,8 @@ func walkrange(n *ir.Node) *ir.Node {
init = append(init, ir.Nod(ir.OAS, hv1, nil))
init = append(init, ir.Nod(ir.OAS, hn, ir.Nod(ir.OLEN, ha, nil)))
n.Left = ir.Nod(ir.OLT, hv1, hn)
n.Right = ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1)))
n.SetLeft(ir.Nod(ir.OLT, hv1, hn))
n.SetRight(ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1))))
// for range ha { body }
if v1 == nil {
@ -245,15 +245,15 @@ func walkrange(n *ir.Node) *ir.Node {
}
// for v1, v2 := range ha { body }
if cheapComputableIndex(n.Type.Elem().Width) {
if cheapComputableIndex(n.Type().Elem().Width) {
// v1, v2 = hv1, ha[hv1]
tmp := ir.Nod(ir.OINDEX, ha, hv1)
tmp.SetBounded(true)
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(hv1, tmp)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1, tmp)
body = []*ir.Node{a}
break
}
@ -271,10 +271,10 @@ func walkrange(n *ir.Node) *ir.Node {
// elimination on the index variable (see #20711).
// Enhance the prove pass to understand this.
ifGuard = ir.Nod(ir.OIF, nil, nil)
ifGuard.Left = ir.Nod(ir.OLT, hv1, hn)
ifGuard.SetLeft(ir.Nod(ir.OLT, hv1, hn))
translatedLoopOp = ir.OFORUNTIL
hp := temp(types.NewPtr(n.Type.Elem()))
hp := temp(types.NewPtr(n.Type().Elem()))
tmp := ir.Nod(ir.OINDEX, ha, nodintconst(0))
tmp.SetBounded(true)
init = append(init, ir.Nod(ir.OAS, hp, ir.Nod(ir.OADDR, tmp, nil)))
@ -282,8 +282,8 @@ func walkrange(n *ir.Node) *ir.Node {
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(hv1, ir.Nod(ir.ODEREF, hp, nil))
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1, ir.Nod(ir.ODEREF, hp, nil))
body = append(body, a)
// Advance pointer as part of the late increment.
@ -293,7 +293,7 @@ func walkrange(n *ir.Node) *ir.Node {
// end of the allocation.
a = ir.Nod(ir.OAS, hp, addptr(hp, t.Elem().Width))
a = typecheck(a, ctxStmt)
n.List.Set1(a)
n.PtrList().Set1(a)
case types.TMAP:
// order.stmt allocated the iterator for us.
@ -301,8 +301,8 @@ func walkrange(n *ir.Node) *ir.Node {
ha := a
hit := prealloc[n]
th := hit.Type
n.Left = nil
th := hit.Type()
n.SetLeft(nil)
keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter
elemsym := th.Field(1).Sym // ditto
@ -310,11 +310,11 @@ func walkrange(n *ir.Node) *ir.Node {
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, ir.Nod(ir.OADDR, hit, nil)))
n.Left = ir.Nod(ir.ONE, nodSym(ir.ODOT, hit, keysym), nodnil())
n.SetLeft(ir.Nod(ir.ONE, nodSym(ir.ODOT, hit, keysym), nodnil()))
fn = syslook("mapiternext")
fn = substArgTypes(fn, th)
n.Right = mkcall1(fn, nil, nil, ir.Nod(ir.OADDR, hit, nil))
n.SetRight(mkcall1(fn, nil, nil, ir.Nod(ir.OADDR, hit, nil)))
key := nodSym(ir.ODOT, hit, keysym)
key = ir.Nod(ir.ODEREF, key, nil)
@ -326,8 +326,8 @@ func walkrange(n *ir.Node) *ir.Node {
elem := nodSym(ir.ODOT, hit, elemsym)
elem = ir.Nod(ir.ODEREF, elem, nil)
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(key, elem)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(key, elem)
body = []*ir.Node{a}
}
@ -335,7 +335,7 @@ func walkrange(n *ir.Node) *ir.Node {
// order.stmt arranged for a copy of the channel variable.
ha := a
n.Left = nil
n.SetLeft(nil)
hv1 := temp(t.Elem())
hv1.SetTypecheck(1)
@ -344,12 +344,12 @@ func walkrange(n *ir.Node) *ir.Node {
}
hb := temp(types.Types[types.TBOOL])
n.Left = ir.Nod(ir.ONE, hb, nodbool(false))
n.SetLeft(ir.Nod(ir.ONE, hb, nodbool(false)))
a := ir.Nod(ir.OAS2RECV, nil, nil)
a.SetTypecheck(1)
a.List.Set2(hv1, hb)
a.Right = ir.Nod(ir.ORECV, ha, nil)
n.Left.Ninit.Set1(a)
a.PtrList().Set2(hv1, hb)
a.SetRight(ir.Nod(ir.ORECV, ha, nil))
n.Left().PtrInit().Set1(a)
if v1 == nil {
body = nil
} else {
@ -387,7 +387,7 @@ func walkrange(n *ir.Node) *ir.Node {
init = append(init, ir.Nod(ir.OAS, hv1, nil))
// hv1 < len(ha)
n.Left = ir.Nod(ir.OLT, hv1, ir.Nod(ir.OLEN, ha, nil))
n.SetLeft(ir.Nod(ir.OLT, hv1, ir.Nod(ir.OLEN, ha, nil)))
if v1 != nil {
// hv1t = hv1
@ -401,19 +401,19 @@ func walkrange(n *ir.Node) *ir.Node {
// if hv2 < utf8.RuneSelf
nif := ir.Nod(ir.OIF, nil, nil)
nif.Left = ir.Nod(ir.OLT, hv2, nodintconst(utf8.RuneSelf))
nif.SetLeft(ir.Nod(ir.OLT, hv2, nodintconst(utf8.RuneSelf)))
// hv1++
nif.Nbody.Set1(ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1))))
nif.PtrBody().Set1(ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1))))
// } else {
eif := ir.Nod(ir.OAS2, nil, nil)
nif.Rlist.Set1(eif)
nif.PtrRlist().Set1(eif)
// hv2, hv1 = decoderune(ha, hv1)
eif.List.Set2(hv2, hv1)
eif.PtrList().Set2(hv2, hv1)
fn := syslook("decoderune")
eif.Rlist.Set1(mkcall1(fn, fn.Type.Results(), nil, ha, hv1))
eif.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, ha, hv1))
body = append(body, nif)
@ -421,8 +421,8 @@ func walkrange(n *ir.Node) *ir.Node {
if v2 != nil {
// v1, v2 = hv1t, hv2
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(hv1t, hv2)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1t, hv2)
body = append(body, a)
} else {
// v1 = hv1t
@ -431,26 +431,26 @@ func walkrange(n *ir.Node) *ir.Node {
}
}
n.Op = translatedLoopOp
n.SetOp(translatedLoopOp)
typecheckslice(init, ctxStmt)
if ifGuard != nil {
ifGuard.Ninit.Append(init...)
ifGuard.PtrInit().Append(init...)
ifGuard = typecheck(ifGuard, ctxStmt)
} else {
n.Ninit.Append(init...)
n.PtrInit().Append(init...)
}
typecheckslice(n.Left.Ninit.Slice(), ctxStmt)
typecheckslice(n.Left().Init().Slice(), ctxStmt)
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, nil)
n.Right = typecheck(n.Right, ctxStmt)
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
n.SetRight(typecheck(n.Right(), ctxStmt))
typecheckslice(body, ctxStmt)
n.Nbody.Prepend(body...)
n.PtrBody().Prepend(body...)
if ifGuard != nil {
ifGuard.Nbody.Set1(n)
ifGuard.PtrBody().Set1(n)
n = ifGuard
}
@ -472,36 +472,36 @@ func isMapClear(n *ir.Node) bool {
return false
}
if n.Op != ir.ORANGE || n.Type.Etype != types.TMAP || n.List.Len() != 1 {
if n.Op() != ir.ORANGE || n.Type().Etype != types.TMAP || n.List().Len() != 1 {
return false
}
k := n.List.First()
k := n.List().First()
if k == nil || ir.IsBlank(k) {
return false
}
// Require k to be a new variable name.
if k.Name == nil || k.Name.Defn != n {
if k.Name() == nil || k.Name().Defn != n {
return false
}
if n.Nbody.Len() != 1 {
if n.Body().Len() != 1 {
return false
}
stmt := n.Nbody.First() // only stmt in body
if stmt == nil || stmt.Op != ir.ODELETE {
stmt := n.Body().First() // only stmt in body
if stmt == nil || stmt.Op() != ir.ODELETE {
return false
}
m := n.Right
if !samesafeexpr(stmt.List.First(), m) || !samesafeexpr(stmt.List.Second(), k) {
m := n.Right()
if !samesafeexpr(stmt.List().First(), m) || !samesafeexpr(stmt.List().Second(), k) {
return false
}
// Keys where equality is not reflexive can not be deleted from maps.
if !isreflexive(m.Type.Key()) {
if !isreflexive(m.Type().Key()) {
return false
}
@ -510,7 +510,7 @@ func isMapClear(n *ir.Node) bool {
// mapClear constructs a call to runtime.mapclear for the map m.
func mapClear(m *ir.Node) *ir.Node {
t := m.Type
t := m.Type()
// instantiate mapclear(typ *type, hmap map[any]any)
fn := syslook("mapclear")
@ -543,21 +543,21 @@ func arrayClear(n, v1, v2, a *ir.Node) bool {
return false
}
if n.Nbody.Len() != 1 || n.Nbody.First() == nil {
if n.Body().Len() != 1 || n.Body().First() == nil {
return false
}
stmt := n.Nbody.First() // only stmt in body
if stmt.Op != ir.OAS || stmt.Left.Op != ir.OINDEX {
stmt := n.Body().First() // only stmt in body
if stmt.Op() != ir.OAS || stmt.Left().Op() != ir.OINDEX {
return false
}
if !samesafeexpr(stmt.Left.Left, a) || !samesafeexpr(stmt.Left.Right, v1) {
if !samesafeexpr(stmt.Left().Left(), a) || !samesafeexpr(stmt.Left().Right(), v1) {
return false
}
elemsize := n.Type.Elem().Width
if elemsize <= 0 || !isZero(stmt.Right) {
elemsize := n.Type().Elem().Width
if elemsize <= 0 || !isZero(stmt.Right()) {
return false
}
@ -568,10 +568,10 @@ func arrayClear(n, v1, v2, a *ir.Node) bool {
// memclr{NoHeap,Has}Pointers(hp, hn)
// i = len(a) - 1
// }
n.Op = ir.OIF
n.SetOp(ir.OIF)
n.Nbody.Set(nil)
n.Left = ir.Nod(ir.ONE, ir.Nod(ir.OLEN, a, nil), nodintconst(0))
n.PtrBody().Set(nil)
n.SetLeft(ir.Nod(ir.ONE, ir.Nod(ir.OLEN, a, nil), nodintconst(0)))
// hp = &a[0]
hp := temp(types.Types[types.TUNSAFEPTR])
@ -580,7 +580,7 @@ func arrayClear(n, v1, v2, a *ir.Node) bool {
tmp.SetBounded(true)
tmp = ir.Nod(ir.OADDR, tmp, nil)
tmp = convnop(tmp, types.Types[types.TUNSAFEPTR])
n.Nbody.Append(ir.Nod(ir.OAS, hp, tmp))
n.PtrBody().Append(ir.Nod(ir.OAS, hp, tmp))
// hn = len(a) * sizeof(elem(a))
hn := temp(types.Types[types.TUINTPTR])
@ -588,43 +588,43 @@ func arrayClear(n, v1, v2, a *ir.Node) bool {
tmp = ir.Nod(ir.OLEN, a, nil)
tmp = ir.Nod(ir.OMUL, tmp, nodintconst(elemsize))
tmp = conv(tmp, types.Types[types.TUINTPTR])
n.Nbody.Append(ir.Nod(ir.OAS, hn, tmp))
n.PtrBody().Append(ir.Nod(ir.OAS, hn, tmp))
var fn *ir.Node
if a.Type.Elem().HasPointers() {
if a.Type().Elem().HasPointers() {
// memclrHasPointers(hp, hn)
Curfn.Func.SetWBPos(stmt.Pos)
Curfn.Func().SetWBPos(stmt.Pos())
fn = mkcall("memclrHasPointers", nil, nil, hp, hn)
} else {
// memclrNoHeapPointers(hp, hn)
fn = mkcall("memclrNoHeapPointers", nil, nil, hp, hn)
}
n.Nbody.Append(fn)
n.PtrBody().Append(fn)
// i = len(a) - 1
v1 = ir.Nod(ir.OAS, v1, ir.Nod(ir.OSUB, ir.Nod(ir.OLEN, a, nil), nodintconst(1)))
n.Nbody.Append(v1)
n.PtrBody().Append(v1)
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, nil)
typecheckslice(n.Nbody.Slice(), ctxStmt)
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
typecheckslice(n.Body().Slice(), ctxStmt)
n = walkstmt(n)
return true
}
// addptr returns (*T)(uintptr(p) + n).
func addptr(p *ir.Node, n int64) *ir.Node {
t := p.Type
t := p.Type()
p = ir.Nod(ir.OCONVNOP, p, nil)
p.Type = types.Types[types.TUINTPTR]
p.SetType(types.Types[types.TUINTPTR])
p = ir.Nod(ir.OADD, p, nodintconst(n))
p = ir.Nod(ir.OCONVNOP, p, nil)
p.Type = t
p.SetType(t)
return p
}

View file

@ -994,14 +994,14 @@ func typename(t *types.Type) *ir.Node {
s := typenamesym(t)
if s.Def == nil {
n := ir.NewNameAt(src.NoXPos, s)
n.Type = types.Types[types.TUINT8]
n.SetType(types.Types[types.TUINT8])
n.SetClass(ir.PEXTERN)
n.SetTypecheck(1)
s.Def = ir.AsTypesNode(n)
}
n := ir.Nod(ir.OADDR, ir.AsNode(s.Def), nil)
n.Type = types.NewPtr(ir.AsNode(s.Def).Type)
n.SetType(types.NewPtr(ir.AsNode(s.Def).Type()))
n.SetTypecheck(1)
return n
}
@ -1013,7 +1013,7 @@ func itabname(t, itype *types.Type) *ir.Node {
s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
n := NewName(s)
n.Type = types.Types[types.TUINT8]
n.SetType(types.Types[types.TUINT8])
n.SetClass(ir.PEXTERN)
n.SetTypecheck(1)
s.Def = ir.AsTypesNode(n)
@ -1021,7 +1021,7 @@ func itabname(t, itype *types.Type) *ir.Node {
}
n := ir.Nod(ir.OADDR, ir.AsNode(s.Def), nil)
n.Type = types.NewPtr(ir.AsNode(s.Def).Type)
n.SetType(types.NewPtr(ir.AsNode(s.Def).Type()))
n.SetTypecheck(1)
return n
}
@ -1519,8 +1519,8 @@ func addsignat(t *types.Type) {
func addsignats(dcls []*ir.Node) {
// copy types from dcl list to signatset
for _, n := range dcls {
if n.Op == ir.OTYPE {
addsignat(n.Type)
if n.Op() == ir.OTYPE {
addsignat(n.Type())
}
}
}
@ -1879,13 +1879,13 @@ func zeroaddr(size int64) *ir.Node {
s := mappkg.Lookup("zero")
if s.Def == nil {
x := NewName(s)
x.Type = types.Types[types.TUINT8]
x.SetType(types.Types[types.TUINT8])
x.SetClass(ir.PEXTERN)
x.SetTypecheck(1)
s.Def = ir.AsTypesNode(x)
}
z := ir.Nod(ir.OADDR, ir.AsNode(s.Def), nil)
z.Type = types.NewPtr(types.Types[types.TUINT8])
z.SetType(types.NewPtr(types.Types[types.TUINT8]))
z.SetTypecheck(1)
return z
}

View file

@ -56,7 +56,7 @@ func visitBottomUp(list []*ir.Node, analyze func(list []*ir.Node, recursive bool
v.analyze = analyze
v.nodeID = make(map[*ir.Node]uint32)
for _, n := range list {
if n.Op == ir.ODCLFUNC && !n.Func.IsHiddenClosure() {
if n.Op() == ir.ODCLFUNC && !n.Func().IsHiddenClosure() {
v.visit(n)
}
}
@ -75,46 +75,46 @@ func (v *bottomUpVisitor) visit(n *ir.Node) uint32 {
min := v.visitgen
v.stack = append(v.stack, n)
ir.InspectList(n.Nbody, func(n *ir.Node) bool {
switch n.Op {
ir.InspectList(n.Body(), func(n *ir.Node) bool {
switch n.Op() {
case ir.ONAME:
if n.Class() == ir.PFUNC {
if n != nil && n.Name.Defn != nil {
if m := v.visit(n.Name.Defn); m < min {
if n != nil && n.Name().Defn != nil {
if m := v.visit(n.Name().Defn); m < min {
min = m
}
}
}
case ir.OMETHEXPR:
fn := methodExprName(n)
if fn != nil && fn.Name.Defn != nil {
if m := v.visit(fn.Name.Defn); m < min {
if fn != nil && fn.Name().Defn != nil {
if m := v.visit(fn.Name().Defn); m < min {
min = m
}
}
case ir.ODOTMETH:
fn := methodExprName(n)
if fn != nil && fn.Op == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name.Defn != nil {
if m := v.visit(fn.Name.Defn); m < min {
if fn != nil && fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name().Defn != nil {
if m := v.visit(fn.Name().Defn); m < min {
min = m
}
}
case ir.OCALLPART:
fn := ir.AsNode(callpartMethod(n).Nname)
if fn != nil && fn.Op == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name.Defn != nil {
if m := v.visit(fn.Name.Defn); m < min {
if fn != nil && fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name().Defn != nil {
if m := v.visit(fn.Name().Defn); m < min {
min = m
}
}
case ir.OCLOSURE:
if m := v.visit(n.Func.Decl); m < min {
if m := v.visit(n.Func().Decl); m < min {
min = m
}
}
return true
})
if (min == id || min == id+1) && !n.Func.IsHiddenClosure() {
if (min == id || min == id+1) && !n.Func().IsHiddenClosure() {
// This node is the root of a strongly connected component.
// The original min passed to visitcodelist was v.nodeID[n]+1.

View file

@ -30,13 +30,13 @@ func findScope(marks []ir.Mark, pos src.XPos) ir.ScopeID {
func assembleScopes(fnsym *obj.LSym, fn *ir.Node, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope {
// Initialize the DWARF scope tree based on lexical scopes.
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func.Parents))
for i, parent := range fn.Func.Parents {
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func().Parents))
for i, parent := range fn.Func().Parents {
dwarfScopes[i+1].Parent = int32(parent)
}
scopeVariables(dwarfVars, varScopes, dwarfScopes)
scopePCs(fnsym, fn.Func.Marks, dwarfScopes)
scopePCs(fnsym, fn.Func().Marks, dwarfScopes)
return compactScopes(dwarfScopes)
}

View file

@ -14,36 +14,36 @@ import (
func typecheckselect(sel *ir.Node) {
var def *ir.Node
lno := setlineno(sel)
typecheckslice(sel.Ninit.Slice(), ctxStmt)
for _, ncase := range sel.List.Slice() {
if ncase.Op != ir.OCASE {
typecheckslice(sel.Init().Slice(), ctxStmt)
for _, ncase := range sel.List().Slice() {
if ncase.Op() != ir.OCASE {
setlineno(ncase)
base.Fatalf("typecheckselect %v", ncase.Op)
base.Fatalf("typecheckselect %v", ncase.Op())
}
if ncase.List.Len() == 0 {
if ncase.List().Len() == 0 {
// default
if def != nil {
base.ErrorfAt(ncase.Pos, "multiple defaults in select (first at %v)", ir.Line(def))
base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
} else {
def = ncase
}
} else if ncase.List.Len() > 1 {
base.ErrorfAt(ncase.Pos, "select cases cannot be lists")
} else if ncase.List().Len() > 1 {
base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
} else {
ncase.List.SetFirst(typecheck(ncase.List.First(), ctxStmt))
n := ncase.List.First()
ncase.Left = n
ncase.List.Set(nil)
switch n.Op {
ncase.List().SetFirst(typecheck(ncase.List().First(), ctxStmt))
n := ncase.List().First()
ncase.SetLeft(n)
ncase.PtrList().Set(nil)
switch n.Op() {
default:
pos := n.Pos
if n.Op == ir.ONAME {
pos := n.Pos()
if n.Op() == ir.ONAME {
// We don't have the right position for ONAME nodes (see #15459 and
// others). Using ncase.Pos for now as it will provide the correct
// line number (assuming the expression follows the "case" keyword
// on the same line). This matches the approach before 1.10.
pos = ncase.Pos
pos = ncase.Pos()
}
base.ErrorfAt(pos, "select case must be receive, send or assign recv")
@ -51,41 +51,41 @@ func typecheckselect(sel *ir.Node) {
// remove implicit conversions; the eventual assignment
// will reintroduce them.
case ir.OAS:
if (n.Right.Op == ir.OCONVNOP || n.Right.Op == ir.OCONVIFACE) && n.Right.Implicit() {
n.Right = n.Right.Left
if (n.Right().Op() == ir.OCONVNOP || n.Right().Op() == ir.OCONVIFACE) && n.Right().Implicit() {
n.SetRight(n.Right().Left())
}
if n.Right.Op != ir.ORECV {
base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side")
if n.Right().Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
n.Op = ir.OSELRECV
n.SetOp(ir.OSELRECV)
// convert x, ok = <-c into OSELRECV2(x, <-c) with ntest=ok
case ir.OAS2RECV:
if n.Right.Op != ir.ORECV {
base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side")
if n.Right().Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
n.Op = ir.OSELRECV2
n.Left = n.List.First()
n.List.Set1(n.List.Second())
n.SetOp(ir.OSELRECV2)
n.SetLeft(n.List().First())
n.PtrList().Set1(n.List().Second())
// convert <-c into OSELRECV(N, <-c)
case ir.ORECV:
n = ir.NodAt(n.Pos, ir.OSELRECV, nil, n)
n = ir.NodAt(n.Pos(), ir.OSELRECV, nil, n)
n.SetTypecheck(1)
ncase.Left = n
ncase.SetLeft(n)
case ir.OSEND:
break
}
}
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
typecheckslice(ncase.Body().Slice(), ctxStmt)
}
base.Pos = lno
@ -93,18 +93,18 @@ func typecheckselect(sel *ir.Node) {
func walkselect(sel *ir.Node) {
lno := setlineno(sel)
if sel.Nbody.Len() != 0 {
if sel.Body().Len() != 0 {
base.Fatalf("double walkselect")
}
init := sel.Ninit.Slice()
sel.Ninit.Set(nil)
init := sel.Init().Slice()
sel.PtrInit().Set(nil)
init = append(init, walkselectcases(&sel.List)...)
sel.List.Set(nil)
init = append(init, walkselectcases(sel.PtrList())...)
sel.PtrList().Set(nil)
sel.Nbody.Set(init)
walkstmtlist(sel.Nbody.Slice())
sel.PtrBody().Set(init)
walkstmtlist(sel.Body().Slice())
base.Pos = lno
}
@ -122,38 +122,38 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
if ncas == 1 {
cas := cases.First()
setlineno(cas)
l := cas.Ninit.Slice()
if cas.Left != nil { // not default:
n := cas.Left
l = append(l, n.Ninit.Slice()...)
n.Ninit.Set(nil)
switch n.Op {
l := cas.Init().Slice()
if cas.Left() != nil { // not default:
n := cas.Left()
l = append(l, n.Init().Slice()...)
n.PtrInit().Set(nil)
switch n.Op() {
default:
base.Fatalf("select %v", n.Op)
base.Fatalf("select %v", n.Op())
case ir.OSEND:
// already ok
case ir.OSELRECV, ir.OSELRECV2:
if n.Op == ir.OSELRECV || n.List.Len() == 0 {
if n.Left == nil {
n = n.Right
if n.Op() == ir.OSELRECV || n.List().Len() == 0 {
if n.Left() == nil {
n = n.Right()
} else {
n.Op = ir.OAS
n.SetOp(ir.OAS)
}
break
}
if n.Left == nil {
if n.Left() == nil {
ir.BlankNode = typecheck(ir.BlankNode, ctxExpr|ctxAssign)
n.Left = ir.BlankNode
n.SetLeft(ir.BlankNode)
}
n.Op = ir.OAS2
n.List.Prepend(n.Left)
n.Rlist.Set1(n.Right)
n.Right = nil
n.Left = nil
n.SetOp(ir.OAS2)
n.PtrList().Prepend(n.Left())
n.PtrRlist().Set1(n.Right())
n.SetRight(nil)
n.SetLeft(nil)
n.SetTypecheck(0)
n = typecheck(n, ctxStmt)
}
@ -161,7 +161,7 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
l = append(l, n)
}
l = append(l, cas.Nbody.Slice()...)
l = append(l, cas.Body().Slice()...)
l = append(l, ir.Nod(ir.OBREAK, nil, nil))
return l
}
@ -171,24 +171,24 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
var dflt *ir.Node
for _, cas := range cases.Slice() {
setlineno(cas)
n := cas.Left
n := cas.Left()
if n == nil {
dflt = cas
continue
}
switch n.Op {
switch n.Op() {
case ir.OSEND:
n.Right = ir.Nod(ir.OADDR, n.Right, nil)
n.Right = typecheck(n.Right, ctxExpr)
n.SetRight(ir.Nod(ir.OADDR, n.Right(), nil))
n.SetRight(typecheck(n.Right(), ctxExpr))
case ir.OSELRECV, ir.OSELRECV2:
if n.Op == ir.OSELRECV2 && n.List.Len() == 0 {
n.Op = ir.OSELRECV
if n.Op() == ir.OSELRECV2 && n.List().Len() == 0 {
n.SetOp(ir.OSELRECV)
}
if n.Left != nil {
n.Left = ir.Nod(ir.OADDR, n.Left, nil)
n.Left = typecheck(n.Left, ctxExpr)
if n.Left() != nil {
n.SetLeft(ir.Nod(ir.OADDR, n.Left(), nil))
n.SetLeft(typecheck(n.Left(), ctxExpr))
}
}
}
@ -200,43 +200,43 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
cas = cases.Second()
}
n := cas.Left
n := cas.Left()
setlineno(n)
r := ir.Nod(ir.OIF, nil, nil)
r.Ninit.Set(cas.Ninit.Slice())
switch n.Op {
r.PtrInit().Set(cas.Init().Slice())
switch n.Op() {
default:
base.Fatalf("select %v", n.Op)
base.Fatalf("select %v", n.Op())
case ir.OSEND:
// if selectnbsend(c, v) { body } else { default body }
ch := n.Left
r.Left = mkcall1(chanfn("selectnbsend", 2, ch.Type), types.Types[types.TBOOL], &r.Ninit, ch, n.Right)
ch := n.Left()
r.SetLeft(mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Right()))
case ir.OSELRECV:
// if selectnbrecv(&v, c) { body } else { default body }
ch := n.Right.Left
elem := n.Left
ch := n.Right().Left()
elem := n.Left()
if elem == nil {
elem = nodnil()
}
r.Left = mkcall1(chanfn("selectnbrecv", 2, ch.Type), types.Types[types.TBOOL], &r.Ninit, elem, ch)
r.SetLeft(mkcall1(chanfn("selectnbrecv", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, ch))
case ir.OSELRECV2:
// if selectnbrecv2(&v, &received, c) { body } else { default body }
ch := n.Right.Left
elem := n.Left
ch := n.Right().Left()
elem := n.Left()
if elem == nil {
elem = nodnil()
}
receivedp := ir.Nod(ir.OADDR, n.List.First(), nil)
receivedp := ir.Nod(ir.OADDR, n.List().First(), nil)
receivedp = typecheck(receivedp, ctxExpr)
r.Left = mkcall1(chanfn("selectnbrecv2", 2, ch.Type), types.Types[types.TBOOL], &r.Ninit, elem, receivedp, ch)
r.SetLeft(mkcall1(chanfn("selectnbrecv2", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, receivedp, ch))
}
r.Left = typecheck(r.Left, ctxExpr)
r.Nbody.Set(cas.Nbody.Slice())
r.Rlist.Set(append(dflt.Ninit.Slice(), dflt.Nbody.Slice()...))
r.SetLeft(typecheck(r.Left(), ctxExpr))
r.PtrBody().Set(cas.Body().Slice())
r.PtrRlist().Set(append(dflt.Init().Slice(), dflt.Body().Slice()...))
return []*ir.Node{r, ir.Nod(ir.OBREAK, nil, nil)}
}
@ -270,29 +270,29 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
for _, cas := range cases.Slice() {
setlineno(cas)
init = append(init, cas.Ninit.Slice()...)
cas.Ninit.Set(nil)
init = append(init, cas.Init().Slice()...)
cas.PtrInit().Set(nil)
n := cas.Left
n := cas.Left()
if n == nil { // default:
continue
}
var i int
var c, elem *ir.Node
switch n.Op {
switch n.Op() {
default:
base.Fatalf("select %v", n.Op)
base.Fatalf("select %v", n.Op())
case ir.OSEND:
i = nsends
nsends++
c = n.Left
elem = n.Right
c = n.Left()
elem = n.Right()
case ir.OSELRECV, ir.OSELRECV2:
nrecvs++
i = ncas - nrecvs
c = n.Right.Left
elem = n.Left
c = n.Right().Left()
elem = n.Left()
}
casorder[i] = cas
@ -326,9 +326,9 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
chosen := temp(types.Types[types.TINT])
recvOK := temp(types.Types[types.TBOOL])
r = ir.Nod(ir.OAS2, nil, nil)
r.List.Set2(chosen, recvOK)
r.PtrList().Set2(chosen, recvOK)
fn := syslook("selectgo")
r.Rlist.Set1(mkcall1(fn, fn.Type.Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
r.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
r = typecheck(r, ctxStmt)
init = append(init, r)
@ -346,14 +346,14 @@ func walkselectcases(cases *ir.Nodes) []*ir.Node {
r := ir.Nod(ir.OIF, cond, nil)
if n := cas.Left; n != nil && n.Op == ir.OSELRECV2 {
x := ir.Nod(ir.OAS, n.List.First(), recvOK)
if n := cas.Left(); n != nil && n.Op() == ir.OSELRECV2 {
x := ir.Nod(ir.OAS, n.List().First(), recvOK)
x = typecheck(x, ctxStmt)
r.Nbody.Append(x)
r.PtrBody().Append(x)
}
r.Nbody.AppendNodes(&cas.Nbody)
r.Nbody.Append(ir.Nod(ir.OBREAK, nil, nil))
r.PtrBody().AppendNodes(cas.PtrBody())
r.PtrBody().Append(ir.Nod(ir.OBREAK, nil, nil))
init = append(init, r)
}

View file

@ -57,54 +57,54 @@ func (s *InitSchedule) tryStaticInit(n *ir.Node) bool {
// replaced by multiple simple OAS assignments, and the other
// OAS2* assignments mostly necessitate dynamic execution
// anyway.
if n.Op != ir.OAS {
if n.Op() != ir.OAS {
return false
}
if ir.IsBlank(n.Left) && candiscard(n.Right) {
if ir.IsBlank(n.Left()) && candiscard(n.Right()) {
return true
}
lno := setlineno(n)
defer func() { base.Pos = lno }()
return s.staticassign(n.Left, n.Right)
return s.staticassign(n.Left(), n.Right())
}
// like staticassign but we are copying an already
// initialized value r.
func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
if r.Op != ir.ONAME && r.Op != ir.OMETHEXPR {
if r.Op() != ir.ONAME && r.Op() != ir.OMETHEXPR {
return false
}
if r.Class() == ir.PFUNC {
pfuncsym(l, r)
return true
}
if r.Class() != ir.PEXTERN || r.Sym.Pkg != ir.LocalPkg {
if r.Class() != ir.PEXTERN || r.Sym().Pkg != ir.LocalPkg {
return false
}
if r.Name.Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
if r.Name().Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
return false
}
if r.Name.Defn.Op != ir.OAS {
if r.Name().Defn.Op() != ir.OAS {
return false
}
if r.Type.IsString() { // perhaps overwritten by cmd/link -X (#34675)
if r.Type().IsString() { // perhaps overwritten by cmd/link -X (#34675)
return false
}
orig := r
r = r.Name.Defn.Right
r = r.Name().Defn.Right()
for r.Op == ir.OCONVNOP && !types.Identical(r.Type, l.Type) {
r = r.Left
for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), l.Type()) {
r = r.Left()
}
switch r.Op {
switch r.Op() {
case ir.ONAME, ir.OMETHEXPR:
if s.staticcopy(l, r) {
return true
}
// We may have skipped past one or more OCONVNOPs, so
// use conv to ensure r is assignable to l (#13263).
s.append(ir.Nod(ir.OAS, l, conv(r, l.Type)))
s.append(ir.Nod(ir.OAS, l, conv(r, l.Type())))
return true
case ir.ONIL:
@ -114,17 +114,17 @@ func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
if isZero(r) {
return true
}
litsym(l, r, int(l.Type.Width))
litsym(l, r, int(l.Type().Width))
return true
case ir.OADDR:
if a := r.Left; a.Op == ir.ONAME {
if a := r.Left(); a.Op() == ir.ONAME {
addrsym(l, a)
return true
}
case ir.OPTRLIT:
switch r.Left.Op {
switch r.Left().Op() {
case ir.OARRAYLIT, ir.OSLICELIT, ir.OSTRUCTLIT, ir.OMAPLIT:
// copy pointer
addrsym(l, s.inittemps[r])
@ -134,7 +134,7 @@ func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
case ir.OSLICELIT:
// copy slice
a := s.inittemps[r]
slicesym(l, a, r.Right.Int64Val())
slicesym(l, a, r.Right().Int64Val())
return true
case ir.OARRAYLIT, ir.OSTRUCTLIT:
@ -143,10 +143,10 @@ func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
n := ir.Copy(l)
for i := range p.E {
e := &p.E[i]
n.Xoffset = l.Xoffset + e.Xoffset
n.Type = e.Expr.Type
if e.Expr.Op == ir.OLITERAL || e.Expr.Op == ir.ONIL {
litsym(n, e.Expr, int(n.Type.Width))
n.SetOffset(l.Offset() + e.Xoffset)
n.SetType(e.Expr.Type())
if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
litsym(n, e.Expr, int(n.Type().Width))
continue
}
ll := ir.SepCopy(n)
@ -156,8 +156,8 @@ func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
// Requires computation, but we're
// copying someone else's computation.
rr := ir.SepCopy(orig)
rr.Type = ll.Type
rr.Xoffset = rr.Xoffset + e.Xoffset
rr.SetType(ll.Type())
rr.SetOffset(rr.Offset() + e.Xoffset)
setlineno(rr)
s.append(ir.Nod(ir.OAS, ll, rr))
}
@ -169,11 +169,11 @@ func (s *InitSchedule) staticcopy(l *ir.Node, r *ir.Node) bool {
}
func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
for r.Op == ir.OCONVNOP {
r = r.Left
for r.Op() == ir.OCONVNOP {
r = r.Left()
}
switch r.Op {
switch r.Op() {
case ir.ONAME, ir.OMETHEXPR:
return s.staticcopy(l, r)
@ -184,36 +184,36 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
if isZero(r) {
return true
}
litsym(l, r, int(l.Type.Width))
litsym(l, r, int(l.Type().Width))
return true
case ir.OADDR:
if nam := stataddr(r.Left); nam != nil {
if nam := stataddr(r.Left()); nam != nil {
addrsym(l, nam)
return true
}
fallthrough
case ir.OPTRLIT:
switch r.Left.Op {
switch r.Left().Op() {
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT:
// Init pointer.
a := staticname(r.Left.Type)
a := staticname(r.Left().Type())
s.inittemps[r] = a
addrsym(l, a)
// Init underlying literal.
if !s.staticassign(a, r.Left) {
s.append(ir.Nod(ir.OAS, a, r.Left))
if !s.staticassign(a, r.Left()) {
s.append(ir.Nod(ir.OAS, a, r.Left()))
}
return true
}
//dump("not static ptrlit", r);
case ir.OSTR2BYTES:
if l.Class() == ir.PEXTERN && r.Left.Op == ir.OLITERAL {
sval := r.Left.StringVal()
if l.Class() == ir.PEXTERN && r.Left().Op() == ir.OLITERAL {
sval := r.Left().StringVal()
slicebytes(l, sval)
return true
}
@ -221,8 +221,8 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
case ir.OSLICELIT:
s.initplan(r)
// Init slice.
bound := r.Right.Int64Val()
ta := types.NewArray(r.Type.Elem(), bound)
bound := r.Right().Int64Val()
ta := types.NewArray(r.Type().Elem(), bound)
ta.SetNoalg(true)
a := staticname(ta)
s.inittemps[r] = a
@ -238,10 +238,10 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
n := ir.Copy(l)
for i := range p.E {
e := &p.E[i]
n.Xoffset = l.Xoffset + e.Xoffset
n.Type = e.Expr.Type
if e.Expr.Op == ir.OLITERAL || e.Expr.Op == ir.ONIL {
litsym(n, e.Expr, int(n.Type.Width))
n.SetOffset(l.Offset() + e.Xoffset)
n.SetType(e.Expr.Type())
if e.Expr.Op() == ir.OLITERAL || e.Expr.Op() == ir.ONIL {
litsym(n, e.Expr, int(n.Type().Width))
continue
}
setlineno(e.Expr)
@ -259,11 +259,11 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
case ir.OCLOSURE:
if hasemptycvars(r) {
if base.Debug.Closure > 0 {
base.WarnfAt(r.Pos, "closure converted to global")
base.WarnfAt(r.Pos(), "closure converted to global")
}
// Closures with no captured variables are globals,
// so the assignment can be done at link time.
pfuncsym(l, r.Func.Nname)
pfuncsym(l, r.Func().Nname)
return true
}
closuredebugruntimecheck(r)
@ -274,43 +274,43 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
// Determine the underlying concrete type and value we are converting from.
val := r
for val.Op == ir.OCONVIFACE {
val = val.Left
for val.Op() == ir.OCONVIFACE {
val = val.Left()
}
if val.Type.IsInterface() {
if val.Type().IsInterface() {
// val is an interface type.
// If val is nil, we can statically initialize l;
// both words are zero and so there no work to do, so report success.
// If val is non-nil, we have no concrete type to record,
// and we won't be able to statically initialize its value, so report failure.
return val.Op == ir.ONIL
return val.Op() == ir.ONIL
}
markTypeUsedInInterface(val.Type, l.Sym.Linksym())
markTypeUsedInInterface(val.Type(), l.Sym().Linksym())
var itab *ir.Node
if l.Type.IsEmptyInterface() {
itab = typename(val.Type)
if l.Type().IsEmptyInterface() {
itab = typename(val.Type())
} else {
itab = itabname(val.Type, l.Type)
itab = itabname(val.Type(), l.Type())
}
// Create a copy of l to modify while we emit data.
n := ir.Copy(l)
// Emit itab, advance offset.
addrsym(n, itab.Left) // itab is an OADDR node
n.Xoffset = n.Xoffset + int64(Widthptr)
addrsym(n, itab.Left()) // itab is an OADDR node
n.SetOffset(n.Offset() + int64(Widthptr))
// Emit data.
if isdirectiface(val.Type) {
if val.Op == ir.ONIL {
if isdirectiface(val.Type()) {
if val.Op() == ir.ONIL {
// Nil is zero, nothing to do.
return true
}
// Copy val directly into n.
n.Type = val.Type
n.SetType(val.Type())
setlineno(val)
a := ir.SepCopy(n)
if !s.staticassign(a, val) {
@ -318,7 +318,7 @@ func (s *InitSchedule) staticassign(l *ir.Node, r *ir.Node) bool {
}
} else {
// Construct temp to hold val, write pointer to temp into n.
a := staticname(val.Type)
a := staticname(val.Type())
s.inittemps[val] = a
if !s.staticassign(a, val) {
s.append(ir.Nod(ir.OAS, a, val))
@ -372,7 +372,7 @@ func staticname(t *types.Type) *ir.Node {
n := NewName(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
statuniqgen++
addvar(n, t, ir.PEXTERN)
n.Sym.Linksym().Set(obj.AttrLocal, true)
n.Sym().Linksym().Set(obj.AttrLocal, true)
return n
}
@ -380,12 +380,12 @@ func staticname(t *types.Type) *ir.Node {
func readonlystaticname(t *types.Type) *ir.Node {
n := staticname(t)
n.MarkReadonly()
n.Sym.Linksym().Set(obj.AttrContentAddressable, true)
n.Sym().Linksym().Set(obj.AttrContentAddressable, true)
return n
}
func isSimpleName(n *ir.Node) bool {
return (n.Op == ir.ONAME || n.Op == ir.OMETHEXPR) && n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN
return (n.Op() == ir.ONAME || n.Op() == ir.OMETHEXPR) && n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN
}
func litas(l *ir.Node, r *ir.Node, init *ir.Nodes) {
@ -406,7 +406,7 @@ const (
// getdyn calculates the initGenType for n.
// If top is false, getdyn is recursing.
func getdyn(n *ir.Node, top bool) initGenType {
switch n.Op {
switch n.Op() {
default:
if isGoConst(n) {
return initConst
@ -417,7 +417,7 @@ func getdyn(n *ir.Node, top bool) initGenType {
if !top {
return initDynamic
}
if n.Right.Int64Val()/4 > int64(n.List.Len()) {
if n.Right().Int64Val()/4 > int64(n.List().Len()) {
// <25% of entries have explicit values.
// Very rough estimation, it takes 4 bytes of instructions
// to initialize 1 byte of result. So don't use a static
@ -431,12 +431,12 @@ func getdyn(n *ir.Node, top bool) initGenType {
}
var mode initGenType
for _, n1 := range n.List.Slice() {
switch n1.Op {
for _, n1 := range n.List().Slice() {
switch n1.Op() {
case ir.OKEY:
n1 = n1.Right
n1 = n1.Right()
case ir.OSTRUCTKEY:
n1 = n1.Left
n1 = n1.Left()
}
mode |= getdyn(n1, false)
if mode == initDynamic|initConst {
@ -448,13 +448,13 @@ func getdyn(n *ir.Node, top bool) initGenType {
// isStaticCompositeLiteral reports whether n is a compile-time constant.
func isStaticCompositeLiteral(n *ir.Node) bool {
switch n.Op {
switch n.Op() {
case ir.OSLICELIT:
return false
case ir.OARRAYLIT:
for _, r := range n.List.Slice() {
if r.Op == ir.OKEY {
r = r.Right
for _, r := range n.List().Slice() {
if r.Op() == ir.OKEY {
r = r.Right()
}
if !isStaticCompositeLiteral(r) {
return false
@ -462,11 +462,11 @@ func isStaticCompositeLiteral(n *ir.Node) bool {
}
return true
case ir.OSTRUCTLIT:
for _, r := range n.List.Slice() {
if r.Op != ir.OSTRUCTKEY {
for _, r := range n.List().Slice() {
if r.Op() != ir.OSTRUCTKEY {
base.Fatalf("isStaticCompositeLiteral: rhs not OSTRUCTKEY: %v", r)
}
if !isStaticCompositeLiteral(r.Left) {
if !isStaticCompositeLiteral(r.Left()) {
return false
}
}
@ -476,13 +476,13 @@ func isStaticCompositeLiteral(n *ir.Node) bool {
case ir.OCONVIFACE:
// See staticassign's OCONVIFACE case for comments.
val := n
for val.Op == ir.OCONVIFACE {
val = val.Left
for val.Op() == ir.OCONVIFACE {
val = val.Left()
}
if val.Type.IsInterface() {
return val.Op == ir.ONIL
if val.Type().IsInterface() {
return val.Op() == ir.ONIL
}
if isdirectiface(val.Type) && val.Op == ir.ONIL {
if isdirectiface(val.Type()) && val.Op() == ir.ONIL {
return true
}
return isStaticCompositeLiteral(val)
@ -512,16 +512,16 @@ const (
func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
isBlank := var_ == ir.BlankNode
var splitnode func(*ir.Node) (a *ir.Node, value *ir.Node)
switch n.Op {
switch n.Op() {
case ir.OARRAYLIT, ir.OSLICELIT:
var k int64
splitnode = func(r *ir.Node) (*ir.Node, *ir.Node) {
if r.Op == ir.OKEY {
k = indexconst(r.Left)
if r.Op() == ir.OKEY {
k = indexconst(r.Left())
if k < 0 {
base.Fatalf("fixedlit: invalid index %v", r.Left)
base.Fatalf("fixedlit: invalid index %v", r.Left())
}
r = r.Right
r = r.Right()
}
a := ir.Nod(ir.OINDEX, var_, nodintconst(k))
k++
@ -532,26 +532,26 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *
}
case ir.OSTRUCTLIT:
splitnode = func(r *ir.Node) (*ir.Node, *ir.Node) {
if r.Op != ir.OSTRUCTKEY {
if r.Op() != ir.OSTRUCTKEY {
base.Fatalf("fixedlit: rhs not OSTRUCTKEY: %v", r)
}
if r.Sym.IsBlank() || isBlank {
return ir.BlankNode, r.Left
if r.Sym().IsBlank() || isBlank {
return ir.BlankNode, r.Left()
}
setlineno(r)
return nodSym(ir.ODOT, var_, r.Sym), r.Left
return nodSym(ir.ODOT, var_, r.Sym()), r.Left()
}
default:
base.Fatalf("fixedlit bad op: %v", n.Op)
base.Fatalf("fixedlit bad op: %v", n.Op())
}
for _, r := range n.List.Slice() {
for _, r := range n.List().Slice() {
a, value := splitnode(r)
if a == ir.BlankNode && candiscard(value) {
continue
}
switch value.Op {
switch value.Op() {
case ir.OSLICELIT:
if (kind == initKindStatic && ctxt == inNonInitFunction) || (kind == initKindDynamic && ctxt == inInitFunction) {
slicelit(ctxt, value, a, init)
@ -587,18 +587,18 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.Node, var_ *ir.Node, init *
}
func isSmallSliceLit(n *ir.Node) bool {
if n.Op != ir.OSLICELIT {
if n.Op() != ir.OSLICELIT {
return false
}
r := n.Right
r := n.Right()
return smallintconst(r) && (n.Type.Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type.Elem().Width)
return smallintconst(r) && (n.Type().Elem().Width == 0 || r.Int64Val() <= smallArrayBytes/n.Type().Elem().Width)
}
func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
// make an array type corresponding the number of elements we have
t := types.NewArray(n.Type.Elem(), n.Right.Int64Val())
t := types.NewArray(n.Type().Elem(), n.Right().Int64Val())
dowidth(t)
if ctxt == inNonInitFunction {
@ -658,7 +658,7 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
var a *ir.Node
if x := prealloc[n]; x != nil {
// temp allocated during order.go for dddarg
if !types.Identical(t, x.Type) {
if !types.Identical(t, x.Type()) {
panic("dotdotdot base type does not match order's assigned type")
}
@ -673,13 +673,13 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
}
a = ir.Nod(ir.OADDR, x, nil)
} else if n.Esc == EscNone {
} else if n.Esc() == EscNone {
a = temp(t)
if vstat == nil {
a = ir.Nod(ir.OAS, temp(t), nil)
a = typecheck(a, ctxStmt)
init.Append(a) // zero new temp
a = a.Left
a = a.Left()
} else {
init.Append(ir.Nod(ir.OVARDEF, a, nil))
}
@ -687,7 +687,7 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
a = ir.Nod(ir.OADDR, a, nil)
} else {
a = ir.Nod(ir.ONEW, nil, nil)
a.List.Set1(typenod(t))
a.PtrList().Set1(typenod(t))
}
a = ir.Nod(ir.OAS, vauto, a)
@ -707,13 +707,13 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
// put dynamics into array (5)
var index int64
for _, value := range n.List.Slice() {
if value.Op == ir.OKEY {
index = indexconst(value.Left)
for _, value := range n.List().Slice() {
if value.Op() == ir.OKEY {
index = indexconst(value.Left())
if index < 0 {
base.Fatalf("slicelit: invalid index %v", value.Left)
base.Fatalf("slicelit: invalid index %v", value.Left())
}
value = value.Right
value = value.Right()
}
a := ir.Nod(ir.OINDEX, vauto, nodintconst(index))
a.SetBounded(true)
@ -721,7 +721,7 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
// TODO need to check bounds?
switch value.Op {
switch value.Op() {
case ir.OSLICELIT:
break
@ -762,16 +762,16 @@ func slicelit(ctxt initContext, n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
// make the map var
a := ir.Nod(ir.OMAKE, nil, nil)
a.Esc = n.Esc
a.List.Set2(typenod(n.Type), nodintconst(int64(n.List.Len())))
a.SetEsc(n.Esc())
a.PtrList().Set2(typenod(n.Type()), nodintconst(int64(n.List().Len())))
litas(m, a, init)
entries := n.List.Slice()
entries := n.List().Slice()
// The order pass already removed any dynamic (runtime-computed) entries.
// All remaining entries are static. Double-check that.
for _, r := range entries {
if !isStaticCompositeLiteral(r.Left) || !isStaticCompositeLiteral(r.Right) {
if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
base.Fatalf("maplit: entry is not a literal: %v", r)
}
}
@ -780,8 +780,8 @@ func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
// For a large number of entries, put them in an array and loop.
// build types [count]Tindex and [count]Tvalue
tk := types.NewArray(n.Type.Key(), int64(len(entries)))
te := types.NewArray(n.Type.Elem(), int64(len(entries)))
tk := types.NewArray(n.Type().Key(), int64(len(entries)))
te := types.NewArray(n.Type().Elem(), int64(len(entries)))
tk.SetNoalg(true)
te.SetNoalg(true)
@ -796,8 +796,8 @@ func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
datak := ir.Nod(ir.OARRAYLIT, nil, nil)
datae := ir.Nod(ir.OARRAYLIT, nil, nil)
for _, r := range entries {
datak.List.Append(r.Left)
datae.List.Append(r.Right)
datak.PtrList().Append(r.Left())
datae.PtrList().Append(r.Right())
}
fixedlit(inInitFunction, initKindStatic, datak, vstatk, init)
fixedlit(inInitFunction, initKindStatic, datae, vstate, init)
@ -820,8 +820,8 @@ func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
body := ir.Nod(ir.OAS, lhs, rhs)
loop := ir.Nod(ir.OFOR, cond, incr)
loop.Nbody.Set1(body)
loop.Ninit.Set1(zero)
loop.PtrBody().Set1(body)
loop.PtrInit().Set1(zero)
loop = typecheck(loop, ctxStmt)
loop = walkstmt(loop)
@ -833,11 +833,11 @@ func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
// Build list of var[c] = expr.
// Use temporaries so that mapassign1 can have addressable key, elem.
// TODO(josharian): avoid map key temporaries for mapfast_* assignments with literal keys.
tmpkey := temp(m.Type.Key())
tmpelem := temp(m.Type.Elem())
tmpkey := temp(m.Type().Key())
tmpelem := temp(m.Type().Elem())
for _, r := range entries {
index, elem := r.Left, r.Right
index, elem := r.Left(), r.Right()
setlineno(index)
a := ir.Nod(ir.OAS, tmpkey, index)
@ -867,10 +867,10 @@ func maplit(n *ir.Node, m *ir.Node, init *ir.Nodes) {
}
func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
t := n.Type
switch n.Op {
t := n.Type()
switch n.Op() {
default:
base.Fatalf("anylit: not lit, op=%v node=%v", n.Op, n)
base.Fatalf("anylit: not lit, op=%v node=%v", n.Op(), n)
case ir.ONAME, ir.OMETHEXPR:
a := ir.Nod(ir.OAS, var_, n)
@ -883,16 +883,16 @@ func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
}
var r *ir.Node
if n.Right != nil {
if n.Right() != nil {
// n.Right is stack temporary used as backing store.
init.Append(ir.Nod(ir.OAS, n.Right, nil)) // zero backing store, just in case (#18410)
r = ir.Nod(ir.OADDR, n.Right, nil)
init.Append(ir.Nod(ir.OAS, n.Right(), nil)) // zero backing store, just in case (#18410)
r = ir.Nod(ir.OADDR, n.Right(), nil)
r = typecheck(r, ctxExpr)
} else {
r = ir.Nod(ir.ONEW, nil, nil)
r.SetTypecheck(1)
r.Type = t
r.Esc = n.Esc
r.SetType(t)
r.SetEsc(n.Esc())
}
r = walkexpr(r, init)
@ -903,19 +903,19 @@ func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
var_ = ir.Nod(ir.ODEREF, var_, nil)
var_ = typecheck(var_, ctxExpr|ctxAssign)
anylit(n.Left, var_, init)
anylit(n.Left(), var_, init)
case ir.OSTRUCTLIT, ir.OARRAYLIT:
if !t.IsStruct() && !t.IsArray() {
base.Fatalf("anylit: not struct/array")
}
if isSimpleName(var_) && n.List.Len() > 4 {
if isSimpleName(var_) && n.List().Len() > 4 {
// lay out static data
vstat := readonlystaticname(t)
ctxt := inInitFunction
if n.Op == ir.OARRAYLIT {
if n.Op() == ir.OARRAYLIT {
ctxt = inNonInitFunction
}
fixedlit(ctxt, initKindStatic, n, vstat, init)
@ -933,13 +933,13 @@ func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
}
var components int64
if n.Op == ir.OARRAYLIT {
if n.Op() == ir.OARRAYLIT {
components = t.NumElem()
} else {
components = int64(t.NumFields())
}
// initialization of an array or struct with unspecified components (missing fields or arrays)
if isSimpleName(var_) || int64(n.List.Len()) < components {
if isSimpleName(var_) || int64(n.List().Len()) < components {
a := ir.Nod(ir.OAS, var_, nil)
a = typecheck(a, ctxStmt)
a = walkexpr(a, init)
@ -960,38 +960,38 @@ func anylit(n *ir.Node, var_ *ir.Node, init *ir.Nodes) {
}
func oaslit(n *ir.Node, init *ir.Nodes) bool {
if n.Left == nil || n.Right == nil {
if n.Left() == nil || n.Right() == nil {
// not a special composite literal assignment
return false
}
if n.Left.Type == nil || n.Right.Type == nil {
if n.Left().Type() == nil || n.Right().Type() == nil {
// not a special composite literal assignment
return false
}
if !isSimpleName(n.Left) {
if !isSimpleName(n.Left()) {
// not a special composite literal assignment
return false
}
if !types.Identical(n.Left.Type, n.Right.Type) {
if !types.Identical(n.Left().Type(), n.Right().Type()) {
// not a special composite literal assignment
return false
}
switch n.Right.Op {
switch n.Right().Op() {
default:
// not a special composite literal assignment
return false
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
if vmatch1(n.Left, n.Right) {
if vmatch1(n.Left(), n.Right()) {
// not a special composite literal assignment
return false
}
anylit(n.Right, n.Left, init)
anylit(n.Right(), n.Left(), init)
}
n.Op = ir.OEMPTY
n.Right = nil
n.SetOp(ir.OEMPTY)
n.SetRight(nil)
return true
}
@ -1008,38 +1008,38 @@ func stataddr(n *ir.Node) *ir.Node {
return nil
}
switch n.Op {
switch n.Op() {
case ir.ONAME, ir.OMETHEXPR:
return ir.SepCopy(n)
case ir.ODOT:
nam := stataddr(n.Left)
nam := stataddr(n.Left())
if nam == nil {
break
}
nam.Xoffset = nam.Xoffset + n.Xoffset
nam.Type = n.Type
nam.SetOffset(nam.Offset() + n.Offset())
nam.SetType(n.Type())
return nam
case ir.OINDEX:
if n.Left.Type.IsSlice() {
if n.Left().Type().IsSlice() {
break
}
nam := stataddr(n.Left)
nam := stataddr(n.Left())
if nam == nil {
break
}
l := getlit(n.Right)
l := getlit(n.Right())
if l < 0 {
break
}
// Check for overflow.
if n.Type.Width != 0 && thearch.MAXWIDTH/n.Type.Width <= int64(l) {
if n.Type().Width != 0 && thearch.MAXWIDTH/n.Type().Width <= int64(l) {
break
}
nam.Xoffset = nam.Xoffset + int64(l)*n.Type.Width
nam.Type = n.Type
nam.SetOffset(nam.Offset() + int64(l)*n.Type().Width)
nam.SetType(n.Type())
return nam
}
@ -1052,41 +1052,41 @@ func (s *InitSchedule) initplan(n *ir.Node) {
}
p := new(InitPlan)
s.initplans[n] = p
switch n.Op {
switch n.Op() {
default:
base.Fatalf("initplan")
case ir.OARRAYLIT, ir.OSLICELIT:
var k int64
for _, a := range n.List.Slice() {
if a.Op == ir.OKEY {
k = indexconst(a.Left)
for _, a := range n.List().Slice() {
if a.Op() == ir.OKEY {
k = indexconst(a.Left())
if k < 0 {
base.Fatalf("initplan arraylit: invalid index %v", a.Left)
base.Fatalf("initplan arraylit: invalid index %v", a.Left())
}
a = a.Right
a = a.Right()
}
s.addvalue(p, k*n.Type.Elem().Width, a)
s.addvalue(p, k*n.Type().Elem().Width, a)
k++
}
case ir.OSTRUCTLIT:
for _, a := range n.List.Slice() {
if a.Op != ir.OSTRUCTKEY {
for _, a := range n.List().Slice() {
if a.Op() != ir.OSTRUCTKEY {
base.Fatalf("initplan structlit")
}
if a.Sym.IsBlank() {
if a.Sym().IsBlank() {
continue
}
s.addvalue(p, a.Xoffset, a.Left)
s.addvalue(p, a.Offset(), a.Left())
}
case ir.OMAPLIT:
for _, a := range n.List.Slice() {
if a.Op != ir.OKEY {
for _, a := range n.List().Slice() {
if a.Op() != ir.OKEY {
base.Fatalf("initplan maplit")
}
s.addvalue(p, -1, a.Right)
s.addvalue(p, -1, a.Right())
}
}
}
@ -1114,7 +1114,7 @@ func (s *InitSchedule) addvalue(p *InitPlan, xoffset int64, n *ir.Node) {
}
func isZero(n *ir.Node) bool {
switch n.Op {
switch n.Op() {
case ir.ONIL:
return true
@ -1129,9 +1129,9 @@ func isZero(n *ir.Node) bool {
}
case ir.OARRAYLIT:
for _, n1 := range n.List.Slice() {
if n1.Op == ir.OKEY {
n1 = n1.Right
for _, n1 := range n.List().Slice() {
if n1.Op() == ir.OKEY {
n1 = n1.Right()
}
if !isZero(n1) {
return false
@ -1140,8 +1140,8 @@ func isZero(n *ir.Node) bool {
return true
case ir.OSTRUCTLIT:
for _, n1 := range n.List.Slice() {
if !isZero(n1.Left) {
for _, n1 := range n.List().Slice() {
if !isZero(n1.Left()) {
return false
}
}
@ -1152,25 +1152,25 @@ func isZero(n *ir.Node) bool {
}
func isvaluelit(n *ir.Node) bool {
return n.Op == ir.OARRAYLIT || n.Op == ir.OSTRUCTLIT
return n.Op() == ir.OARRAYLIT || n.Op() == ir.OSTRUCTLIT
}
func genAsStatic(as *ir.Node) {
if as.Left.Type == nil {
if as.Left().Type() == nil {
base.Fatalf("genAsStatic as.Left not typechecked")
}
nam := stataddr(as.Left)
if nam == nil || (nam.Class() != ir.PEXTERN && as.Left != ir.BlankNode) {
base.Fatalf("genAsStatic: lhs %v", as.Left)
nam := stataddr(as.Left())
if nam == nil || (nam.Class() != ir.PEXTERN && as.Left() != ir.BlankNode) {
base.Fatalf("genAsStatic: lhs %v", as.Left())
}
switch {
case as.Right.Op == ir.OLITERAL:
litsym(nam, as.Right, int(as.Right.Type.Width))
case (as.Right.Op == ir.ONAME || as.Right.Op == ir.OMETHEXPR) && as.Right.Class() == ir.PFUNC:
pfuncsym(nam, as.Right)
case as.Right().Op() == ir.OLITERAL:
litsym(nam, as.Right(), int(as.Right().Type().Width))
case (as.Right().Op() == ir.ONAME || as.Right().Op() == ir.OMETHEXPR) && as.Right().Class() == ir.PFUNC:
pfuncsym(nam, as.Right())
default:
base.Fatalf("genAsStatic: rhs %v", as.Right)
base.Fatalf("genAsStatic: rhs %v", as.Right())
}
}

File diff suppressed because it is too large Load diff

View file

@ -41,16 +41,16 @@ var (
// whose Pos will point back to their declaration position rather than
// their usage position.
func hasUniquePos(n *ir.Node) bool {
switch n.Op {
switch n.Op() {
case ir.ONAME, ir.OPACK:
return false
case ir.OLITERAL, ir.ONIL, ir.OTYPE:
if n.Sym != nil {
if n.Sym() != nil {
return false
}
}
if !n.Pos.IsKnown() {
if !n.Pos().IsKnown() {
if base.Flag.K != 0 {
base.Warn("setlineno: unknown position (line 0)")
}
@ -63,7 +63,7 @@ func hasUniquePos(n *ir.Node) bool {
func setlineno(n *ir.Node) src.XPos {
lno := base.Pos
if n != nil && hasUniquePos(n) {
base.Pos = n.Pos
base.Pos = n.Pos()
}
return lno
}
@ -95,8 +95,8 @@ func autolabel(prefix string) *types.Sym {
if Curfn == nil {
base.Fatalf("autolabel outside function")
}
n := fn.Func.Label
fn.Func.Label++
n := fn.Func().Label
fn.Func().Label++
return lookupN(prefix, int(n))
}
@ -120,25 +120,25 @@ func importdot(opkg *types.Pkg, pack *ir.Node) {
s1.Def = s.Def
s1.Block = s.Block
if ir.AsNode(s1.Def).Name == nil {
if ir.AsNode(s1.Def).Name() == nil {
ir.Dump("s1def", ir.AsNode(s1.Def))
base.Fatalf("missing Name")
}
ir.AsNode(s1.Def).Name.Pack = pack
ir.AsNode(s1.Def).Name().Pack = pack
s1.Origpkg = opkg
n++
}
if n == 0 {
// can't possibly be used - there were no symbols
base.ErrorfAt(pack.Pos, "imported and not used: %q", opkg.Path)
base.ErrorfAt(pack.Pos(), "imported and not used: %q", opkg.Path)
}
}
// newname returns a new ONAME Node associated with symbol s.
func NewName(s *types.Sym) *ir.Node {
n := ir.NewNameAt(base.Pos, s)
n.Name.Curfn = Curfn
n.Name().Curfn = Curfn
return n
}
@ -152,7 +152,7 @@ func nodSym(op ir.Op, left *ir.Node, sym *types.Sym) *ir.Node {
// and the Sym field set to sym. This is for ODOT and friends.
func nodlSym(pos src.XPos, op ir.Op, left *ir.Node, sym *types.Sym) *ir.Node {
n := ir.NodAt(pos, op, left, nil)
n.Sym = sym
n.SetSym(sym)
return n
}
@ -169,7 +169,7 @@ func nodintconst(v int64) *ir.Node {
func nodnil() *ir.Node {
n := ir.Nod(ir.ONIL, nil, nil)
n.Type = types.Types[types.TNIL]
n.SetType(types.Types[types.TNIL])
return n
}
@ -190,16 +190,16 @@ func treecopy(n *ir.Node, pos src.XPos) *ir.Node {
return nil
}
switch n.Op {
switch n.Op() {
default:
m := ir.SepCopy(n)
m.Left = treecopy(n.Left, pos)
m.Right = treecopy(n.Right, pos)
m.List.Set(listtreecopy(n.List.Slice(), pos))
m.SetLeft(treecopy(n.Left(), pos))
m.SetRight(treecopy(n.Right(), pos))
m.PtrList().Set(listtreecopy(n.List().Slice(), pos))
if pos.IsKnown() {
m.Pos = pos
m.SetPos(pos)
}
if m.Name != nil && n.Op != ir.ODCLFIELD {
if m.Name() != nil && n.Op() != ir.ODCLFIELD {
ir.Dump("treecopy", n)
base.Fatalf("treecopy Name")
}
@ -517,16 +517,16 @@ func assignconv(n *ir.Node, t *types.Type, context string) *ir.Node {
// Convert node n for assignment to type t.
func assignconvfn(n *ir.Node, t *types.Type, context func() string) *ir.Node {
if n == nil || n.Type == nil || n.Type.Broke() {
if n == nil || n.Type() == nil || n.Type().Broke() {
return n
}
if t.Etype == types.TBLANK && n.Type.Etype == types.TNIL {
if t.Etype == types.TBLANK && n.Type().Etype == types.TNIL {
base.Errorf("use of untyped nil")
}
n = convlit1(n, t, false, context)
if n.Type == nil {
if n.Type() == nil {
return n
}
if t.Etype == types.TBLANK {
@ -535,31 +535,31 @@ func assignconvfn(n *ir.Node, t *types.Type, context func() string) *ir.Node {
// Convert ideal bool from comparison to plain bool
// if the next step is non-bool (like interface{}).
if n.Type == types.UntypedBool && !t.IsBoolean() {
if n.Op == ir.ONAME || n.Op == ir.OLITERAL {
if n.Type() == types.UntypedBool && !t.IsBoolean() {
if n.Op() == ir.ONAME || n.Op() == ir.OLITERAL {
r := ir.Nod(ir.OCONVNOP, n, nil)
r.Type = types.Types[types.TBOOL]
r.SetType(types.Types[types.TBOOL])
r.SetTypecheck(1)
r.SetImplicit(true)
n = r
}
}
if types.Identical(n.Type, t) {
if types.Identical(n.Type(), t) {
return n
}
op, why := assignop(n.Type, t)
op, why := assignop(n.Type(), t)
if op == ir.OXXX {
base.Errorf("cannot use %L as type %v in %s%s", n, t, context(), why)
op = ir.OCONV
}
r := ir.Nod(op, n, nil)
r.Type = t
r.SetType(t)
r.SetTypecheck(1)
r.SetImplicit(true)
r.Orig = n.Orig
r.SetOrig(n.Orig())
return r
}
@ -572,27 +572,27 @@ func backingArrayPtrLen(n *ir.Node) (ptr, len *ir.Node) {
base.Fatalf("backingArrayPtrLen not cheap: %v", n)
}
ptr = ir.Nod(ir.OSPTR, n, nil)
if n.Type.IsString() {
ptr.Type = types.Types[types.TUINT8].PtrTo()
if n.Type().IsString() {
ptr.SetType(types.Types[types.TUINT8].PtrTo())
} else {
ptr.Type = n.Type.Elem().PtrTo()
ptr.SetType(n.Type().Elem().PtrTo())
}
len = ir.Nod(ir.OLEN, n, nil)
len.Type = types.Types[types.TINT]
len.SetType(types.Types[types.TINT])
return ptr, len
}
// labeledControl returns the control flow Node (for, switch, select)
// associated with the label n, if any.
func labeledControl(n *ir.Node) *ir.Node {
if n.Op != ir.OLABEL {
base.Fatalf("labeledControl %v", n.Op)
if n.Op() != ir.OLABEL {
base.Fatalf("labeledControl %v", n.Op())
}
ctl := n.Name.Defn
ctl := n.Name().Defn
if ctl == nil {
return nil
}
switch ctl.Op {
switch ctl.Op() {
case ir.OFOR, ir.OFORUNTIL, ir.OSWITCH, ir.OSELECT:
return ctl
}
@ -626,12 +626,12 @@ func updateHasCall(n *ir.Node) {
}
func calcHasCall(n *ir.Node) bool {
if n.Ninit.Len() != 0 {
if n.Init().Len() != 0 {
// TODO(mdempsky): This seems overly conservative.
return true
}
switch n.Op {
switch n.Op() {
case ir.OLITERAL, ir.ONIL, ir.ONAME, ir.OTYPE:
if n.HasCall() {
base.Fatalf("OLITERAL/ONAME/OTYPE should never have calls: %+v", n)
@ -653,23 +653,23 @@ func calcHasCall(n *ir.Node) bool {
// When using soft-float, these ops might be rewritten to function calls
// so we ensure they are evaluated first.
case ir.OADD, ir.OSUB, ir.ONEG, ir.OMUL:
if thearch.SoftFloat && (isFloat[n.Type.Etype] || isComplex[n.Type.Etype]) {
if thearch.SoftFloat && (isFloat[n.Type().Etype] || isComplex[n.Type().Etype]) {
return true
}
case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
if thearch.SoftFloat && (isFloat[n.Left.Type.Etype] || isComplex[n.Left.Type.Etype]) {
if thearch.SoftFloat && (isFloat[n.Left().Type().Etype] || isComplex[n.Left().Type().Etype]) {
return true
}
case ir.OCONV:
if thearch.SoftFloat && ((isFloat[n.Type.Etype] || isComplex[n.Type.Etype]) || (isFloat[n.Left.Type.Etype] || isComplex[n.Left.Type.Etype])) {
if thearch.SoftFloat && ((isFloat[n.Type().Etype] || isComplex[n.Type().Etype]) || (isFloat[n.Left().Type().Etype] || isComplex[n.Left().Type().Etype])) {
return true
}
}
if n.Left != nil && n.Left.HasCall() {
if n.Left() != nil && n.Left().HasCall() {
return true
}
if n.Right != nil && n.Right.HasCall() {
if n.Right() != nil && n.Right().HasCall() {
return true
}
return false
@ -745,45 +745,45 @@ func safeexpr(n *ir.Node, init *ir.Nodes) *ir.Node {
return nil
}
if n.Ninit.Len() != 0 {
walkstmtlist(n.Ninit.Slice())
init.AppendNodes(&n.Ninit)
if n.Init().Len() != 0 {
walkstmtlist(n.Init().Slice())
init.AppendNodes(n.PtrInit())
}
switch n.Op {
switch n.Op() {
case ir.ONAME, ir.OLITERAL, ir.ONIL:
return n
case ir.ODOT, ir.OLEN, ir.OCAP:
l := safeexpr(n.Left, init)
if l == n.Left {
l := safeexpr(n.Left(), init)
if l == n.Left() {
return n
}
r := ir.Copy(n)
r.Left = l
r.SetLeft(l)
r = typecheck(r, ctxExpr)
r = walkexpr(r, init)
return r
case ir.ODOTPTR, ir.ODEREF:
l := safeexpr(n.Left, init)
if l == n.Left {
l := safeexpr(n.Left(), init)
if l == n.Left() {
return n
}
a := ir.Copy(n)
a.Left = l
a.SetLeft(l)
a = walkexpr(a, init)
return a
case ir.OINDEX, ir.OINDEXMAP:
l := safeexpr(n.Left, init)
r := safeexpr(n.Right, init)
if l == n.Left && r == n.Right {
l := safeexpr(n.Left(), init)
r := safeexpr(n.Right(), init)
if l == n.Left() && r == n.Right() {
return n
}
a := ir.Copy(n)
a.Left = l
a.Right = r
a.SetLeft(l)
a.SetRight(r)
a = walkexpr(a, init)
return a
@ -812,12 +812,12 @@ func copyexpr(n *ir.Node, t *types.Type, init *ir.Nodes) *ir.Node {
// return side-effect free and cheap n, appending side effects to init.
// result may not be assignable.
func cheapexpr(n *ir.Node, init *ir.Nodes) *ir.Node {
switch n.Op {
switch n.Op() {
case ir.ONAME, ir.OLITERAL, ir.ONIL:
return n
}
return copyexpr(n, n.Type, init)
return copyexpr(n, n.Type(), init)
}
// Code to resolve elided DOTs in embedded types.
@ -958,20 +958,20 @@ func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (
// will give shortest unique addressing.
// modify the tree with missing type names.
func adddot(n *ir.Node) *ir.Node {
n.Left = typecheck(n.Left, ctxType|ctxExpr)
if n.Left.Diag() {
n.SetLeft(typecheck(n.Left(), ctxType|ctxExpr))
if n.Left().Diag() {
n.SetDiag(true)
}
t := n.Left.Type
t := n.Left().Type()
if t == nil {
return n
}
if n.Left.Op == ir.OTYPE {
if n.Left().Op() == ir.OTYPE {
return n
}
s := n.Sym
s := n.Sym()
if s == nil {
return n
}
@ -980,12 +980,12 @@ func adddot(n *ir.Node) *ir.Node {
case path != nil:
// rebuild elided dots
for c := len(path) - 1; c >= 0; c-- {
n.Left = nodSym(ir.ODOT, n.Left, path[c].field.Sym)
n.Left.SetImplicit(true)
n.SetLeft(nodSym(ir.ODOT, n.Left(), path[c].field.Sym))
n.Left().SetImplicit(true)
}
case ambig:
base.Errorf("ambiguous selector %v", n)
n.Left = nil
n.SetLeft(nil)
}
return n
@ -1127,7 +1127,7 @@ func structargs(tl *types.Type, mustname bool) []*ir.Node {
gen++
}
a := symfield(s, t.Type)
a.Pos = t.Pos
a.SetPos(t.Pos)
a.SetIsDDD(t.IsDDD())
args = append(args, a)
}
@ -1177,14 +1177,14 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
dclcontext = ir.PEXTERN
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.Left = namedfield(".this", rcvr)
tfn.List.Set(structargs(method.Type.Params(), true))
tfn.Rlist.Set(structargs(method.Type.Results(), false))
tfn.SetLeft(namedfield(".this", rcvr))
tfn.PtrList().Set(structargs(method.Type.Params(), true))
tfn.PtrRlist().Set(structargs(method.Type.Results(), false))
fn := dclfunc(newnam, tfn)
fn.Func.SetDupok(true)
fn.Func().SetDupok(true)
nthis := ir.AsNode(tfn.Type.Recv().Nname)
nthis := ir.AsNode(tfn.Type().Recv().Nname)
methodrcvr := method.Type.Recv().Type
@ -1192,10 +1192,10 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
if rcvr.IsPtr() && rcvr.Elem() == methodrcvr {
// generating wrapper from *T to T.
n := ir.Nod(ir.OIF, nil, nil)
n.Left = ir.Nod(ir.OEQ, nthis, nodnil())
n.SetLeft(ir.Nod(ir.OEQ, nthis, nodnil()))
call := ir.Nod(ir.OCALL, syslook("panicwrap"), nil)
n.Nbody.Set1(call)
fn.Nbody.Append(n)
n.PtrBody().Set1(call)
fn.PtrBody().Append(n)
}
dot := adddot(nodSym(ir.OXDOT, nthis, method.Sym))
@ -1209,29 +1209,29 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// value for that function.
if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) {
// generate tail call: adjust pointer receiver and jump to embedded method.
dot = dot.Left // skip final .M
dot = dot.Left() // skip final .M
// TODO(mdempsky): Remove dependency on dotlist.
if !dotlist[0].field.Type.IsPtr() {
dot = ir.Nod(ir.OADDR, dot, nil)
}
as := ir.Nod(ir.OAS, nthis, convnop(dot, rcvr))
fn.Nbody.Append(as)
fn.Nbody.Append(nodSym(ir.ORETJMP, nil, methodSym(methodrcvr, method.Sym)))
fn.PtrBody().Append(as)
fn.PtrBody().Append(nodSym(ir.ORETJMP, nil, methodSym(methodrcvr, method.Sym)))
} else {
fn.Func.SetWrapper(true) // ignore frame for panic+recover matching
fn.Func().SetWrapper(true) // ignore frame for panic+recover matching
call := ir.Nod(ir.OCALL, dot, nil)
call.List.Set(paramNnames(tfn.Type))
call.SetIsDDD(tfn.Type.IsVariadic())
call.PtrList().Set(paramNnames(tfn.Type()))
call.SetIsDDD(tfn.Type().IsVariadic())
if method.Type.NumResults() > 0 {
n := ir.Nod(ir.ORETURN, nil, nil)
n.List.Set1(call)
n.PtrList().Set1(call)
call = n
}
fn.Nbody.Append(call)
fn.PtrBody().Append(call)
}
if false && base.Flag.LowerR != 0 {
ir.DumpList("genwrapper body", fn.Nbody)
ir.DumpList("genwrapper body", fn.Body())
}
funcbody()
@ -1242,7 +1242,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
fn = typecheck(fn, ctxStmt)
Curfn = fn
typecheckslice(fn.Nbody.Slice(), ctxStmt)
typecheckslice(fn.Body().Slice(), ctxStmt)
// Inline calls within (*T).M wrappers. This is safe because we only
// generate those wrappers within the same compilation unit as (T).M.
@ -1269,13 +1269,13 @@ func hashmem(t *types.Type) *ir.Node {
n := NewName(sym)
setNodeNameFunc(n)
n.Type = functype(nil, []*ir.Node{
n.SetType(functype(nil, []*ir.Node{
anonfield(types.NewPtr(t)),
anonfield(types.Types[types.TUINTPTR]),
anonfield(types.Types[types.TUINTPTR]),
}, []*ir.Node{
anonfield(types.Types[types.TUINTPTR]),
})
}))
return n
}
@ -1403,16 +1403,16 @@ func listtreecopy(l []*ir.Node, pos src.XPos) []*ir.Node {
func liststmt(l []*ir.Node) *ir.Node {
n := ir.Nod(ir.OBLOCK, nil, nil)
n.List.Set(l)
n.PtrList().Set(l)
if len(l) != 0 {
n.Pos = l[0].Pos
n.SetPos(l[0].Pos())
}
return n
}
func ngotype(n *ir.Node) *types.Sym {
if n.Type != nil {
return typenamesym(n.Type)
if n.Type() != nil {
return typenamesym(n.Type())
}
return nil
}
@ -1426,11 +1426,11 @@ func addinit(n *ir.Node, init []*ir.Node) *ir.Node {
if ir.MayBeShared(n) {
// Introduce OCONVNOP to hold init list.
n = ir.Nod(ir.OCONVNOP, n, nil)
n.Type = n.Left.Type
n.SetType(n.Left().Type())
n.SetTypecheck(1)
}
n.Ninit.Prepend(init...)
n.PtrInit().Prepend(init...)
n.SetHasCall(true)
return n
}
@ -1520,9 +1520,9 @@ func isdirectiface(t *types.Type) bool {
// itabType loads the _type field from a runtime.itab struct.
func itabType(itab *ir.Node) *ir.Node {
typ := nodSym(ir.ODOTPTR, itab, nil)
typ.Type = types.NewPtr(types.Types[types.TUINT8])
typ.SetType(types.NewPtr(types.Types[types.TUINT8]))
typ.SetTypecheck(1)
typ.Xoffset = int64(Widthptr) // offset of _type in runtime.itab
typ.SetOffset(int64(Widthptr)) // offset of _type in runtime.itab
typ.SetBounded(true) // guaranteed not to fault
return typ
}
@ -1536,14 +1536,14 @@ func ifaceData(pos src.XPos, n *ir.Node, t *types.Type) *ir.Node {
}
ptr := nodlSym(pos, ir.OIDATA, n, nil)
if isdirectiface(t) {
ptr.Type = t
ptr.SetType(t)
ptr.SetTypecheck(1)
return ptr
}
ptr.Type = types.NewPtr(t)
ptr.SetType(types.NewPtr(t))
ptr.SetTypecheck(1)
ind := ir.NodAt(pos, ir.ODEREF, ptr, nil)
ind.Type = t
ind.SetType(t)
ind.SetTypecheck(1)
ind.SetBounded(true)
return ind
@ -1553,8 +1553,8 @@ func ifaceData(pos src.XPos, n *ir.Node, t *types.Type) *ir.Node {
// This is where t was declared or where it appeared as a type expression.
func typePos(t *types.Type) src.XPos {
n := ir.AsNode(t.Nod)
if n == nil || !n.Pos.IsKnown() {
if n == nil || !n.Pos().IsKnown() {
base.Fatalf("bad type: %v", t)
}
return n.Pos
return n.Pos()
}

View file

@ -16,8 +16,8 @@ import (
// typecheckswitch typechecks a switch statement.
func typecheckswitch(n *ir.Node) {
typecheckslice(n.Ninit.Slice(), ctxStmt)
if n.Left != nil && n.Left.Op == ir.OTYPESW {
typecheckslice(n.Init().Slice(), ctxStmt)
if n.Left() != nil && n.Left().Op() == ir.OTYPESW {
typecheckTypeSwitch(n)
} else {
typecheckExprSwitch(n)
@ -25,27 +25,27 @@ func typecheckswitch(n *ir.Node) {
}
func typecheckTypeSwitch(n *ir.Node) {
n.Left.Right = typecheck(n.Left.Right, ctxExpr)
t := n.Left.Right.Type
n.Left().SetRight(typecheck(n.Left().Right(), ctxExpr))
t := n.Left().Right().Type()
if t != nil && !t.IsInterface() {
base.ErrorfAt(n.Pos, "cannot type switch on non-interface value %L", n.Left.Right)
base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", n.Left().Right())
t = nil
}
// We don't actually declare the type switch's guarded
// declaration itself. So if there are no cases, we won't
// notice that it went unused.
if v := n.Left.Left; v != nil && !ir.IsBlank(v) && n.List.Len() == 0 {
base.ErrorfAt(v.Pos, "%v declared but not used", v.Sym)
if v := n.Left().Left(); v != nil && !ir.IsBlank(v) && n.List().Len() == 0 {
base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
}
var defCase, nilCase *ir.Node
var ts typeSet
for _, ncase := range n.List.Slice() {
ls := ncase.List.Slice()
for _, ncase := range n.List().Slice() {
ls := ncase.List().Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", ir.Line(defCase))
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
} else {
defCase = ncase
}
@ -54,7 +54,7 @@ func typecheckTypeSwitch(n *ir.Node) {
for i := range ls {
ls[i] = typecheck(ls[i], ctxExpr|ctxType)
n1 := ls[i]
if t == nil || n1.Type == nil {
if t == nil || n1.Type() == nil {
continue
}
@ -63,36 +63,36 @@ func typecheckTypeSwitch(n *ir.Node) {
switch {
case ir.IsNil(n1): // case nil:
if nilCase != nil {
base.ErrorfAt(ncase.Pos, "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
base.ErrorfAt(ncase.Pos(), "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
} else {
nilCase = ncase
}
case n1.Op != ir.OTYPE:
base.ErrorfAt(ncase.Pos, "%L is not a type", n1)
case !n1.Type.IsInterface() && !implements(n1.Type, t, &missing, &have, &ptr) && !missing.Broke():
case n1.Op() != ir.OTYPE:
base.ErrorfAt(ncase.Pos(), "%L is not a type", n1)
case !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke():
if have != nil && !have.Broke() {
base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", n.Left.Right, n1.Type, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", n.Left().Right(), n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else if ptr != 0 {
base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
" (%v method has pointer receiver)", n.Left.Right, n1.Type, missing.Sym)
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (%v method has pointer receiver)", n.Left().Right(), n1.Type(), missing.Sym)
} else {
base.ErrorfAt(ncase.Pos, "impossible type switch case: %L cannot have dynamic type %v"+
" (missing %v method)", n.Left.Right, n1.Type, missing.Sym)
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (missing %v method)", n.Left().Right(), n1.Type(), missing.Sym)
}
}
if n1.Op == ir.OTYPE {
ts.add(ncase.Pos, n1.Type)
if n1.Op() == ir.OTYPE {
ts.add(ncase.Pos(), n1.Type())
}
}
if ncase.Rlist.Len() != 0 {
if ncase.Rlist().Len() != 0 {
// Assign the clause variable's type.
vt := t
if len(ls) == 1 {
if ls[0].Op == ir.OTYPE {
vt = ls[0].Type
if ls[0].Op() == ir.OTYPE {
vt = ls[0].Type()
} else if !ir.IsNil(ls[0]) {
// Invalid single-type case;
// mark variable as broken.
@ -100,8 +100,8 @@ func typecheckTypeSwitch(n *ir.Node) {
}
}
nvar := ncase.Rlist.First()
nvar.Type = vt
nvar := ncase.Rlist().First()
nvar.SetType(vt)
if vt != nil {
nvar = typecheck(nvar, ctxExpr|ctxAssign)
} else {
@ -109,10 +109,10 @@ func typecheckTypeSwitch(n *ir.Node) {
nvar.SetTypecheck(1)
nvar.SetWalkdef(1)
}
ncase.Rlist.SetFirst(nvar)
ncase.Rlist().SetFirst(nvar)
}
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
typecheckslice(ncase.Body().Slice(), ctxStmt)
}
}
@ -146,10 +146,10 @@ func (s *typeSet) add(pos src.XPos, typ *types.Type) {
func typecheckExprSwitch(n *ir.Node) {
t := types.Types[types.TBOOL]
if n.Left != nil {
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, nil)
t = n.Left.Type
if n.Left() != nil {
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
t = n.Left().Type()
}
var nilonly string
@ -164,9 +164,9 @@ func typecheckExprSwitch(n *ir.Node) {
case !IsComparable(t):
if t.IsStruct() {
base.ErrorfAt(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, IncomparableField(t).Type)
base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Left(), IncomparableField(t).Type)
} else {
base.ErrorfAt(n.Pos, "cannot switch on %L", n.Left)
base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Left())
}
t = nil
}
@ -174,11 +174,11 @@ func typecheckExprSwitch(n *ir.Node) {
var defCase *ir.Node
var cs constSet
for _, ncase := range n.List.Slice() {
ls := ncase.List.Slice()
for _, ncase := range n.List().Slice() {
ls := ncase.List().Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", ir.Line(defCase))
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
} else {
defCase = ncase
}
@ -189,22 +189,22 @@ func typecheckExprSwitch(n *ir.Node) {
ls[i] = typecheck(ls[i], ctxExpr)
ls[i] = defaultlit(ls[i], t)
n1 := ls[i]
if t == nil || n1.Type == nil {
if t == nil || n1.Type() == nil {
continue
}
if nilonly != "" && !ir.IsNil(n1) {
base.ErrorfAt(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
} else if t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type) {
base.ErrorfAt(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left())
} else if t.IsInterface() && !n1.Type().IsInterface() && !IsComparable(n1.Type()) {
base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
} else {
op1, _ := assignop(n1.Type, t)
op2, _ := assignop(t, n1.Type)
op1, _ := assignop(n1.Type(), t)
op2, _ := assignop(t, n1.Type())
if op1 == ir.OXXX && op2 == ir.OXXX {
if n.Left != nil {
base.ErrorfAt(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
if n.Left() != nil {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left(), n1.Type(), t)
} else {
base.ErrorfAt(ncase.Pos, "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type)
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
}
}
}
@ -215,23 +215,23 @@ func typecheckExprSwitch(n *ir.Node) {
// case GOARCH == "arm" && GOARM == "5":
// case GOARCH == "arm":
// which would both evaluate to false for non-ARM compiles.
if !n1.Type.IsBoolean() {
cs.add(ncase.Pos, n1, "case", "switch")
if !n1.Type().IsBoolean() {
cs.add(ncase.Pos(), n1, "case", "switch")
}
}
typecheckslice(ncase.Nbody.Slice(), ctxStmt)
typecheckslice(ncase.Body().Slice(), ctxStmt)
}
}
// walkswitch walks a switch statement.
func walkswitch(sw *ir.Node) {
// Guard against double walk, see #25776.
if sw.List.Len() == 0 && sw.Nbody.Len() > 0 {
if sw.List().Len() == 0 && sw.Body().Len() > 0 {
return // Was fatal, but eliminating every possible source of double-walking is hard
}
if sw.Left != nil && sw.Left.Op == ir.OTYPESW {
if sw.Left() != nil && sw.Left().Op() == ir.OTYPESW {
walkTypeSwitch(sw)
} else {
walkExprSwitch(sw)
@ -243,8 +243,8 @@ func walkswitch(sw *ir.Node) {
func walkExprSwitch(sw *ir.Node) {
lno := setlineno(sw)
cond := sw.Left
sw.Left = nil
cond := sw.Left()
sw.SetLeft(nil)
// convert switch {...} to switch true {...}
if cond == nil {
@ -260,13 +260,13 @@ func walkExprSwitch(sw *ir.Node) {
// because walkexpr will lower the string
// conversion into a runtime call.
// See issue 24937 for more discussion.
if cond.Op == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
cond.Op = ir.OBYTES2STRTMP
if cond.Op() == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
cond.SetOp(ir.OBYTES2STRTMP)
}
cond = walkexpr(cond, &sw.Ninit)
if cond.Op != ir.OLITERAL && cond.Op != ir.ONIL {
cond = copyexpr(cond, cond.Type, &sw.Nbody)
cond = walkexpr(cond, sw.PtrInit())
if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
cond = copyexpr(cond, cond.Type(), sw.PtrBody())
}
base.Pos = lno
@ -277,43 +277,43 @@ func walkExprSwitch(sw *ir.Node) {
var defaultGoto *ir.Node
var body ir.Nodes
for _, ncase := range sw.List.Slice() {
for _, ncase := range sw.List().Slice() {
label := autolabel(".s")
jmp := npos(ncase.Pos, nodSym(ir.OGOTO, nil, label))
jmp := npos(ncase.Pos(), nodSym(ir.OGOTO, nil, label))
// Process case dispatch.
if ncase.List.Len() == 0 {
if ncase.List().Len() == 0 {
if defaultGoto != nil {
base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
for _, n1 := range ncase.List.Slice() {
s.Add(ncase.Pos, n1, jmp)
for _, n1 := range ncase.List().Slice() {
s.Add(ncase.Pos(), n1, jmp)
}
// Process body.
body.Append(npos(ncase.Pos, nodSym(ir.OLABEL, nil, label)))
body.Append(ncase.Nbody.Slice()...)
if fall, pos := hasFall(ncase.Nbody.Slice()); !fall {
body.Append(npos(ncase.Pos(), nodSym(ir.OLABEL, nil, label)))
body.Append(ncase.Body().Slice()...)
if fall, pos := hasFall(ncase.Body().Slice()); !fall {
br := ir.Nod(ir.OBREAK, nil, nil)
br.Pos = pos
br.SetPos(pos)
body.Append(br)
}
}
sw.List.Set(nil)
sw.PtrList().Set(nil)
if defaultGoto == nil {
br := ir.Nod(ir.OBREAK, nil, nil)
br.Pos = br.Pos.WithNotStmt()
br.SetPos(br.Pos().WithNotStmt())
defaultGoto = br
}
s.Emit(&sw.Nbody)
sw.Nbody.Append(defaultGoto)
sw.Nbody.AppendNodes(&body)
walkstmtlist(sw.Nbody.Slice())
s.Emit(sw.PtrBody())
sw.PtrBody().Append(defaultGoto)
sw.PtrBody().AppendNodes(&body)
walkstmtlist(sw.Body().Slice())
}
// An exprSwitch walks an expression switch.
@ -332,7 +332,7 @@ type exprClause struct {
func (s *exprSwitch) Add(pos src.XPos, expr, jmp *ir.Node) {
c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp}
if okforcmp[s.exprname.Type.Etype] && expr.Op == ir.OLITERAL {
if okforcmp[s.exprname.Type().Etype] && expr.Op() == ir.OLITERAL {
s.clauses = append(s.clauses, c)
return
}
@ -359,7 +359,7 @@ func (s *exprSwitch) flush() {
// (e.g., sort.Slice doesn't need to invoke the less function
// when there's only a single slice element).
if s.exprname.Type.IsString() && len(cc) >= 2 {
if s.exprname.Type().IsString() && len(cc) >= 2 {
// Sort strings by length and then by value. It is
// much cheaper to compare lengths than values, and
// all we need here is consistency. We respect this
@ -395,8 +395,8 @@ func (s *exprSwitch) flush() {
},
func(i int, nif *ir.Node) {
run := runs[i]
nif.Left = ir.Nod(ir.OEQ, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(run)))
s.search(run, &nif.Nbody)
nif.SetLeft(ir.Nod(ir.OEQ, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(run))))
s.search(run, nif.PtrBody())
},
)
return
@ -407,7 +407,7 @@ func (s *exprSwitch) flush() {
})
// Merge consecutive integer cases.
if s.exprname.Type.IsInteger() {
if s.exprname.Type().IsInteger() {
merged := cc[:1]
for _, c := range cc[1:] {
last := &merged[len(merged)-1]
@ -430,8 +430,8 @@ func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
},
func(i int, nif *ir.Node) {
c := &cc[i]
nif.Left = c.test(s.exprname)
nif.Nbody.Set1(c.jmp)
nif.SetLeft(c.test(s.exprname))
nif.PtrBody().Set1(c.jmp)
},
)
}
@ -445,7 +445,7 @@ func (c *exprClause) test(exprname *ir.Node) *ir.Node {
}
// Optimize "switch true { ...}" and "switch false { ... }".
if ir.IsConst(exprname, constant.Bool) && !c.lo.Type.IsInterface() {
if ir.IsConst(exprname, constant.Bool) && !c.lo.Type().IsInterface() {
if exprname.BoolVal() {
return c.lo
} else {
@ -464,12 +464,12 @@ func allCaseExprsAreSideEffectFree(sw *ir.Node) bool {
// Restricting to constants is simple and probably powerful
// enough.
for _, ncase := range sw.List.Slice() {
if ncase.Op != ir.OCASE {
base.Fatalf("switch string(byteslice) bad op: %v", ncase.Op)
for _, ncase := range sw.List().Slice() {
if ncase.Op() != ir.OCASE {
base.Fatalf("switch string(byteslice) bad op: %v", ncase.Op())
}
for _, v := range ncase.List.Slice() {
if v.Op != ir.OLITERAL {
for _, v := range ncase.List().Slice() {
if v.Op() != ir.OLITERAL {
return false
}
}
@ -486,24 +486,24 @@ func hasFall(stmts []*ir.Node) (bool, src.XPos) {
// nodes will be at the end of the list.
i := len(stmts) - 1
for i >= 0 && stmts[i].Op == ir.OVARKILL {
for i >= 0 && stmts[i].Op() == ir.OVARKILL {
i--
}
if i < 0 {
return false, src.NoXPos
}
return stmts[i].Op == ir.OFALL, stmts[i].Pos
return stmts[i].Op() == ir.OFALL, stmts[i].Pos()
}
// walkTypeSwitch generates an AST that implements sw, where sw is a
// type switch.
func walkTypeSwitch(sw *ir.Node) {
var s typeSwitch
s.facename = sw.Left.Right
sw.Left = nil
s.facename = sw.Left().Right()
sw.SetLeft(nil)
s.facename = walkexpr(s.facename, &sw.Ninit)
s.facename = copyexpr(s.facename, s.facename.Type, &sw.Nbody)
s.facename = walkexpr(s.facename, sw.PtrInit())
s.facename = copyexpr(s.facename, s.facename.Type(), sw.PtrBody())
s.okname = temp(types.Types[types.TBOOL])
// Get interface descriptor word.
@ -518,54 +518,54 @@ func walkTypeSwitch(sw *ir.Node) {
// h := e._type.hash
// Use a similar strategy for non-empty interfaces.
ifNil := ir.Nod(ir.OIF, nil, nil)
ifNil.Left = ir.Nod(ir.OEQ, itab, nodnil())
ifNil.SetLeft(ir.Nod(ir.OEQ, itab, nodnil()))
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
ifNil.Left = typecheck(ifNil.Left, ctxExpr)
ifNil.Left = defaultlit(ifNil.Left, nil)
ifNil.SetLeft(typecheck(ifNil.Left(), ctxExpr))
ifNil.SetLeft(defaultlit(ifNil.Left(), nil))
// ifNil.Nbody assigned at end.
sw.Nbody.Append(ifNil)
sw.PtrBody().Append(ifNil)
// Load hash from type or itab.
dotHash := nodSym(ir.ODOTPTR, itab, nil)
dotHash.Type = types.Types[types.TUINT32]
dotHash.SetType(types.Types[types.TUINT32])
dotHash.SetTypecheck(1)
if s.facename.Type.IsEmptyInterface() {
dotHash.Xoffset = int64(2 * Widthptr) // offset of hash in runtime._type
if s.facename.Type().IsEmptyInterface() {
dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime._type
} else {
dotHash.Xoffset = int64(2 * Widthptr) // offset of hash in runtime.itab
dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime.itab
}
dotHash.SetBounded(true) // guaranteed not to fault
s.hashname = copyexpr(dotHash, dotHash.Type, &sw.Nbody)
s.hashname = copyexpr(dotHash, dotHash.Type(), sw.PtrBody())
br := ir.Nod(ir.OBREAK, nil, nil)
var defaultGoto, nilGoto *ir.Node
var body ir.Nodes
for _, ncase := range sw.List.Slice() {
for _, ncase := range sw.List().Slice() {
var caseVar *ir.Node
if ncase.Rlist.Len() != 0 {
caseVar = ncase.Rlist.First()
if ncase.Rlist().Len() != 0 {
caseVar = ncase.Rlist().First()
}
// For single-type cases with an interface type,
// we initialize the case variable as part of the type assertion.
// In other cases, we initialize it in the body.
var singleType *types.Type
if ncase.List.Len() == 1 && ncase.List.First().Op == ir.OTYPE {
singleType = ncase.List.First().Type
if ncase.List().Len() == 1 && ncase.List().First().Op() == ir.OTYPE {
singleType = ncase.List().First().Type()
}
caseVarInitialized := false
label := autolabel(".s")
jmp := npos(ncase.Pos, nodSym(ir.OGOTO, nil, label))
jmp := npos(ncase.Pos(), nodSym(ir.OGOTO, nil, label))
if ncase.List.Len() == 0 { // default:
if ncase.List().Len() == 0 { // default:
if defaultGoto != nil {
base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
for _, n1 := range ncase.List.Slice() {
for _, n1 := range ncase.List().Slice() {
if ir.IsNil(n1) { // case nil:
if nilGoto != nil {
base.Fatalf("duplicate nil case not detected during typechecking")
@ -575,14 +575,14 @@ func walkTypeSwitch(sw *ir.Node) {
}
if singleType != nil && singleType.IsInterface() {
s.Add(ncase.Pos, n1.Type, caseVar, jmp)
s.Add(ncase.Pos(), n1.Type(), caseVar, jmp)
caseVarInitialized = true
} else {
s.Add(ncase.Pos, n1.Type, nil, jmp)
s.Add(ncase.Pos(), n1.Type(), nil, jmp)
}
}
body.Append(npos(ncase.Pos, nodSym(ir.OLABEL, nil, label)))
body.Append(npos(ncase.Pos(), nodSym(ir.OLABEL, nil, label)))
if caseVar != nil && !caseVarInitialized {
val := s.facename
if singleType != nil {
@ -590,19 +590,19 @@ func walkTypeSwitch(sw *ir.Node) {
if singleType.IsInterface() {
base.Fatalf("singleType interface should have been handled in Add")
}
val = ifaceData(ncase.Pos, s.facename, singleType)
val = ifaceData(ncase.Pos(), s.facename, singleType)
}
l := []*ir.Node{
ir.NodAt(ncase.Pos, ir.ODCL, caseVar, nil),
ir.NodAt(ncase.Pos, ir.OAS, caseVar, val),
ir.NodAt(ncase.Pos(), ir.ODCL, caseVar, nil),
ir.NodAt(ncase.Pos(), ir.OAS, caseVar, val),
}
typecheckslice(l, ctxStmt)
body.Append(l...)
}
body.Append(ncase.Nbody.Slice()...)
body.Append(ncase.Body().Slice()...)
body.Append(br)
}
sw.List.Set(nil)
sw.PtrList().Set(nil)
if defaultGoto == nil {
defaultGoto = br
@ -610,13 +610,13 @@ func walkTypeSwitch(sw *ir.Node) {
if nilGoto == nil {
nilGoto = defaultGoto
}
ifNil.Nbody.Set1(nilGoto)
ifNil.PtrBody().Set1(nilGoto)
s.Emit(&sw.Nbody)
sw.Nbody.Append(defaultGoto)
sw.Nbody.AppendNodes(&body)
s.Emit(sw.PtrBody())
sw.PtrBody().Append(defaultGoto)
sw.PtrBody().AppendNodes(&body)
walkstmtlist(sw.Nbody.Slice())
walkstmtlist(sw.Body().Slice())
}
// A typeSwitch walks a type switch.
@ -650,18 +650,18 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *ir.Node) {
// cv, ok = iface.(type)
as := ir.NodAt(pos, ir.OAS2, nil, nil)
as.List.Set2(caseVar, s.okname) // cv, ok =
as.PtrList().Set2(caseVar, s.okname) // cv, ok =
dot := ir.NodAt(pos, ir.ODOTTYPE, s.facename, nil)
dot.Type = typ // iface.(type)
as.Rlist.Set1(dot)
dot.SetType(typ) // iface.(type)
as.PtrRlist().Set1(dot)
as = typecheck(as, ctxStmt)
as = walkexpr(as, &body)
body.Append(as)
// if ok { goto label }
nif := ir.NodAt(pos, ir.OIF, nil, nil)
nif.Left = s.okname
nif.Nbody.Set1(jmp)
nif.SetLeft(s.okname)
nif.PtrBody().Set1(jmp)
body.Append(nif)
if !typ.IsInterface() {
@ -710,8 +710,8 @@ func (s *typeSwitch) flush() {
// TODO(mdempsky): Omit hash equality check if
// there's only one type.
c := cc[i]
nif.Left = ir.Nod(ir.OEQ, s.hashname, nodintconst(int64(c.hash)))
nif.Nbody.AppendNodes(&c.body)
nif.SetLeft(ir.Nod(ir.OEQ, s.hashname, nodintconst(int64(c.hash))))
nif.PtrBody().AppendNodes(&c.body)
},
)
}
@ -736,22 +736,22 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) *ir.Node, leaf func(i i
nif := ir.Nod(ir.OIF, nil, nil)
leaf(i, nif)
base.Pos = base.Pos.WithNotStmt()
nif.Left = typecheck(nif.Left, ctxExpr)
nif.Left = defaultlit(nif.Left, nil)
nif.SetLeft(typecheck(nif.Left(), ctxExpr))
nif.SetLeft(defaultlit(nif.Left(), nil))
out.Append(nif)
out = &nif.Rlist
out = nif.PtrRlist()
}
return
}
half := lo + n/2
nif := ir.Nod(ir.OIF, nil, nil)
nif.Left = less(half)
nif.SetLeft(less(half))
base.Pos = base.Pos.WithNotStmt()
nif.Left = typecheck(nif.Left, ctxExpr)
nif.Left = defaultlit(nif.Left, nil)
do(lo, half, &nif.Nbody)
do(half, hi, &nif.Rlist)
nif.SetLeft(typecheck(nif.Left(), ctxExpr))
nif.SetLeft(defaultlit(nif.Left(), nil))
do(lo, half, nif.PtrBody())
do(half, hi, nif.PtrRlist())
out.Append(nif)
}

File diff suppressed because it is too large Load diff

View file

@ -110,7 +110,7 @@ func lexinit() {
types.Types[etype] = t
}
s2.Def = ir.AsTypesNode(typenod(t))
ir.AsNode(s2.Def).Name = new(ir.Name)
ir.AsNode(s2.Def).SetName(new(ir.Name))
}
for _, s := range &builtinFuncs {
@ -131,39 +131,39 @@ func lexinit() {
s := ir.BuiltinPkg.Lookup("true")
s.Def = ir.AsTypesNode(nodbool(true))
ir.AsNode(s.Def).Sym = lookup("true")
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).Type = types.UntypedBool
ir.AsNode(s.Def).SetSym(lookup("true"))
ir.AsNode(s.Def).SetName(new(ir.Name))
ir.AsNode(s.Def).SetType(types.UntypedBool)
s = ir.BuiltinPkg.Lookup("false")
s.Def = ir.AsTypesNode(nodbool(false))
ir.AsNode(s.Def).Sym = lookup("false")
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).Type = types.UntypedBool
ir.AsNode(s.Def).SetSym(lookup("false"))
ir.AsNode(s.Def).SetName(new(ir.Name))
ir.AsNode(s.Def).SetType(types.UntypedBool)
s = lookup("_")
s.Block = -100
s.Def = ir.AsTypesNode(NewName(s))
types.Types[types.TBLANK] = types.New(types.TBLANK)
ir.AsNode(s.Def).Type = types.Types[types.TBLANK]
ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
ir.BlankNode = ir.AsNode(s.Def)
s = ir.BuiltinPkg.Lookup("_")
s.Block = -100
s.Def = ir.AsTypesNode(NewName(s))
types.Types[types.TBLANK] = types.New(types.TBLANK)
ir.AsNode(s.Def).Type = types.Types[types.TBLANK]
ir.AsNode(s.Def).SetType(types.Types[types.TBLANK])
types.Types[types.TNIL] = types.New(types.TNIL)
s = ir.BuiltinPkg.Lookup("nil")
s.Def = ir.AsTypesNode(nodnil())
ir.AsNode(s.Def).Sym = s
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).SetSym(s)
ir.AsNode(s.Def).SetName(new(ir.Name))
s = ir.BuiltinPkg.Lookup("iota")
s.Def = ir.AsTypesNode(ir.Nod(ir.OIOTA, nil, nil))
ir.AsNode(s.Def).Sym = s
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).SetSym(s)
ir.AsNode(s.Def).SetName(new(ir.Name))
}
func typeinit() {
@ -182,7 +182,7 @@ func typeinit() {
types.Types[types.TUNSAFEPTR] = t
t.Sym = unsafepkg.Lookup("Pointer")
t.Sym.Def = ir.AsTypesNode(typenod(t))
ir.AsNode(t.Sym.Def).Name = new(ir.Name)
ir.AsNode(t.Sym.Def).SetName(new(ir.Name))
dowidth(types.Types[types.TUNSAFEPTR])
for et := types.TINT8; et <= types.TUINT64; et++ {
@ -359,7 +359,7 @@ func lexinit1() {
types.Bytetype = types.New(types.TUINT8)
types.Bytetype.Sym = s
s.Def = ir.AsTypesNode(typenod(types.Bytetype))
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).SetName(new(ir.Name))
dowidth(types.Bytetype)
// rune alias
@ -367,7 +367,7 @@ func lexinit1() {
types.Runetype = types.New(types.TINT32)
types.Runetype.Sym = s
s.Def = ir.AsTypesNode(typenod(types.Runetype))
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).SetName(new(ir.Name))
dowidth(types.Runetype)
// backend-dependent builtin types (e.g. int).
@ -385,7 +385,7 @@ func lexinit1() {
t.Sym = s1
types.Types[s.etype] = t
s1.Def = ir.AsTypesNode(typenod(t))
ir.AsNode(s1.Def).Name = new(ir.Name)
ir.AsNode(s1.Def).SetName(new(ir.Name))
s1.Origpkg = ir.BuiltinPkg
dowidth(t)
@ -412,7 +412,7 @@ func finishUniverse() {
}
nodfp = NewName(lookup(".fp"))
nodfp.Type = types.Types[types.TINT32]
nodfp.SetType(types.Types[types.TINT32])
nodfp.SetClass(ir.PPARAM)
nodfp.Name.SetUsed(true)
nodfp.Name().SetUsed(true)
}

View file

@ -11,23 +11,23 @@ import (
// evalunsafe evaluates a package unsafe operation and returns the result.
func evalunsafe(n *ir.Node) int64 {
switch n.Op {
switch n.Op() {
case ir.OALIGNOF, ir.OSIZEOF:
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, nil)
tr := n.Left.Type
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
tr := n.Left().Type()
if tr == nil {
return 0
}
dowidth(tr)
if n.Op == ir.OALIGNOF {
if n.Op() == ir.OALIGNOF {
return int64(tr.Align)
}
return tr.Width
case ir.OOFFSETOF:
// must be a selector.
if n.Left.Op != ir.OXDOT {
if n.Left().Op() != ir.OXDOT {
base.Errorf("invalid expression %v", n)
return 0
}
@ -35,14 +35,14 @@ func evalunsafe(n *ir.Node) int64 {
// Remember base of selector to find it back after dot insertion.
// Since r->left may be mutated by typechecking, check it explicitly
// first to track it correctly.
n.Left.Left = typecheck(n.Left.Left, ctxExpr)
sbase := n.Left.Left
n.Left().SetLeft(typecheck(n.Left().Left(), ctxExpr))
sbase := n.Left().Left()
n.Left = typecheck(n.Left, ctxExpr)
if n.Left.Type == nil {
n.SetLeft(typecheck(n.Left(), ctxExpr))
if n.Left().Type() == nil {
return 0
}
switch n.Left.Op {
switch n.Left().Op() {
case ir.ODOT, ir.ODOTPTR:
break
case ir.OCALLPART:
@ -55,27 +55,27 @@ func evalunsafe(n *ir.Node) int64 {
// Sum offsets for dots until we reach sbase.
var v int64
for r := n.Left; r != sbase; r = r.Left {
switch r.Op {
for r := n.Left(); r != sbase; r = r.Left() {
switch r.Op() {
case ir.ODOTPTR:
// For Offsetof(s.f), s may itself be a pointer,
// but accessing f must not otherwise involve
// indirection via embedded pointer types.
if r.Left != sbase {
base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left)
if r.Left() != sbase {
base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left())
return 0
}
fallthrough
case ir.ODOT:
v += r.Xoffset
v += r.Offset()
default:
ir.Dump("unsafenmagic", n.Left)
base.Fatalf("impossible %#v node after dot insertion", r.Op)
ir.Dump("unsafenmagic", n.Left())
base.Fatalf("impossible %#v node after dot insertion", r.Op())
}
}
return v
}
base.Fatalf("unexpected op %v", n.Op)
base.Fatalf("unexpected op %v", n.Op())
return 0
}

File diff suppressed because it is too large Load diff

View file

@ -205,7 +205,7 @@ func (p *dumper) dump(x reflect.Value, depth int) {
isNode := false
if n, ok := x.Interface().(Node); ok {
isNode = true
p.printf("%s %s {", n.Op.String(), p.addr(x))
p.printf("%s %s {", n.op.String(), p.addr(x))
} else {
p.printf("%s {", typ)
}

View file

@ -351,28 +351,28 @@ func jconvFmt(n *Node, s fmt.State, flag FmtFlag) {
if base.Debug.DumpPtrs != 0 {
fmt.Fprintf(s, " p(%p)", n)
}
if !short && n.Name != nil && n.Name.Vargen != 0 {
fmt.Fprintf(s, " g(%d)", n.Name.Vargen)
if !short && n.Name() != nil && n.Name().Vargen != 0 {
fmt.Fprintf(s, " g(%d)", n.Name().Vargen)
}
if base.Debug.DumpPtrs != 0 && !short && n.Name != nil && n.Name.Defn != nil {
if base.Debug.DumpPtrs != 0 && !short && n.Name() != nil && n.Name().Defn != nil {
// Useful to see where Defn is set and what node it points to
fmt.Fprintf(s, " defn(%p)", n.Name.Defn)
fmt.Fprintf(s, " defn(%p)", n.Name().Defn)
}
if n.Pos.IsKnown() {
if n.Pos().IsKnown() {
pfx := ""
switch n.Pos.IsStmt() {
switch n.Pos().IsStmt() {
case src.PosNotStmt:
pfx = "_" // "-" would be confusing
case src.PosIsStmt:
pfx = "+"
}
fmt.Fprintf(s, " l(%s%d)", pfx, n.Pos.Line())
fmt.Fprintf(s, " l(%s%d)", pfx, n.Pos().Line())
}
if !short && n.Xoffset != types.BADWIDTH {
fmt.Fprintf(s, " x(%d)", n.Xoffset)
if !short && n.Offset() != types.BADWIDTH {
fmt.Fprintf(s, " x(%d)", n.Offset())
}
if n.Class() != 0 {
@ -405,20 +405,20 @@ func jconvFmt(n *Node, s fmt.State, flag FmtFlag) {
fmt.Fprintf(s, " embedded")
}
if n.Op == ONAME {
if n.Name.Addrtaken() {
if n.Op() == ONAME {
if n.Name().Addrtaken() {
fmt.Fprint(s, " addrtaken")
}
if n.Name.Assigned() {
if n.Name().Assigned() {
fmt.Fprint(s, " assigned")
}
if n.Name.IsClosureVar() {
if n.Name().IsClosureVar() {
fmt.Fprint(s, " closurevar")
}
if n.Name.Captured() {
if n.Name().Captured() {
fmt.Fprint(s, " captured")
}
if n.Name.IsOutputParamHeapAddr() {
if n.Name().IsOutputParamHeapAddr() {
fmt.Fprint(s, " outputparamheapaddr")
}
}
@ -433,7 +433,7 @@ func jconvFmt(n *Node, s fmt.State, flag FmtFlag) {
fmt.Fprint(s, " hascall")
}
if !short && n.Name != nil && n.Name.Used() {
if !short && n.Name() != nil && n.Name().Used() {
fmt.Fprint(s, " used")
}
}
@ -899,31 +899,31 @@ func stmtFmt(n *Node, s fmt.State, mode FmtMode) {
// block starting with the init statements.
// if we can just say "for" n->ninit; ... then do so
simpleinit := n.Ninit.Len() == 1 && n.Ninit.First().Ninit.Len() == 0 && StmtWithInit(n.Op)
simpleinit := n.Init().Len() == 1 && n.Init().First().Init().Len() == 0 && StmtWithInit(n.Op())
// otherwise, print the inits as separate statements
complexinit := n.Ninit.Len() != 0 && !simpleinit && (mode != FErr)
complexinit := n.Init().Len() != 0 && !simpleinit && (mode != FErr)
// but if it was for if/for/switch, put in an extra surrounding block to limit the scope
extrablock := complexinit && StmtWithInit(n.Op)
extrablock := complexinit && StmtWithInit(n.Op())
if extrablock {
fmt.Fprint(s, "{")
}
if complexinit {
mode.Fprintf(s, " %v; ", n.Ninit)
mode.Fprintf(s, " %v; ", n.Init())
}
switch n.Op {
switch n.Op() {
case ODCL:
mode.Fprintf(s, "var %v %v", n.Left.Sym, n.Left.Type)
mode.Fprintf(s, "var %v %v", n.Left().Sym(), n.Left().Type())
case ODCLFIELD:
if n.Sym != nil {
mode.Fprintf(s, "%v %v", n.Sym, n.Left)
if n.Sym() != nil {
mode.Fprintf(s, "%v %v", n.Sym(), n.Left())
} else {
mode.Fprintf(s, "%v", n.Left)
mode.Fprintf(s, "%v", n.Left())
}
// Don't export "v = <N>" initializing statements, hope they're always
@ -931,61 +931,61 @@ func stmtFmt(n *Node, s fmt.State, mode FmtMode) {
// the "v = <N>" again.
case OAS:
if n.Colas() && !complexinit {
mode.Fprintf(s, "%v := %v", n.Left, n.Right)
mode.Fprintf(s, "%v := %v", n.Left(), n.Right())
} else {
mode.Fprintf(s, "%v = %v", n.Left, n.Right)
mode.Fprintf(s, "%v = %v", n.Left(), n.Right())
}
case OASOP:
if n.Implicit() {
if n.SubOp() == OADD {
mode.Fprintf(s, "%v++", n.Left)
mode.Fprintf(s, "%v++", n.Left())
} else {
mode.Fprintf(s, "%v--", n.Left)
mode.Fprintf(s, "%v--", n.Left())
}
break
}
mode.Fprintf(s, "%v %#v= %v", n.Left, n.SubOp(), n.Right)
mode.Fprintf(s, "%v %#v= %v", n.Left(), n.SubOp(), n.Right())
case OAS2:
if n.Colas() && !complexinit {
mode.Fprintf(s, "%.v := %.v", n.List, n.Rlist)
mode.Fprintf(s, "%.v := %.v", n.List(), n.Rlist())
break
}
fallthrough
case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
mode.Fprintf(s, "%.v = %v", n.List, n.Right)
mode.Fprintf(s, "%.v = %v", n.List(), n.Right())
case ORETURN:
mode.Fprintf(s, "return %.v", n.List)
mode.Fprintf(s, "return %.v", n.List())
case ORETJMP:
mode.Fprintf(s, "retjmp %v", n.Sym)
mode.Fprintf(s, "retjmp %v", n.Sym())
case OINLMARK:
mode.Fprintf(s, "inlmark %d", n.Xoffset)
mode.Fprintf(s, "inlmark %d", n.Offset())
case OGO:
mode.Fprintf(s, "go %v", n.Left)
mode.Fprintf(s, "go %v", n.Left())
case ODEFER:
mode.Fprintf(s, "defer %v", n.Left)
mode.Fprintf(s, "defer %v", n.Left())
case OIF:
if simpleinit {
mode.Fprintf(s, "if %v; %v { %v }", n.Ninit.First(), n.Left, n.Nbody)
mode.Fprintf(s, "if %v; %v { %v }", n.Init().First(), n.Left(), n.Body())
} else {
mode.Fprintf(s, "if %v { %v }", n.Left, n.Nbody)
mode.Fprintf(s, "if %v { %v }", n.Left(), n.Body())
}
if n.Rlist.Len() != 0 {
mode.Fprintf(s, " else { %v }", n.Rlist)
if n.Rlist().Len() != 0 {
mode.Fprintf(s, " else { %v }", n.Rlist())
}
case OFOR, OFORUNTIL:
opname := "for"
if n.Op == OFORUNTIL {
if n.Op() == OFORUNTIL {
opname = "foruntil"
}
if mode == FErr { // TODO maybe only if FmtShort, same below
@ -995,26 +995,26 @@ func stmtFmt(n *Node, s fmt.State, mode FmtMode) {
fmt.Fprint(s, opname)
if simpleinit {
mode.Fprintf(s, " %v;", n.Ninit.First())
} else if n.Right != nil {
mode.Fprintf(s, " %v;", n.Init().First())
} else if n.Right() != nil {
fmt.Fprint(s, " ;")
}
if n.Left != nil {
mode.Fprintf(s, " %v", n.Left)
if n.Left() != nil {
mode.Fprintf(s, " %v", n.Left())
}
if n.Right != nil {
mode.Fprintf(s, "; %v", n.Right)
if n.Right() != nil {
mode.Fprintf(s, "; %v", n.Right())
} else if simpleinit {
fmt.Fprint(s, ";")
}
if n.Op == OFORUNTIL && n.List.Len() != 0 {
mode.Fprintf(s, "; %v", n.List)
if n.Op() == OFORUNTIL && n.List().Len() != 0 {
mode.Fprintf(s, "; %v", n.List())
}
mode.Fprintf(s, " { %v }", n.Nbody)
mode.Fprintf(s, " { %v }", n.Body())
case ORANGE:
if mode == FErr {
@ -1022,49 +1022,49 @@ func stmtFmt(n *Node, s fmt.State, mode FmtMode) {
break
}
if n.List.Len() == 0 {
mode.Fprintf(s, "for range %v { %v }", n.Right, n.Nbody)
if n.List().Len() == 0 {
mode.Fprintf(s, "for range %v { %v }", n.Right(), n.Body())
break
}
mode.Fprintf(s, "for %.v = range %v { %v }", n.List, n.Right, n.Nbody)
mode.Fprintf(s, "for %.v = range %v { %v }", n.List(), n.Right(), n.Body())
case OSELECT, OSWITCH:
if mode == FErr {
mode.Fprintf(s, "%v statement", n.Op)
mode.Fprintf(s, "%v statement", n.Op())
break
}
mode.Fprintf(s, "%#v", n.Op)
mode.Fprintf(s, "%#v", n.Op())
if simpleinit {
mode.Fprintf(s, " %v;", n.Ninit.First())
mode.Fprintf(s, " %v;", n.Init().First())
}
if n.Left != nil {
mode.Fprintf(s, " %v ", n.Left)
if n.Left() != nil {
mode.Fprintf(s, " %v ", n.Left())
}
mode.Fprintf(s, " { %v }", n.List)
mode.Fprintf(s, " { %v }", n.List())
case OCASE:
if n.List.Len() != 0 {
mode.Fprintf(s, "case %.v", n.List)
if n.List().Len() != 0 {
mode.Fprintf(s, "case %.v", n.List())
} else {
fmt.Fprint(s, "default")
}
mode.Fprintf(s, ": %v", n.Nbody)
mode.Fprintf(s, ": %v", n.Body())
case OBREAK, OCONTINUE, OGOTO, OFALL:
if n.Sym != nil {
mode.Fprintf(s, "%#v %v", n.Op, n.Sym)
if n.Sym() != nil {
mode.Fprintf(s, "%#v %v", n.Op(), n.Sym())
} else {
mode.Fprintf(s, "%#v", n.Op)
mode.Fprintf(s, "%#v", n.Op())
}
case OEMPTY:
break
case OLABEL:
mode.Fprintf(s, "%v: ", n.Sym)
mode.Fprintf(s, "%v: ", n.Sym())
}
if extrablock {
@ -1193,8 +1193,8 @@ var OpPrec = []int{
}
func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
for n != nil && n.Implicit() && (n.Op == ODEREF || n.Op == OADDR) {
n = n.Left
for n != nil && n.Implicit() && (n.Op() == ODEREF || n.Op() == OADDR) {
n = n.Left()
}
if n == nil {
@ -1202,8 +1202,8 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
return
}
nprec := OpPrec[n.Op]
if n.Op == OTYPE && n.Sym != nil {
nprec := OpPrec[n.Op()]
if n.Op() == OTYPE && n.Sym() != nil {
nprec = 8
}
@ -1212,38 +1212,38 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
return
}
switch n.Op {
switch n.Op() {
case OPAREN:
mode.Fprintf(s, "(%v)", n.Left)
mode.Fprintf(s, "(%v)", n.Left())
case ONIL:
fmt.Fprint(s, "nil")
case OLITERAL: // this is a bit of a mess
if mode == FErr {
if n.Orig != nil && n.Orig != n {
exprFmt(n.Orig, s, prec, mode)
if n.Orig() != nil && n.Orig() != n {
exprFmt(n.Orig(), s, prec, mode)
return
}
if n.Sym != nil {
fmt.Fprint(s, smodeString(n.Sym, mode))
if n.Sym() != nil {
fmt.Fprint(s, smodeString(n.Sym(), mode))
return
}
}
needUnparen := false
if n.Type != nil && !n.Type.IsUntyped() {
if n.Type() != nil && !n.Type().IsUntyped() {
// Need parens when type begins with what might
// be misinterpreted as a unary operator: * or <-.
if n.Type.IsPtr() || (n.Type.IsChan() && n.Type.ChanDir() == types.Crecv) {
mode.Fprintf(s, "(%v)(", n.Type)
if n.Type().IsPtr() || (n.Type().IsChan() && n.Type().ChanDir() == types.Crecv) {
mode.Fprintf(s, "(%v)(", n.Type())
} else {
mode.Fprintf(s, "%v(", n.Type)
mode.Fprintf(s, "%v(", n.Type())
}
needUnparen = true
}
if n.Type == types.UntypedRune {
if n.Type() == types.UntypedRune {
switch x, ok := constant.Int64Val(n.Val()); {
case !ok:
fallthrough
@ -1270,44 +1270,44 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
case ONAME:
// Special case: name used as local variable in export.
// _ becomes ~b%d internally; print as _ for export
if mode == FErr && n.Sym != nil && n.Sym.Name[0] == '~' && n.Sym.Name[1] == 'b' {
if mode == FErr && n.Sym() != nil && n.Sym().Name[0] == '~' && n.Sym().Name[1] == 'b' {
fmt.Fprint(s, "_")
return
}
fallthrough
case OPACK, ONONAME, OMETHEXPR:
fmt.Fprint(s, smodeString(n.Sym, mode))
fmt.Fprint(s, smodeString(n.Sym(), mode))
case OTYPE:
if n.Type == nil && n.Sym != nil {
fmt.Fprint(s, smodeString(n.Sym, mode))
if n.Type() == nil && n.Sym() != nil {
fmt.Fprint(s, smodeString(n.Sym(), mode))
return
}
mode.Fprintf(s, "%v", n.Type)
mode.Fprintf(s, "%v", n.Type())
case OTARRAY:
if n.Left != nil {
mode.Fprintf(s, "[%v]%v", n.Left, n.Right)
if n.Left() != nil {
mode.Fprintf(s, "[%v]%v", n.Left(), n.Right())
return
}
mode.Fprintf(s, "[]%v", n.Right) // happens before typecheck
mode.Fprintf(s, "[]%v", n.Right()) // happens before typecheck
case OTMAP:
mode.Fprintf(s, "map[%v]%v", n.Left, n.Right)
mode.Fprintf(s, "map[%v]%v", n.Left(), n.Right())
case OTCHAN:
switch n.TChanDir() {
case types.Crecv:
mode.Fprintf(s, "<-chan %v", n.Left)
mode.Fprintf(s, "<-chan %v", n.Left())
case types.Csend:
mode.Fprintf(s, "chan<- %v", n.Left)
mode.Fprintf(s, "chan<- %v", n.Left())
default:
if n.Left != nil && n.Left.Op == OTCHAN && n.Left.Sym == nil && n.Left.TChanDir() == types.Crecv {
mode.Fprintf(s, "chan (%v)", n.Left)
if n.Left() != nil && n.Left().Op() == OTCHAN && n.Left().Sym() == nil && n.Left().TChanDir() == types.Crecv {
mode.Fprintf(s, "chan (%v)", n.Left())
} else {
mode.Fprintf(s, "chan %v", n.Left)
mode.Fprintf(s, "chan %v", n.Left())
}
}
@ -1325,11 +1325,11 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
fmt.Fprint(s, "func literal")
return
}
if n.Nbody.Len() != 0 {
mode.Fprintf(s, "%v { %v }", n.Type, n.Nbody)
if n.Body().Len() != 0 {
mode.Fprintf(s, "%v { %v }", n.Type(), n.Body())
return
}
mode.Fprintf(s, "%v { %v }", n.Type, n.Func.Decl.Nbody)
mode.Fprintf(s, "%v { %v }", n.Type(), n.Func().Decl.Body())
case OCOMPLIT:
if mode == FErr {
@ -1337,75 +1337,75 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
mode.Fprintf(s, "... argument")
return
}
if n.Right != nil {
mode.Fprintf(s, "%v{%s}", n.Right, ellipsisIf(n.List.Len() != 0))
if n.Right() != nil {
mode.Fprintf(s, "%v{%s}", n.Right(), ellipsisIf(n.List().Len() != 0))
return
}
fmt.Fprint(s, "composite literal")
return
}
mode.Fprintf(s, "(%v{ %.v })", n.Right, n.List)
mode.Fprintf(s, "(%v{ %.v })", n.Right(), n.List())
case OPTRLIT:
mode.Fprintf(s, "&%v", n.Left)
mode.Fprintf(s, "&%v", n.Left())
case OSTRUCTLIT, OARRAYLIT, OSLICELIT, OMAPLIT:
if mode == FErr {
mode.Fprintf(s, "%v{%s}", n.Type, ellipsisIf(n.List.Len() != 0))
mode.Fprintf(s, "%v{%s}", n.Type(), ellipsisIf(n.List().Len() != 0))
return
}
mode.Fprintf(s, "(%v{ %.v })", n.Type, n.List)
mode.Fprintf(s, "(%v{ %.v })", n.Type(), n.List())
case OKEY:
if n.Left != nil && n.Right != nil {
mode.Fprintf(s, "%v:%v", n.Left, n.Right)
if n.Left() != nil && n.Right() != nil {
mode.Fprintf(s, "%v:%v", n.Left(), n.Right())
return
}
if n.Left == nil && n.Right != nil {
mode.Fprintf(s, ":%v", n.Right)
if n.Left() == nil && n.Right() != nil {
mode.Fprintf(s, ":%v", n.Right())
return
}
if n.Left != nil && n.Right == nil {
mode.Fprintf(s, "%v:", n.Left)
if n.Left() != nil && n.Right() == nil {
mode.Fprintf(s, "%v:", n.Left())
return
}
fmt.Fprint(s, ":")
case OSTRUCTKEY:
mode.Fprintf(s, "%v:%v", n.Sym, n.Left)
mode.Fprintf(s, "%v:%v", n.Sym(), n.Left())
case OCALLPART:
exprFmt(n.Left, s, nprec, mode)
if n.Right == nil || n.Right.Sym == nil {
exprFmt(n.Left(), s, nprec, mode)
if n.Right() == nil || n.Right().Sym() == nil {
fmt.Fprint(s, ".<nil>")
return
}
mode.Fprintf(s, ".%0S", n.Right.Sym)
mode.Fprintf(s, ".%0S", n.Right().Sym())
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
exprFmt(n.Left, s, nprec, mode)
if n.Sym == nil {
exprFmt(n.Left(), s, nprec, mode)
if n.Sym() == nil {
fmt.Fprint(s, ".<nil>")
return
}
mode.Fprintf(s, ".%0S", n.Sym)
mode.Fprintf(s, ".%0S", n.Sym())
case ODOTTYPE, ODOTTYPE2:
exprFmt(n.Left, s, nprec, mode)
if n.Right != nil {
mode.Fprintf(s, ".(%v)", n.Right)
exprFmt(n.Left(), s, nprec, mode)
if n.Right() != nil {
mode.Fprintf(s, ".(%v)", n.Right())
return
}
mode.Fprintf(s, ".(%v)", n.Type)
mode.Fprintf(s, ".(%v)", n.Type())
case OINDEX, OINDEXMAP:
exprFmt(n.Left, s, nprec, mode)
mode.Fprintf(s, "[%v]", n.Right)
exprFmt(n.Left(), s, nprec, mode)
mode.Fprintf(s, "[%v]", n.Right())
case OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
exprFmt(n.Left, s, nprec, mode)
exprFmt(n.Left(), s, nprec, mode)
fmt.Fprint(s, "[")
low, high, max := n.SliceBounds()
if low != nil {
@ -1415,7 +1415,7 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
if high != nil {
fmt.Fprint(s, modeString(high, mode))
}
if n.Op.IsSlice3() {
if n.Op().IsSlice3() {
fmt.Fprint(s, ":")
if max != nil {
fmt.Fprint(s, modeString(max, mode))
@ -1424,16 +1424,16 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
fmt.Fprint(s, "]")
case OSLICEHEADER:
if n.List.Len() != 2 {
base.Fatalf("bad OSLICEHEADER list length %d", n.List.Len())
if n.List().Len() != 2 {
base.Fatalf("bad OSLICEHEADER list length %d", n.List().Len())
}
mode.Fprintf(s, "sliceheader{%v,%v,%v}", n.Left, n.List.First(), n.List.Second())
mode.Fprintf(s, "sliceheader{%v,%v,%v}", n.Left(), n.List().First(), n.List().Second())
case OCOMPLEX, OCOPY:
if n.Left != nil {
mode.Fprintf(s, "%#v(%v, %v)", n.Op, n.Left, n.Right)
if n.Left() != nil {
mode.Fprintf(s, "%#v(%v, %v)", n.Op(), n.Left(), n.Right())
} else {
mode.Fprintf(s, "%#v(%.v)", n.Op, n.List)
mode.Fprintf(s, "%#v(%.v)", n.Op(), n.List())
}
case OCONV,
@ -1444,15 +1444,15 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
OSTR2BYTES,
OSTR2RUNES,
ORUNESTR:
if n.Type == nil || n.Type.Sym == nil {
mode.Fprintf(s, "(%v)", n.Type)
if n.Type() == nil || n.Type().Sym == nil {
mode.Fprintf(s, "(%v)", n.Type())
} else {
mode.Fprintf(s, "%v", n.Type)
mode.Fprintf(s, "%v", n.Type())
}
if n.Left != nil {
mode.Fprintf(s, "(%v)", n.Left)
if n.Left() != nil {
mode.Fprintf(s, "(%v)", n.Left())
} else {
mode.Fprintf(s, "(%.v)", n.List)
mode.Fprintf(s, "(%.v)", n.List())
}
case OREAL,
@ -1471,49 +1471,49 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
OSIZEOF,
OPRINT,
OPRINTN:
if n.Left != nil {
mode.Fprintf(s, "%#v(%v)", n.Op, n.Left)
if n.Left() != nil {
mode.Fprintf(s, "%#v(%v)", n.Op(), n.Left())
return
}
if n.IsDDD() {
mode.Fprintf(s, "%#v(%.v...)", n.Op, n.List)
mode.Fprintf(s, "%#v(%.v...)", n.Op(), n.List())
return
}
mode.Fprintf(s, "%#v(%.v)", n.Op, n.List)
mode.Fprintf(s, "%#v(%.v)", n.Op(), n.List())
case OCALL, OCALLFUNC, OCALLINTER, OCALLMETH, OGETG:
exprFmt(n.Left, s, nprec, mode)
exprFmt(n.Left(), s, nprec, mode)
if n.IsDDD() {
mode.Fprintf(s, "(%.v...)", n.List)
mode.Fprintf(s, "(%.v...)", n.List())
return
}
mode.Fprintf(s, "(%.v)", n.List)
mode.Fprintf(s, "(%.v)", n.List())
case OMAKEMAP, OMAKECHAN, OMAKESLICE:
if n.List.Len() != 0 { // pre-typecheck
mode.Fprintf(s, "make(%v, %.v)", n.Type, n.List)
if n.List().Len() != 0 { // pre-typecheck
mode.Fprintf(s, "make(%v, %.v)", n.Type(), n.List())
return
}
if n.Right != nil {
mode.Fprintf(s, "make(%v, %v, %v)", n.Type, n.Left, n.Right)
if n.Right() != nil {
mode.Fprintf(s, "make(%v, %v, %v)", n.Type(), n.Left(), n.Right())
return
}
if n.Left != nil && (n.Op == OMAKESLICE || !n.Left.Type.IsUntyped()) {
mode.Fprintf(s, "make(%v, %v)", n.Type, n.Left)
if n.Left() != nil && (n.Op() == OMAKESLICE || !n.Left().Type().IsUntyped()) {
mode.Fprintf(s, "make(%v, %v)", n.Type(), n.Left())
return
}
mode.Fprintf(s, "make(%v)", n.Type)
mode.Fprintf(s, "make(%v)", n.Type())
case OMAKESLICECOPY:
mode.Fprintf(s, "makeslicecopy(%v, %v, %v)", n.Type, n.Left, n.Right)
mode.Fprintf(s, "makeslicecopy(%v, %v, %v)", n.Type(), n.Left(), n.Right())
case OPLUS, ONEG, OADDR, OBITNOT, ODEREF, ONOT, ORECV:
// Unary
mode.Fprintf(s, "%#v", n.Op)
if n.Left != nil && n.Left.Op == n.Op {
mode.Fprintf(s, "%#v", n.Op())
if n.Left() != nil && n.Left().Op() == n.Op() {
fmt.Fprint(s, " ")
}
exprFmt(n.Left, s, nprec+1, mode)
exprFmt(n.Left(), s, nprec+1, mode)
// Binary
case OADD,
@ -1536,12 +1536,12 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
OSEND,
OSUB,
OXOR:
exprFmt(n.Left, s, nprec, mode)
mode.Fprintf(s, " %#v ", n.Op)
exprFmt(n.Right, s, nprec+1, mode)
exprFmt(n.Left(), s, nprec, mode)
mode.Fprintf(s, " %#v ", n.Op())
exprFmt(n.Right(), s, nprec+1, mode)
case OADDSTR:
for i, n1 := range n.List.Slice() {
for i, n1 := range n.List().Slice() {
if i != 0 {
fmt.Fprint(s, " + ")
}
@ -1550,23 +1550,23 @@ func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
case ODDD:
mode.Fprintf(s, "...")
default:
mode.Fprintf(s, "<node %v>", n.Op)
mode.Fprintf(s, "<node %v>", n.Op())
}
}
func nodeFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
t := n.Type
t := n.Type()
// We almost always want the original.
// TODO(gri) Why the special case for OLITERAL?
if n.Op != OLITERAL && n.Orig != nil {
n = n.Orig
if n.Op() != OLITERAL && n.Orig() != nil {
n = n.Orig()
}
if flag&FmtLong != 0 && t != nil {
if t.Etype == types.TNIL {
fmt.Fprint(s, "nil")
} else if n.Op == ONAME && n.Name.AutoTemp() {
} else if n.Op() == ONAME && n.Name().AutoTemp() {
mode.Fprintf(s, "%v value", t)
} else {
mode.Fprintf(s, "%v (type %v)", n, t)
@ -1576,7 +1576,7 @@ func nodeFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
// TODO inlining produces expressions with ninits. we can't print these yet.
if OpPrec[n.Op] < 0 {
if OpPrec[n.Op()] < 0 {
stmtFmt(n, s, mode)
return
}
@ -1594,82 +1594,82 @@ func nodeDumpFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
return
}
if n.Ninit.Len() != 0 {
mode.Fprintf(s, "%v-init%v", n.Op, n.Ninit)
if n.Init().Len() != 0 {
mode.Fprintf(s, "%v-init%v", n.Op(), n.Init())
indent(s)
}
}
switch n.Op {
switch n.Op() {
default:
mode.Fprintf(s, "%v%j", n.Op, n)
mode.Fprintf(s, "%v%j", n.Op(), n)
case OLITERAL:
mode.Fprintf(s, "%v-%v%j", n.Op, n.Val(), n)
mode.Fprintf(s, "%v-%v%j", n.Op(), n.Val(), n)
case ONAME, ONONAME, OMETHEXPR:
if n.Sym != nil {
mode.Fprintf(s, "%v-%v%j", n.Op, n.Sym, n)
if n.Sym() != nil {
mode.Fprintf(s, "%v-%v%j", n.Op(), n.Sym(), n)
} else {
mode.Fprintf(s, "%v%j", n.Op, n)
mode.Fprintf(s, "%v%j", n.Op(), n)
}
if recur && n.Type == nil && n.Name != nil && n.Name.Param != nil && n.Name.Param.Ntype != nil {
if recur && n.Type() == nil && n.Name() != nil && n.Name().Param != nil && n.Name().Param.Ntype != nil {
indent(s)
mode.Fprintf(s, "%v-ntype%v", n.Op, n.Name.Param.Ntype)
mode.Fprintf(s, "%v-ntype%v", n.Op(), n.Name().Param.Ntype)
}
case OASOP:
mode.Fprintf(s, "%v-%v%j", n.Op, n.SubOp(), n)
mode.Fprintf(s, "%v-%v%j", n.Op(), n.SubOp(), n)
case OTYPE:
mode.Fprintf(s, "%v %v%j type=%v", n.Op, n.Sym, n, n.Type)
if recur && n.Type == nil && n.Name != nil && n.Name.Param != nil && n.Name.Param.Ntype != nil {
mode.Fprintf(s, "%v %v%j type=%v", n.Op(), n.Sym(), n, n.Type())
if recur && n.Type() == nil && n.Name() != nil && n.Name().Param != nil && n.Name().Param.Ntype != nil {
indent(s)
mode.Fprintf(s, "%v-ntype%v", n.Op, n.Name.Param.Ntype)
mode.Fprintf(s, "%v-ntype%v", n.Op(), n.Name().Param.Ntype)
}
}
if n.Op == OCLOSURE && n.Func.Decl != nil && n.Func.Nname.Sym != nil {
mode.Fprintf(s, " fnName %v", n.Func.Nname.Sym)
if n.Op() == OCLOSURE && n.Func().Decl != nil && n.Func().Nname.Sym() != nil {
mode.Fprintf(s, " fnName %v", n.Func().Nname.Sym())
}
if n.Sym != nil && n.Op != ONAME {
mode.Fprintf(s, " %v", n.Sym)
if n.Sym() != nil && n.Op() != ONAME {
mode.Fprintf(s, " %v", n.Sym())
}
if n.Type != nil {
mode.Fprintf(s, " %v", n.Type)
if n.Type() != nil {
mode.Fprintf(s, " %v", n.Type())
}
if recur {
if n.Left != nil {
mode.Fprintf(s, "%v", n.Left)
if n.Left() != nil {
mode.Fprintf(s, "%v", n.Left())
}
if n.Right != nil {
mode.Fprintf(s, "%v", n.Right)
if n.Right() != nil {
mode.Fprintf(s, "%v", n.Right())
}
if n.Op == OCLOSURE && n.Func != nil && n.Func.Decl != nil && n.Func.Decl.Nbody.Len() != 0 {
if n.Op() == OCLOSURE && n.Func() != nil && n.Func().Decl != nil && n.Func().Decl.Body().Len() != 0 {
indent(s)
// The function associated with a closure
mode.Fprintf(s, "%v-clofunc%v", n.Op, n.Func.Decl)
mode.Fprintf(s, "%v-clofunc%v", n.Op(), n.Func().Decl)
}
if n.Op == ODCLFUNC && n.Func != nil && n.Func.Dcl != nil && len(n.Func.Dcl) != 0 {
if n.Op() == ODCLFUNC && n.Func() != nil && n.Func().Dcl != nil && len(n.Func().Dcl) != 0 {
indent(s)
// The dcls for a func or closure
mode.Fprintf(s, "%v-dcl%v", n.Op, AsNodes(n.Func.Dcl))
mode.Fprintf(s, "%v-dcl%v", n.Op(), AsNodes(n.Func().Dcl))
}
if n.List.Len() != 0 {
if n.List().Len() != 0 {
indent(s)
mode.Fprintf(s, "%v-list%v", n.Op, n.List)
mode.Fprintf(s, "%v-list%v", n.Op(), n.List())
}
if n.Rlist.Len() != 0 {
if n.Rlist().Len() != 0 {
indent(s)
mode.Fprintf(s, "%v-rlist%v", n.Op, n.Rlist)
mode.Fprintf(s, "%v-rlist%v", n.Op(), n.Rlist())
}
if n.Nbody.Len() != 0 {
if n.Body().Len() != 0 {
indent(s)
mode.Fprintf(s, "%v-body%v", n.Op, n.Nbody)
mode.Fprintf(s, "%v-body%v", n.Op(), n.Body())
}
}
}
@ -1910,5 +1910,5 @@ func InstallTypeFormats() {
// Line returns n's position as a string. If n has been inlined,
// it uses the outermost position where n has been inlined.
func Line(n *Node) string {
return base.FmtPos(n.Pos)
return base.FmtPos(n.Pos())
}

View file

@ -26,25 +26,25 @@ import (
type Node struct {
// Tree structure.
// Generic recursive walks should follow these fields.
Left *Node
Right *Node
Ninit Nodes
Nbody Nodes
List Nodes
Rlist Nodes
left *Node
right *Node
init Nodes
body Nodes
list Nodes
rlist Nodes
// most nodes
Type *types.Type
Orig *Node // original form, for printing, and tracking copies of ONAMEs
typ *types.Type
orig *Node // original form, for printing, and tracking copies of ONAMEs
// func
Func *Func
fn *Func
// ONAME, OTYPE, OPACK, OLABEL, some OLITERAL
Name *Name
name *Name
Sym *types.Sym // various
E interface{} // Opt or Val, see methods below
sym *types.Sym // various
e interface{} // Opt or Val, see methods below
// Various. Usually an offset into a struct. For example:
// - ONAME nodes that refer to local variables use it to identify their stack frame position.
@ -54,85 +54,85 @@ type Node struct {
// - OINLMARK stores an index into the inlTree data structure.
// - OCLOSURE uses it to store ambient iota value, if any.
// Possibly still more uses. If you find any, document them.
Xoffset int64
offset int64
Pos src.XPos
pos src.XPos
flags bitset32
Esc uint16 // EscXXX
esc uint16 // EscXXX
Op Op
op Op
aux uint8
}
func (n *Node) GetLeft() *Node { return n.Left }
func (n *Node) SetLeft(x *Node) { n.Left = x }
func (n *Node) GetRight() *Node { return n.Right }
func (n *Node) SetRight(x *Node) { n.Right = x }
func (n *Node) GetOrig() *Node { return n.Orig }
func (n *Node) SetOrig(x *Node) { n.Orig = x }
func (n *Node) GetType() *types.Type { return n.Type }
func (n *Node) SetType(x *types.Type) { n.Type = x }
func (n *Node) GetFunc() *Func { return n.Func }
func (n *Node) SetFunc(x *Func) { n.Func = x }
func (n *Node) GetName() *Name { return n.Name }
func (n *Node) SetName(x *Name) { n.Name = x }
func (n *Node) GetSym() *types.Sym { return n.Sym }
func (n *Node) SetSym(x *types.Sym) { n.Sym = x }
func (n *Node) GetPos() src.XPos { return n.Pos }
func (n *Node) SetPos(x src.XPos) { n.Pos = x }
func (n *Node) GetXoffset() int64 { return n.Xoffset }
func (n *Node) SetXoffset(x int64) { n.Xoffset = x }
func (n *Node) GetEsc() uint16 { return n.Esc }
func (n *Node) SetEsc(x uint16) { n.Esc = x }
func (n *Node) GetOp() Op { return n.Op }
func (n *Node) SetOp(x Op) { n.Op = x }
func (n *Node) GetNinit() Nodes { return n.Ninit }
func (n *Node) SetNinit(x Nodes) { n.Ninit = x }
func (n *Node) PtrNinit() *Nodes { return &n.Ninit }
func (n *Node) GetNbody() Nodes { return n.Nbody }
func (n *Node) SetNbody(x Nodes) { n.Nbody = x }
func (n *Node) PtrNbody() *Nodes { return &n.Nbody }
func (n *Node) GetList() Nodes { return n.List }
func (n *Node) SetList(x Nodes) { n.List = x }
func (n *Node) PtrList() *Nodes { return &n.List }
func (n *Node) GetRlist() Nodes { return n.Rlist }
func (n *Node) SetRlist(x Nodes) { n.Rlist = x }
func (n *Node) PtrRlist() *Nodes { return &n.Rlist }
func (n *Node) Left() *Node { return n.left }
func (n *Node) SetLeft(x *Node) { n.left = x }
func (n *Node) Right() *Node { return n.right }
func (n *Node) SetRight(x *Node) { n.right = x }
func (n *Node) Orig() *Node { return n.orig }
func (n *Node) SetOrig(x *Node) { n.orig = x }
func (n *Node) Type() *types.Type { return n.typ }
func (n *Node) SetType(x *types.Type) { n.typ = x }
func (n *Node) Func() *Func { return n.fn }
func (n *Node) SetFunc(x *Func) { n.fn = x }
func (n *Node) Name() *Name { return n.name }
func (n *Node) SetName(x *Name) { n.name = x }
func (n *Node) Sym() *types.Sym { return n.sym }
func (n *Node) SetSym(x *types.Sym) { n.sym = x }
func (n *Node) Pos() src.XPos { return n.pos }
func (n *Node) SetPos(x src.XPos) { n.pos = x }
func (n *Node) Offset() int64 { return n.offset }
func (n *Node) SetOffset(x int64) { n.offset = x }
func (n *Node) Esc() uint16 { return n.esc }
func (n *Node) SetEsc(x uint16) { n.esc = x }
func (n *Node) Op() Op { return n.op }
func (n *Node) SetOp(x Op) { n.op = x }
func (n *Node) Init() Nodes { return n.init }
func (n *Node) SetInit(x Nodes) { n.init = x }
func (n *Node) PtrInit() *Nodes { return &n.init }
func (n *Node) Body() Nodes { return n.body }
func (n *Node) SetBody(x Nodes) { n.body = x }
func (n *Node) PtrBody() *Nodes { return &n.body }
func (n *Node) List() Nodes { return n.list }
func (n *Node) SetList(x Nodes) { n.list = x }
func (n *Node) PtrList() *Nodes { return &n.list }
func (n *Node) Rlist() Nodes { return n.rlist }
func (n *Node) SetRlist(x Nodes) { n.rlist = x }
func (n *Node) PtrRlist() *Nodes { return &n.rlist }
func (n *Node) ResetAux() {
n.aux = 0
}
func (n *Node) SubOp() Op {
switch n.Op {
switch n.Op() {
case OASOP, ONAME:
default:
base.Fatalf("unexpected op: %v", n.Op)
base.Fatalf("unexpected op: %v", n.Op())
}
return Op(n.aux)
}
func (n *Node) SetSubOp(op Op) {
switch n.Op {
switch n.Op() {
case OASOP, ONAME:
default:
base.Fatalf("unexpected op: %v", n.Op)
base.Fatalf("unexpected op: %v", n.Op())
}
n.aux = uint8(op)
}
func (n *Node) IndexMapLValue() bool {
if n.Op != OINDEXMAP {
base.Fatalf("unexpected op: %v", n.Op)
if n.Op() != OINDEXMAP {
base.Fatalf("unexpected op: %v", n.Op())
}
return n.aux != 0
}
func (n *Node) SetIndexMapLValue(b bool) {
if n.Op != OINDEXMAP {
base.Fatalf("unexpected op: %v", n.Op)
if n.Op() != OINDEXMAP {
base.Fatalf("unexpected op: %v", n.Op())
}
if b {
n.aux = 1
@ -142,31 +142,31 @@ func (n *Node) SetIndexMapLValue(b bool) {
}
func (n *Node) TChanDir() types.ChanDir {
if n.Op != OTCHAN {
base.Fatalf("unexpected op: %v", n.Op)
if n.Op() != OTCHAN {
base.Fatalf("unexpected op: %v", n.Op())
}
return types.ChanDir(n.aux)
}
func (n *Node) SetTChanDir(dir types.ChanDir) {
if n.Op != OTCHAN {
base.Fatalf("unexpected op: %v", n.Op)
if n.Op() != OTCHAN {
base.Fatalf("unexpected op: %v", n.Op())
}
n.aux = uint8(dir)
}
func IsSynthetic(n *Node) bool {
name := n.Sym.Name
name := n.Sym().Name
return name[0] == '.' || name[0] == '~'
}
// IsAutoTmp indicates if n was created by the compiler as a temporary,
// based on the setting of the .AutoTemp flag in n's Name.
func IsAutoTmp(n *Node) bool {
if n == nil || n.Op != ONAME {
if n == nil || n.Op() != ONAME {
return false
}
return n.Name.AutoTemp()
return n.Name().AutoTemp()
}
const (
@ -229,8 +229,8 @@ func (n *Node) SetColas(b bool) { n.flags.set(nodeColas, b) }
func (n *Node) SetTransient(b bool) { n.flags.set(nodeTransient, b) }
func (n *Node) SetHasCall(b bool) { n.flags.set(nodeHasCall, b) }
func (n *Node) SetLikely(b bool) { n.flags.set(nodeLikely, b) }
func (n *Node) SetHasVal(b bool) { n.flags.set(nodeHasVal, b) }
func (n *Node) SetHasOpt(b bool) { n.flags.set(nodeHasOpt, b) }
func (n *Node) setHasVal(b bool) { n.flags.set(nodeHasVal, b) }
func (n *Node) setHasOpt(b bool) { n.flags.set(nodeHasOpt, b) }
func (n *Node) SetEmbedded(b bool) { n.flags.set(nodeEmbedded, b) }
// MarkNonNil marks a pointer n as being guaranteed non-nil,
@ -238,8 +238,8 @@ func (n *Node) SetEmbedded(b bool) { n.flags.set(nodeEmbedded, b) }
// During conversion to SSA, non-nil pointers won't have nil checks
// inserted before dereferencing. See state.exprPtr.
func (n *Node) MarkNonNil() {
if !n.Type.IsPtr() && !n.Type.IsUnsafePtr() {
base.Fatalf("MarkNonNil(%v), type %v", n, n.Type)
if !n.Type().IsPtr() && !n.Type().IsUnsafePtr() {
base.Fatalf("MarkNonNil(%v), type %v", n, n.Type())
}
n.flags.set(nodeNonNil, true)
}
@ -249,7 +249,7 @@ func (n *Node) MarkNonNil() {
// When n is a dereferencing operation, n does not need nil checks.
// When n is a makeslice+copy operation, n does not need length and cap checks.
func (n *Node) SetBounded(b bool) {
switch n.Op {
switch n.Op() {
case OINDEX, OSLICE, OSLICEARR, OSLICE3, OSLICE3ARR, OSLICESTR:
// No bounds checks needed.
case ODOTPTR, ODEREF:
@ -265,14 +265,14 @@ func (n *Node) SetBounded(b bool) {
// MarkReadonly indicates that n is an ONAME with readonly contents.
func (n *Node) MarkReadonly() {
if n.Op != ONAME {
base.Fatalf("Node.MarkReadonly %v", n.Op)
if n.Op() != ONAME {
base.Fatalf("Node.MarkReadonly %v", n.Op())
}
n.Name.SetReadonly(true)
n.Name().SetReadonly(true)
// Mark the linksym as readonly immediately
// so that the SSA backend can use this information.
// It will be overridden later during dumpglobls.
n.Sym.Linksym().Type = objabi.SRODATA
n.Sym().Linksym().Type = objabi.SRODATA
}
// Val returns the constant.Value for the node.
@ -280,7 +280,7 @@ func (n *Node) Val() constant.Value {
if !n.HasVal() {
return constant.MakeUnknown()
}
return *n.E.(*constant.Value)
return *n.e.(*constant.Value)
}
// SetVal sets the constant.Value for the node,
@ -291,11 +291,11 @@ func (n *Node) SetVal(v constant.Value) {
Dump("have Opt", n)
base.Fatalf("have Opt")
}
if n.Op == OLITERAL {
AssertValidTypeForConst(n.Type, v)
if n.Op() == OLITERAL {
AssertValidTypeForConst(n.Type(), v)
}
n.SetHasVal(true)
n.E = &v
n.setHasVal(true)
n.e = &v
}
// Opt returns the optimizer data for the node.
@ -303,7 +303,7 @@ func (n *Node) Opt() interface{} {
if !n.HasOpt() {
return nil
}
return n.E
return n.e
}
// SetOpt sets the optimizer data for the node, which must not have been used with SetVal.
@ -311,8 +311,8 @@ func (n *Node) Opt() interface{} {
func (n *Node) SetOpt(x interface{}) {
if x == nil {
if n.HasOpt() {
n.SetHasOpt(false)
n.E = nil
n.setHasOpt(false)
n.e = nil
}
return
}
@ -321,22 +321,22 @@ func (n *Node) SetOpt(x interface{}) {
Dump("have Val", n)
base.Fatalf("have Val")
}
n.SetHasOpt(true)
n.E = x
n.setHasOpt(true)
n.e = x
}
func (n *Node) Iota() int64 {
return n.Xoffset
return n.Offset()
}
func (n *Node) SetIota(x int64) {
n.Xoffset = x
n.SetOffset(x)
}
// mayBeShared reports whether n may occur in multiple places in the AST.
// Extra care must be taken when mutating such a node.
func MayBeShared(n *Node) bool {
switch n.Op {
switch n.Op() {
case ONAME, OLITERAL, ONIL, OTYPE:
return true
}
@ -345,10 +345,10 @@ func MayBeShared(n *Node) bool {
// funcname returns the name (without the package) of the function n.
func FuncName(n *Node) string {
if n == nil || n.Func == nil || n.Func.Nname == nil {
if n == nil || n.Func() == nil || n.Func().Nname == nil {
return "<nil>"
}
return n.Func.Nname.Sym.Name
return n.Func().Nname.Sym().Name
}
// pkgFuncName returns the name of the function referenced by n, with package prepended.
@ -360,13 +360,13 @@ func PkgFuncName(n *Node) string {
if n == nil {
return "<nil>"
}
if n.Op == ONAME {
s = n.Sym
if n.Op() == ONAME {
s = n.Sym()
} else {
if n.Func == nil || n.Func.Nname == nil {
if n.Func() == nil || n.Func().Nname == nil {
return "<nil>"
}
s = n.Func.Nname.Sym
s = n.Func().Nname.Sym()
}
pkg := s.Pkg
@ -1142,12 +1142,12 @@ func Inspect(n *Node, f func(*Node) bool) {
if n == nil || !f(n) {
return
}
InspectList(n.Ninit, f)
Inspect(n.Left, f)
Inspect(n.Right, f)
InspectList(n.List, f)
InspectList(n.Nbody, f)
InspectList(n.Rlist, f)
InspectList(n.Init(), f)
Inspect(n.Left(), f)
Inspect(n.Right(), f)
InspectList(n.List(), f)
InspectList(n.Body(), f)
InspectList(n.Rlist(), f)
}
func InspectList(l Nodes, f func(*Node) bool) {
@ -1242,8 +1242,8 @@ func NodAt(pos src.XPos, op Op, nleft, nright *Node) *Node {
f Func
}
n = &x.n
n.Func = &x.f
n.Func.Decl = n
n.SetFunc(&x.f)
n.Func().Decl = n
case ONAME:
base.Fatalf("use newname instead")
case OLABEL, OPACK:
@ -1252,16 +1252,16 @@ func NodAt(pos src.XPos, op Op, nleft, nright *Node) *Node {
m Name
}
n = &x.n
n.Name = &x.m
n.SetName(&x.m)
default:
n = new(Node)
}
n.Op = op
n.Left = nleft
n.Right = nright
n.Pos = pos
n.Xoffset = types.BADWIDTH
n.Orig = n
n.SetOp(op)
n.SetLeft(nleft)
n.SetRight(nright)
n.SetPos(pos)
n.SetOffset(types.BADWIDTH)
n.SetOrig(n)
return n
}
@ -1278,14 +1278,14 @@ func NewNameAt(pos src.XPos, s *types.Sym) *Node {
p Param
}
n := &x.n
n.Name = &x.m
n.Name.Param = &x.p
n.SetName(&x.m)
n.Name().Param = &x.p
n.Op = ONAME
n.Pos = pos
n.Orig = n
n.SetOp(ONAME)
n.SetPos(pos)
n.SetOrig(n)
n.Sym = s
n.SetSym(s)
return n
}
@ -1358,7 +1358,7 @@ func OrigSym(s *types.Sym) *types.Sym {
return nil
case 'b': // originally the blank identifier _
// TODO(mdempsky): Does s.Pkg matter here?
return BlankNode.Sym
return BlankNode.Sym()
}
return s
}
@ -1374,48 +1374,48 @@ func OrigSym(s *types.Sym) *types.Sym {
// SliceBounds returns n's slice bounds: low, high, and max in expr[low:high:max].
// n must be a slice expression. max is nil if n is a simple slice expression.
func (n *Node) SliceBounds() (low, high, max *Node) {
if n.List.Len() == 0 {
if n.List().Len() == 0 {
return nil, nil, nil
}
switch n.Op {
switch n.Op() {
case OSLICE, OSLICEARR, OSLICESTR:
s := n.List.Slice()
s := n.List().Slice()
return s[0], s[1], nil
case OSLICE3, OSLICE3ARR:
s := n.List.Slice()
s := n.List().Slice()
return s[0], s[1], s[2]
}
base.Fatalf("SliceBounds op %v: %v", n.Op, n)
base.Fatalf("SliceBounds op %v: %v", n.Op(), n)
return nil, nil, nil
}
// SetSliceBounds sets n's slice bounds, where n is a slice expression.
// n must be a slice expression. If max is non-nil, n must be a full slice expression.
func (n *Node) SetSliceBounds(low, high, max *Node) {
switch n.Op {
switch n.Op() {
case OSLICE, OSLICEARR, OSLICESTR:
if max != nil {
base.Fatalf("SetSliceBounds %v given three bounds", n.Op)
base.Fatalf("SetSliceBounds %v given three bounds", n.Op())
}
s := n.List.Slice()
s := n.List().Slice()
if s == nil {
if low == nil && high == nil {
return
}
n.List.Set2(low, high)
n.PtrList().Set2(low, high)
return
}
s[0] = low
s[1] = high
return
case OSLICE3, OSLICE3ARR:
s := n.List.Slice()
s := n.List().Slice()
if s == nil {
if low == nil && high == nil && max == nil {
return
}
n.List.Set3(low, high, max)
n.PtrList().Set3(low, high, max)
return
}
s[0] = low
@ -1423,7 +1423,7 @@ func (n *Node) SetSliceBounds(low, high, max *Node) {
s[2] = max
return
}
base.Fatalf("SetSliceBounds op %v: %v", n.Op, n)
base.Fatalf("SetSliceBounds op %v: %v", n.Op(), n)
}
// IsSlice3 reports whether o is a slice3 op (OSLICE3, OSLICE3ARR).
@ -1511,7 +1511,7 @@ func (n *Node) RawCopy() *Node {
// Orig pointing to itself.
func SepCopy(n *Node) *Node {
copy := *n
copy.Orig = &copy
copy.orig = &copy
return &copy
}
@ -1524,8 +1524,8 @@ func SepCopy(n *Node) *Node {
// messages; see issues #26855, #27765).
func Copy(n *Node) *Node {
copy := *n
if n.Orig == n {
copy.Orig = &copy
if n.Orig() == n {
copy.orig = &copy
}
return &copy
}
@ -1534,18 +1534,18 @@ func Copy(n *Node) *Node {
func IsNil(n *Node) bool {
// Check n.Orig because constant propagation may produce typed nil constants,
// which don't exist in the Go spec.
return n.Orig.Op == ONIL
return n.Orig().Op() == ONIL
}
func IsBlank(n *Node) bool {
if n == nil {
return false
}
return n.Sym.IsBlank()
return n.Sym().IsBlank()
}
// IsMethod reports whether n is a method.
// n must be a function or a method.
func IsMethod(n *Node) bool {
return n.Type.Recv() != nil
return n.Type().Recv() != nil
}

View file

@ -13,7 +13,7 @@ import (
)
func ConstType(n *Node) constant.Kind {
if n == nil || n.Op != OLITERAL {
if n == nil || n.Op() != OLITERAL {
return constant.Unknown
}
return n.Val().Kind()
@ -32,7 +32,7 @@ func ConstValue(n *Node) interface{} {
case constant.String:
return constant.StringVal(v)
case constant.Int:
return Int64Val(n.Type, v)
return Int64Val(n.Type(), v)
case constant.Float:
return Float64Val(v)
case constant.Complex:
@ -94,7 +94,7 @@ func ValidTypeForConst(t *types.Type, v constant.Value) bool {
func NewLiteral(v constant.Value) *Node {
n := Nod(OLITERAL, nil, nil)
if k := v.Kind(); k != constant.Unknown {
n.Type = idealType(k)
n.SetType(idealType(k))
n.SetVal(v)
}
return n

View file

@ -236,7 +236,7 @@ func nilcheckelim2(f *Func) {
continue
}
if v.Type.IsMemory() || v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
if v.Op == OpVarKill || v.Op == OpVarLive || (v.Op == OpVarDef && !v.Aux.(*ir.Node).Type.HasPointers()) {
if v.Op == OpVarKill || v.Op == OpVarLive || (v.Op == OpVarDef && !v.Aux.(*ir.Node).Type().HasPointers()) {
// These ops don't really change memory.
continue
// Note: OpVarDef requires that the defined variable not have pointers.