[dev.regabi] cmd/compile: remove Left, Right etc methods [generated]

Now that the generic graph structure methods - Left, Right, and so on -
have been removed from the Node interface, each implementation's uses
can be replaced with direct field access, using more specific names,
and the methods themselves can be deleted.

Passes buildall w/ toolstash -cmp.

[git-generate]

cd src/cmd/compile/internal/ir
rf '
	mv Func.iota Func.Iota_
	mv Name.fn Name.Func_
'

cd ../gc
rf '
ex . ../ir {
        import "cmd/compile/internal/ir"
        import "cmd/compile/internal/types"

        var ns ir.Nodes
        var b bool
        var i64 int64
        var n ir.Node
        var op ir.Op
        var sym *types.Sym
        var class ir.Class

        var decl *ir.Decl
        decl.Left()         -> decl.X
        decl.SetLeft(n)     -> decl.X = n

        var asl *ir.AssignListStmt
        asl.List()          -> asl.Lhs
        asl.PtrList()       -> &asl.Lhs
        asl.SetList(ns)     -> asl.Lhs = ns
        asl.Rlist()         -> asl.Rhs
        asl.PtrRlist()      -> &asl.Rhs
        asl.SetRlist(ns)    -> asl.Rhs = ns
        asl.Colas()         -> asl.Def
        asl.SetColas(b)     -> asl.Def = b

        var as *ir.AssignStmt
        as.Left()           -> as.X
        as.SetLeft(n)       -> as.X = n
        as.Right()          -> as.Y
        as.SetRight(n)      -> as.Y = n
        as.Colas()          -> as.Def
        as.SetColas(b)      -> as.Def = b

        var ao *ir.AssignOpStmt
        ao.Left()           -> ao.X
        ao.SetLeft(n)       -> ao.X = n
        ao.Right()          -> ao.Y
        ao.SetRight(n)      -> ao.Y = n
        ao.SubOp()          -> ao.AsOp
        ao.SetSubOp(op)     -> ao.AsOp = op
        ao.Implicit()       -> ao.IncDec
        ao.SetImplicit(b)   -> ao.IncDec = b

        var bl *ir.BlockStmt
        bl.List()           -> bl.List_
        bl.PtrList()        -> &bl.List_
        bl.SetList(ns)      -> bl.List_ = ns

        var br *ir.BranchStmt
        br.Sym()            -> br.Label
        br.SetSym(sym)      -> br.Label = sym

        var cas *ir.CaseStmt
        cas.List()          -> cas.List_
        cas.PtrList()       -> &cas.List_
        cas.SetList(ns)     -> cas.List_ = ns
        cas.Body()          -> cas.Body_
        cas.PtrBody()       -> &cas.Body_
        cas.SetBody(ns)     -> cas.Body_ = ns
        cas.Rlist()         -> cas.Vars
        cas.PtrRlist()      -> &cas.Vars
        cas.SetRlist(ns)    -> cas.Vars = ns
        cas.Left()          -> cas.Comm
        cas.SetLeft(n)      -> cas.Comm = n

        var fr *ir.ForStmt
        fr.Sym()            -> fr.Label
        fr.SetSym(sym)      -> fr.Label = sym
        fr.Left()           -> fr.Cond
        fr.SetLeft(n)       -> fr.Cond = n
        fr.Right()          -> fr.Post
        fr.SetRight(n)      -> fr.Post = n
        fr.Body()           -> fr.Body_
        fr.PtrBody()        -> &fr.Body_
        fr.SetBody(ns)      -> fr.Body_ = ns
        fr.List()           -> fr.Late
        fr.PtrList()        -> &fr.Late
        fr.SetList(ns)      -> fr.Late = ns
        fr.HasBreak()       -> fr.HasBreak_
        fr.SetHasBreak(b)   -> fr.HasBreak_ = b

        var gs *ir.GoDeferStmt
        gs.Left()           -> gs.Call
        gs.SetLeft(n)       -> gs.Call = n

        var ifs *ir.IfStmt
        ifs.Left()          -> ifs.Cond
        ifs.SetLeft(n)      -> ifs.Cond = n
        ifs.Body()          -> ifs.Body_
        ifs.PtrBody()       -> &ifs.Body_
        ifs.SetBody(ns)     -> ifs.Body_ = ns
        ifs.Rlist()         -> ifs.Else
        ifs.PtrRlist()      -> &ifs.Else
        ifs.SetRlist(ns)    -> ifs.Else = ns
        ifs.Likely()        -> ifs.Likely_
        ifs.SetLikely(b)    -> ifs.Likely_ = b

        var im *ir.InlineMarkStmt
        im.Offset()         -> im.Index
        im.SetOffset(i64)   -> im.Index = i64

        var lab *ir.LabelStmt
        lab.Sym()           -> lab.Label
        lab.SetSym(sym)     -> lab.Label = sym

        var rng *ir.RangeStmt
        rng.Sym()           -> rng.Label
        rng.SetSym(sym)     -> rng.Label = sym
        rng.Right()         -> rng.X
        rng.SetRight(n)     -> rng.X = n
        rng.Body()          -> rng.Body_
        rng.PtrBody()       -> &rng.Body_
        rng.SetBody(ns)     -> rng.Body_ = ns
        rng.List()          -> rng.Vars
        rng.PtrList()       -> &rng.Vars
        rng.SetList(ns)     -> rng.Vars = ns
        rng.HasBreak()      -> rng.HasBreak_
        rng.SetHasBreak(b)  -> rng.HasBreak_ = b
        rng.Colas()         -> rng.Def
        rng.SetColas(b)     -> rng.Def = b

        var ret *ir.ReturnStmt
        ret.List()          -> ret.Results
        ret.PtrList()       -> &ret.Results
        ret.SetList(ns)     -> ret.Results = ns

        var sel *ir.SelectStmt
        sel.List()          -> sel.Cases
        sel.PtrList()       -> &sel.Cases
        sel.SetList(ns)     -> sel.Cases = ns
        sel.Sym()           -> sel.Label
        sel.SetSym(sym)     -> sel.Label = sym
        sel.HasBreak()      -> sel.HasBreak_
        sel.SetHasBreak(b)  -> sel.HasBreak_ = b
        sel.Body()          -> sel.Compiled
        sel.PtrBody()       -> &sel.Compiled
        sel.SetBody(ns)     -> sel.Compiled = ns

        var send *ir.SendStmt
        send.Left()         -> send.Chan
        send.SetLeft(n)     -> send.Chan = n
        send.Right()        -> send.Value
        send.SetRight(n)    -> send.Value = n

        var sw *ir.SwitchStmt
        sw.Left()           -> sw.Tag
        sw.SetLeft(n)       -> sw.Tag = n
        sw.List()           -> sw.Cases
        sw.PtrList()        -> &sw.Cases
        sw.SetList(ns)      -> sw.Cases = ns
        sw.Body()           -> sw.Compiled
        sw.PtrBody()        -> &sw.Compiled
        sw.SetBody(ns)      -> sw.Compiled = ns
        sw.Sym()            -> sw.Label
        sw.SetSym(sym)      -> sw.Label = sym
        sw.HasBreak()       -> sw.HasBreak_
        sw.SetHasBreak(b)   -> sw.HasBreak_ = b

        var tg *ir.TypeSwitchGuard
        tg.Left()           -> tg.Tag
        tg.SetLeft(nil)     -> tg.Tag = nil
        tg.SetLeft(n)       -> tg.Tag = n.(*ir.Ident)
        tg.Right()          -> tg.X
        tg.SetRight(n)      -> tg.X = n

        var adds *ir.AddStringExpr
        adds.List()         -> adds.List_
        adds.PtrList()      -> &adds.List_
        adds.SetList(ns)    -> adds.List_ = ns

        var addr *ir.AddrExpr
        addr.Left()         -> addr.X
        addr.SetLeft(n)     -> addr.X = n
        addr.Right()        -> addr.Alloc
        addr.SetRight(n)    -> addr.Alloc = n

        var bin *ir.BinaryExpr
        bin.Left()          -> bin.X
        bin.SetLeft(n)      -> bin.X = n
        bin.Right()         -> bin.Y
        bin.SetRight(n)     -> bin.Y = n

        var log *ir.LogicalExpr
        log.Left()          -> log.X
        log.SetLeft(n)      -> log.X = n
        log.Right()         -> log.Y
        log.SetRight(n)     -> log.Y = n

        var call *ir.CallExpr
        call.Left()         -> call.X
        call.SetLeft(n)     -> call.X = n
        call.List()         -> call.Args
        call.PtrList()      -> &call.Args
        call.SetList(ns)    -> call.Args = ns
        call.Rlist()        -> call.Rargs
        call.PtrRlist()     -> &call.Rargs
        call.SetRlist(ns)   -> call.Rargs = ns
        call.IsDDD()        -> call.DDD
        call.SetIsDDD(b)    -> call.DDD = b
        call.NoInline()     -> call.NoInline_
        call.SetNoInline(b) -> call.NoInline_ = b
        call.Body()         -> call.Body_
        call.PtrBody()      -> &call.Body_
        call.SetBody(ns)    -> call.Body_ = ns

        var cp *ir.CallPartExpr
        cp.Func()           -> cp.Func_
        cp.Left()           -> cp.X
        cp.SetLeft(n)       -> cp.X = n
        cp.Sym()            -> cp.Method.Sym

        var clo *ir.ClosureExpr
        clo.Func()          -> clo.Func_

        var cr *ir.ClosureReadExpr
        cr.Offset()         -> cr.Offset_

        var cl *ir.CompLitExpr
        cl.Right()          -> cl.Ntype
        cl.SetRight(nil)    -> cl.Ntype = nil
        cl.SetRight(n)      -> cl.Ntype = ir.Node(n).(ir.Ntype)
        cl.List()           -> cl.List_
        cl.PtrList()        -> &cl.List_
        cl.SetList(ns)      -> cl.List_ = ns

        var conv *ir.ConvExpr
        conv.Left()         -> conv.X
        conv.SetLeft(n)     -> conv.X = n

        var ix *ir.IndexExpr
        ix.Left()           -> ix.X
        ix.SetLeft(n)       -> ix.X = n
        ix.Right()          -> ix.Index
        ix.SetRight(n)      -> ix.Index = n
        ix.IndexMapLValue() -> ix.Assigned
        ix.SetIndexMapLValue(b) -> ix.Assigned = b

        var kv *ir.KeyExpr
        kv.Left()           -> kv.Key
        kv.SetLeft(n)       -> kv.Key = n
        kv.Right()          -> kv.Value
        kv.SetRight(n)      -> kv.Value = n

        var sk *ir.StructKeyExpr
        sk.Sym()            -> sk.Field
        sk.SetSym(sym)      -> sk.Field = sym
        sk.Left()           -> sk.Value
        sk.SetLeft(n)       -> sk.Value = n
        sk.Offset()         -> sk.Offset_
        sk.SetOffset(i64)   -> sk.Offset_ = i64

        var ic *ir.InlinedCallExpr
        ic.Body()           -> ic.Body_
        ic.PtrBody()        -> &ic.Body_
        ic.SetBody(ns)      -> ic.Body_ = ns
        ic.Rlist()          -> ic.ReturnVars
        ic.PtrRlist()       -> &ic.ReturnVars
        ic.SetRlist(ns)     -> ic.ReturnVars = ns

        var mak *ir.MakeExpr
        mak.Left()          -> mak.Len
        mak.SetLeft(n)      -> mak.Len = n
        mak.Right()         -> mak.Cap
        mak.SetRight(n)     -> mak.Cap = n

        var par *ir.ParenExpr
        par.Left()          -> par.X
        par.SetLeft(n)      -> par.X = n

        var res *ir.ResultExpr
        res.Offset()        -> res.Offset_
        res.SetOffset(i64)  -> res.Offset_ = i64

        var dot *ir.SelectorExpr
        dot.Left()          -> dot.X
        dot.SetLeft(n)      -> dot.X = n
        dot.Sym()           -> dot.Sel
        dot.SetSym(sym)     -> dot.Sel = sym
        dot.Offset()        -> dot.Offset_
        dot.SetOffset(i64)  -> dot.Offset_ = i64

        var sl *ir.SliceExpr
        sl.Left()           -> sl.X
        sl.SetLeft(n)       -> sl.X = n
        sl.List()           -> sl.List_
        sl.PtrList()        -> &sl.List_
        sl.SetList(ns)      -> sl.List_ = ns

        var sh *ir.SliceHeaderExpr
        sh.Left()           -> sh.Ptr
        sh.SetLeft(n)       -> sh.Ptr = n
        sh.List()           -> sh.LenCap_
        sh.PtrList()        -> &sh.LenCap_
        sh.SetList(ns)      -> sh.LenCap_ = ns

        var st *ir.StarExpr
        st.Left()           -> st.X
        st.SetLeft(n)       -> st.X = n

        var ta *ir.TypeAssertExpr
        ta.Left()           -> ta.X
        ta.SetLeft(n)       -> ta.X = n
        ta.Right()          -> ta.Ntype
        ta.SetRight(n)      -> ta.Ntype = n
        ta.List()           -> ta.Itab
        ta.PtrList()        -> &ta.Itab
        ta.SetList(ns)      -> ta.Itab = ns

        var u *ir.UnaryExpr
        u.Left()            -> u.X
        u.SetLeft(n)        -> u.X = n

        var fn *ir.Func
        fn.Body()           -> fn.Body_
        fn.PtrBody()        -> &fn.Body_
        fn.SetBody(ns)      -> fn.Body_ = ns
        fn.Iota()           -> fn.Iota_
        fn.SetIota(i64)     -> fn.Iota_ = i64
        fn.Func()           -> fn

        var nam *ir.Name
        nam.SubOp()         -> nam.BuiltinOp
        nam.SetSubOp(op)    -> nam.BuiltinOp = op
        nam.Class()         -> nam.Class_
        nam.SetClass(class) -> nam.Class_ = class
        nam.Func()          -> nam.Func_
        nam.Offset()        -> nam.Offset_
        nam.SetOffset(i64)  -> nam.Offset_ = i64
}

ex . ../ir {
        import "cmd/compile/internal/ir"

        var n ir.Nodes

        (&n).Append         -> n.Append
        (&n).AppendNodes    -> n.AppendNodes
        (&n).MoveNodes      -> n.MoveNodes
        (&n).Prepend        -> n.Prepend
        (&n).Set            -> n.Set
        (&n).Set1           -> n.Set1
        (&n).Set2           -> n.Set2
        (&n).Set3           -> n.Set3

        var ntype ir.Ntype
        ir.Node(ntype).(ir.Ntype) -> ntype
}
'

cd ../ir
rf '
rm \
        Decl.Left Decl.SetLeft \
        AssignListStmt.List AssignListStmt.PtrList AssignListStmt.SetList \
        AssignListStmt.Rlist AssignListStmt.PtrRlist AssignListStmt.SetRlist \
        AssignListStmt.Colas AssignListStmt.SetColas \
        AssignStmt.Left AssignStmt.SetLeft \
        AssignStmt.Right AssignStmt.SetRight \
        AssignStmt.Colas AssignStmt.SetColas \
        AssignOpStmt.Left AssignOpStmt.SetLeft \
        AssignOpStmt.Right AssignOpStmt.SetRight \
        AssignOpStmt.SubOp AssignOpStmt.SetSubOp \
        AssignOpStmt.Implicit AssignOpStmt.SetImplicit \
        BlockStmt.List BlockStmt.PtrList BlockStmt.SetList \
        BranchStmt.SetSym \
        CaseStmt.List CaseStmt.PtrList CaseStmt.SetList \
        CaseStmt.Body CaseStmt.PtrBody CaseStmt.SetBody \
        CaseStmt.Rlist CaseStmt.PtrRlist CaseStmt.SetRlist \
        CaseStmt.Left CaseStmt.SetLeft \
        ForStmt.Left ForStmt.SetLeft \
        ForStmt.Right ForStmt.SetRight \
        ForStmt.Body ForStmt.PtrBody ForStmt.SetBody \
        ForStmt.List ForStmt.PtrList ForStmt.SetList \
        ForStmt.HasBreak ForStmt.SetHasBreak \
        ForStmt.Sym ForStmt.SetSym \
        GoDeferStmt.Left GoDeferStmt.SetLeft \
        IfStmt.Left IfStmt.SetLeft \
        IfStmt.Body IfStmt.PtrBody IfStmt.SetBody \
        IfStmt.Rlist IfStmt.PtrRlist IfStmt.SetRlist \
        IfStmt.Likely IfStmt.SetLikely \
        LabelStmt.SetSym \
        RangeStmt.Right RangeStmt.SetRight \
        RangeStmt.Body RangeStmt.PtrBody RangeStmt.SetBody \
        RangeStmt.List RangeStmt.PtrList RangeStmt.SetList \
        RangeStmt.HasBreak RangeStmt.SetHasBreak \
        RangeStmt.Colas RangeStmt.SetColas \
        RangeStmt.Sym RangeStmt.SetSym \
        ReturnStmt.List ReturnStmt.PtrList ReturnStmt.SetList \
        SelectStmt.List SelectStmt.PtrList SelectStmt.SetList \
        SelectStmt.HasBreak SelectStmt.SetHasBreak \
        SelectStmt.Body SelectStmt.PtrBody SelectStmt.SetBody \
        SelectStmt.Sym SelectStmt.SetSym \
        SendStmt.Left SendStmt.SetLeft \
        SendStmt.Right SendStmt.SetRight \
        SwitchStmt.Left SwitchStmt.SetLeft \
        SwitchStmt.List SwitchStmt.PtrList SwitchStmt.SetList \
        SwitchStmt.Body SwitchStmt.PtrBody SwitchStmt.SetBody \
        SwitchStmt.HasBreak SwitchStmt.SetHasBreak \
        SwitchStmt.Sym SwitchStmt.SetSym \
        TypeSwitchGuard.Left TypeSwitchGuard.SetLeft \
        TypeSwitchGuard.Right TypeSwitchGuard.SetRight \
        AddStringExpr.List AddStringExpr.PtrList AddStringExpr.SetList \
        AddrExpr.Left AddrExpr.SetLeft \
        AddrExpr.Right AddrExpr.SetRight \
        BinaryExpr.Left BinaryExpr.SetLeft \
        BinaryExpr.Right BinaryExpr.SetRight \
        LogicalExpr.Left LogicalExpr.SetLeft \
        LogicalExpr.Right LogicalExpr.SetRight \
        CallExpr.Left CallExpr.SetLeft \
        CallExpr.List CallExpr.PtrList CallExpr.SetList \
        CallExpr.Rlist CallExpr.PtrRlist CallExpr.SetRlist \
        CallExpr.NoInline CallExpr.SetNoInline \
        CallExpr.Body CallExpr.PtrBody CallExpr.SetBody \
        CallExpr.IsDDD CallExpr.SetIsDDD \
        CallPartExpr.Left CallPartExpr.SetLeft \
        ClosureReadExpr.Offset \
        ClosureReadExpr.Type \ # provided by miniExpr already
        CompLitExpr.Right CompLitExpr.SetRight \
        CompLitExpr.List CompLitExpr.PtrList CompLitExpr.SetList \
        ConvExpr.Left ConvExpr.SetLeft \
        IndexExpr.Left IndexExpr.SetLeft \
        IndexExpr.Right IndexExpr.SetRight \
        IndexExpr.IndexMapLValue IndexExpr.SetIndexMapLValue \
        KeyExpr.Left KeyExpr.SetLeft \
        KeyExpr.Right KeyExpr.SetRight \
        StructKeyExpr.Left StructKeyExpr.SetLeft \
        StructKeyExpr.Offset StructKeyExpr.SetOffset \
        StructKeyExpr.SetSym \
        InlinedCallExpr.Body InlinedCallExpr.PtrBody InlinedCallExpr.SetBody \
        InlinedCallExpr.Rlist InlinedCallExpr.PtrRlist InlinedCallExpr.SetRlist \
        MakeExpr.Left MakeExpr.SetLeft \
        MakeExpr.Right MakeExpr.SetRight \
        MethodExpr.Left MethodExpr.SetLeft \
        MethodExpr.Right MethodExpr.SetRight \
        MethodExpr.Offset MethodExpr.SetOffset \
        MethodExpr.Class MethodExpr.SetClass \
        ParenExpr.Left ParenExpr.SetLeft \
        ResultExpr.Offset ResultExpr.SetOffset \
        ReturnStmt.IsDDD \
        SelectorExpr.Left SelectorExpr.SetLeft \
        SelectorExpr.Offset SelectorExpr.SetOffset \
        SelectorExpr.SetSym \
        SliceExpr.Left SliceExpr.SetLeft \
        SliceExpr.List SliceExpr.PtrList SliceExpr.SetList \
        SliceHeaderExpr.Left SliceHeaderExpr.SetLeft \
        SliceHeaderExpr.List SliceHeaderExpr.PtrList SliceHeaderExpr.SetList \
        StarExpr.Left StarExpr.SetLeft \
        TypeAssertExpr.Left TypeAssertExpr.SetLeft \
        TypeAssertExpr.Right TypeAssertExpr.SetRight \
        TypeAssertExpr.List TypeAssertExpr.PtrList TypeAssertExpr.SetList \
        UnaryExpr.Left UnaryExpr.SetLeft \
        Func.Body Func.PtrBody Func.SetBody \
        Func.Iota Func.SetIota \
        CallPartExpr.Func ClosureExpr.Func Func.Func Name.Func \

mv BlockStmt.List_ BlockStmt.List
mv CaseStmt.List_ CaseStmt.List
mv CaseStmt.Body_ CaseStmt.Body
mv ForStmt.Body_ ForStmt.Body
mv ForStmt.HasBreak_ ForStmt.HasBreak
mv Func.Iota_ Func.Iota
mv IfStmt.Body_ IfStmt.Body
mv IfStmt.Likely_ IfStmt.Likely
mv RangeStmt.Body_ RangeStmt.Body
mv RangeStmt.HasBreak_ RangeStmt.HasBreak
mv SelectStmt.HasBreak_ SelectStmt.HasBreak
mv SwitchStmt.HasBreak_ SwitchStmt.HasBreak
mv AddStringExpr.List_ AddStringExpr.List
mv CallExpr.NoInline_ CallExpr.NoInline
mv CallExpr.Body_ CallExpr.Body # TODO what is this?
mv CallExpr.DDD CallExpr.IsDDD
mv ClosureReadExpr.Offset_ ClosureReadExpr.Offset
mv CompLitExpr.List_ CompLitExpr.List
mv StructKeyExpr.Offset_ StructKeyExpr.Offset
mv InlinedCallExpr.Body_ InlinedCallExpr.Body
mv ResultExpr.Offset_ ResultExpr.Offset
mv SelectorExpr.Offset_ SelectorExpr.Offset
mv SliceExpr.List_ SliceExpr.List
mv SliceHeaderExpr.LenCap_ SliceHeaderExpr.LenCap
mv Func.Body_ Func.Body
mv CallPartExpr.Func_ CallPartExpr.Func
mv ClosureExpr.Func_ ClosureExpr.Func
mv Name.Func_ Name.Func
'

Change-Id: Ia2ee59649674f83eb123e63fda7a7781cf91cc56
Reviewed-on: https://go-review.googlesource.com/c/go/+/277935
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
This commit is contained in:
Russ Cox 2020-12-23 00:02:08 -05:00
parent 14d667341f
commit f9d373720e
41 changed files with 2539 additions and 2803 deletions

View file

@ -20,7 +20,7 @@ import (
func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field {
field := types.NewField(src.NoXPos, s, t)
n := NewName(s)
n.SetClass(which)
n.Class_ = which
field.Nname = n
n.SetType(t)
return field
@ -78,7 +78,7 @@ func verifyParamResultOffset(t *testing.T, f *types.Field, r ABIParamAssignment,
n := ir.AsNode(f.Nname).(*ir.Name)
if n.FrameOffset() != int64(r.Offset) {
t.Errorf("%s %d: got offset %d wanted %d t=%v",
which, idx, r.Offset, n.Offset(), f.Type)
which, idx, r.Offset, n.Offset_, f.Type)
return 1
}
return 0

View file

@ -324,11 +324,11 @@ func genhash(t *types.Type) *obj.LSym {
nx := ir.NewIndexExpr(base.Pos, np, ni)
nx.SetBounded(true)
na := nodAddr(nx)
call.PtrList().Append(na)
call.PtrList().Append(nh)
loop.PtrBody().Append(ir.NewAssignStmt(base.Pos, nh, call))
call.Args.Append(na)
call.Args.Append(nh)
loop.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
fn.PtrBody().Append(loop)
fn.Body.Append(loop)
case types.TSTRUCT:
// Walk the struct using memhash for runs of AMEM
@ -348,9 +348,9 @@ func genhash(t *types.Type) *obj.LSym {
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nodAddr(nx)
call.PtrList().Append(na)
call.PtrList().Append(nh)
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nh, call))
call.Args.Append(na)
call.Args.Append(nh)
fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
i++
continue
}
@ -363,21 +363,21 @@ func genhash(t *types.Type) *obj.LSym {
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nodAddr(nx)
call.PtrList().Append(na)
call.PtrList().Append(nh)
call.PtrList().Append(nodintconst(size))
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nh, call))
call.Args.Append(na)
call.Args.Append(nh)
call.Args.Append(nodintconst(size))
fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
i = next
}
}
r := ir.NewReturnStmt(base.Pos, nil)
r.PtrList().Append(nh)
fn.PtrBody().Append(r)
r.Results.Append(nh)
fn.Body.Append(r)
if base.Flag.LowerR != 0 {
ir.DumpList("genhash body", fn.Body())
ir.DumpList("genhash body", fn.Body)
}
funcbody()
@ -386,7 +386,7 @@ func genhash(t *types.Type) *obj.LSym {
typecheckFunc(fn)
Curfn = fn
typecheckslice(fn.Body().Slice(), ctxStmt)
typecheckslice(fn.Body.Slice(), ctxStmt)
Curfn = nil
if base.Debug.DclStack != 0 {
@ -587,11 +587,11 @@ func geneq(t *types.Type) *obj.LSym {
for i := int64(0); i < nelem; i++ {
// if check {} else { goto neq }
nif := ir.NewIfStmt(base.Pos, checkIdx(nodintconst(i)), nil, nil)
nif.PtrRlist().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
fn.PtrBody().Append(nif)
nif.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
fn.Body.Append(nif)
}
if last {
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, checkIdx(nodintconst(nelem))))
fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, checkIdx(nodintconst(nelem))))
}
} else {
// Generate a for loop.
@ -604,11 +604,11 @@ func geneq(t *types.Type) *obj.LSym {
loop.PtrInit().Append(init)
// if eq(pi, qi) {} else { goto neq }
nif := ir.NewIfStmt(base.Pos, checkIdx(i), nil, nil)
nif.PtrRlist().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
loop.PtrBody().Append(nif)
fn.PtrBody().Append(loop)
nif.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
loop.Body.Append(nif)
fn.Body.Append(loop)
if last {
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
}
}
}
@ -718,42 +718,42 @@ func geneq(t *types.Type) *obj.LSym {
}
if len(flatConds) == 0 {
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, nodbool(true)))
} else {
for _, c := range flatConds[:len(flatConds)-1] {
// if cond {} else { goto neq }
n := ir.NewIfStmt(base.Pos, c, nil, nil)
n.PtrRlist().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
fn.PtrBody().Append(n)
n.Else.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, neq))
fn.Body.Append(n)
}
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, flatConds[len(flatConds)-1]))
fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, flatConds[len(flatConds)-1]))
}
}
// ret:
// return
ret := autolabel(".ret")
fn.PtrBody().Append(ir.NewLabelStmt(base.Pos, ret))
fn.PtrBody().Append(ir.NewReturnStmt(base.Pos, nil))
fn.Body.Append(ir.NewLabelStmt(base.Pos, ret))
fn.Body.Append(ir.NewReturnStmt(base.Pos, nil))
// neq:
// r = false
// return (or goto ret)
fn.PtrBody().Append(ir.NewLabelStmt(base.Pos, neq))
fn.PtrBody().Append(ir.NewAssignStmt(base.Pos, nr, nodbool(false)))
fn.Body.Append(ir.NewLabelStmt(base.Pos, neq))
fn.Body.Append(ir.NewAssignStmt(base.Pos, nr, nodbool(false)))
if EqCanPanic(t) || anyCall(fn) {
// Epilogue is large, so share it with the equal case.
fn.PtrBody().Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, ret))
fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.OGOTO, ret))
} else {
// Epilogue is small, so don't bother sharing.
fn.PtrBody().Append(ir.NewReturnStmt(base.Pos, nil))
fn.Body.Append(ir.NewReturnStmt(base.Pos, nil))
}
// TODO(khr): the epilogue size detection condition above isn't perfect.
// We should really do a generic CL that shares epilogues across
// the board. See #24936.
if base.Flag.LowerR != 0 {
ir.DumpList("geneq body", fn.Body())
ir.DumpList("geneq body", fn.Body)
}
funcbody()
@ -762,7 +762,7 @@ func geneq(t *types.Type) *obj.LSym {
typecheckFunc(fn)
Curfn = fn
typecheckslice(fn.Body().Slice(), ctxStmt)
typecheckslice(fn.Body.Slice(), ctxStmt)
Curfn = nil
if base.Debug.DclStack != 0 {
@ -869,10 +869,10 @@ func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
fn, needsize := eqmemfunc(size, nx.Type().Elem())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
call.PtrList().Append(nx)
call.PtrList().Append(ny)
call.Args.Append(nx)
call.Args.Append(ny)
if needsize {
call.PtrList().Append(nodintconst(size))
call.Args.Append(nodintconst(size))
}
return call

View file

@ -17,7 +17,7 @@ type exporter struct {
func (p *exporter) markObject(n ir.Node) {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if n.Class() == ir.PFUNC {
if n.Class_ == ir.PFUNC {
inlFlood(n, exportsym)
}
}

View file

@ -77,11 +77,11 @@ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
func typecheckclosure(clo *ir.ClosureExpr, top int) {
fn := clo.Func()
fn := clo.Func
// Set current associated iota value, so iota can be used inside
// function in ConstSpec, see issue #22344
if x := getIotaValue(); x >= 0 {
fn.SetIota(x)
fn.Iota = x
}
fn.ClosureType = typecheck(fn.ClosureType, ctxType)
@ -124,7 +124,7 @@ func typecheckclosure(clo *ir.ClosureExpr, top int) {
Curfn = fn
olddd := decldepth
decldepth = 1
typecheckslice(fn.Body().Slice(), ctxStmt)
typecheckslice(fn.Body.Slice(), ctxStmt)
decldepth = olddd
Curfn = oldfn
}
@ -195,7 +195,7 @@ func capturevars(fn *ir.Func) {
outermost := v.Defn.(*ir.Name)
// out parameters will be assigned to implicitly upon return.
if outermost.Class() != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
v.SetByval(true)
} else {
outermost.Name().SetAddrtaken(true)
@ -262,7 +262,7 @@ func transformclosure(fn *ir.Func) {
v = addr
}
v.SetClass(ir.PPARAM)
v.Class_ = ir.PPARAM
decls = append(decls, v)
fld := types.NewField(src.NoXPos, v.Sym(), v.Type())
@ -294,7 +294,7 @@ func transformclosure(fn *ir.Func) {
if v.Byval() && v.Type().Width <= int64(2*Widthptr) {
// If it is a small variable captured by value, downgrade it to PAUTO.
v.SetClass(ir.PAUTO)
v.Class_ = ir.PAUTO
fn.Dcl = append(fn.Dcl, v)
body = append(body, ir.NewAssignStmt(base.Pos, v, cr))
} else {
@ -302,7 +302,7 @@ func transformclosure(fn *ir.Func) {
// and initialize in entry prologue.
addr := NewName(lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
addr.SetClass(ir.PAUTO)
addr.Class_ = ir.PAUTO
addr.SetUsed(true)
addr.Curfn = fn
fn.Dcl = append(fn.Dcl, addr)
@ -328,7 +328,7 @@ func transformclosure(fn *ir.Func) {
// hasemptycvars reports whether closure clo has an
// empty list of captured vars.
func hasemptycvars(clo *ir.ClosureExpr) bool {
return len(clo.Func().ClosureVars) == 0
return len(clo.Func.ClosureVars) == 0
}
// closuredebugruntimecheck applies boilerplate checks for debug flags
@ -336,9 +336,9 @@ func hasemptycvars(clo *ir.ClosureExpr) bool {
func closuredebugruntimecheck(clo *ir.ClosureExpr) {
if base.Debug.Closure > 0 {
if clo.Esc() == EscHeap {
base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func().ClosureVars)
base.WarnfAt(clo.Pos(), "heap closure, captured vars = %v", clo.Func.ClosureVars)
} else {
base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func().ClosureVars)
base.WarnfAt(clo.Pos(), "stack closure, captured vars = %v", clo.Func.ClosureVars)
}
}
if base.Flag.CompilingRuntime && clo.Esc() == EscHeap {
@ -366,7 +366,7 @@ func closureType(clo *ir.ClosureExpr) *types.Type {
fields := []*ir.Field{
namedfield(".F", types.Types[types.TUINTPTR]),
}
for _, v := range clo.Func().ClosureVars {
for _, v := range clo.Func.ClosureVars {
typ := v.Type()
if !v.Byval() {
typ = types.NewPtr(typ)
@ -379,7 +379,7 @@ func closureType(clo *ir.ClosureExpr) *types.Type {
}
func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
fn := clo.Func()
fn := clo.Func
// If no closure vars, don't bother wrapping.
if hasemptycvars(clo) {
@ -394,7 +394,7 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(clo.Esc())
clos.PtrList().Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter.Slice()...))
clos.List.Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter.Slice()...))
addr := nodAddr(clos)
addr.SetEsc(clo.Esc())
@ -407,7 +407,7 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
if !types.Identical(typ, x.Type()) {
panic("closure type does not match order's assigned type")
}
addr.SetRight(x)
addr.Alloc = x
clo.Prealloc = nil
}
@ -428,13 +428,13 @@ func typecheckpartialcall(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
fn := makepartialcall(dot, dot.Type(), sym)
fn.SetWrapper(true)
return ir.NewCallPartExpr(dot.Pos(), dot.Left(), dot.Selection, fn)
return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
}
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
rcvrtype := dot.Left().Type()
rcvrtype := dot.X.Type()
sym := methodSymSuffix(rcvrtype, meth, "-fm")
if sym.Uniq() {
@ -480,24 +480,24 @@ func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.
}
call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
call.PtrList().Set(paramNnames(tfn.Type()))
call.SetIsDDD(tfn.Type().IsVariadic())
call.Args.Set(paramNnames(tfn.Type()))
call.IsDDD = tfn.Type().IsVariadic()
if t0.NumResults() != 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
ret.PtrList().Set1(call)
ret.Results.Set1(call)
body = append(body, ret)
} else {
body = append(body, call)
}
fn.PtrBody().Set(body)
fn.Body.Set(body)
funcbody()
typecheckFunc(fn)
// Need to typecheck the body of the just-generated wrapper.
// typecheckslice() requires that Curfn is set when processing an ORETURN.
Curfn = fn
typecheckslice(fn.Body().Slice(), ctxStmt)
typecheckslice(fn.Body.Slice(), ctxStmt)
sym.Def = fn
Target.Decls = append(Target.Decls, fn)
Curfn = savecurfn
@ -512,7 +512,7 @@ func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.
func partialCallType(n *ir.CallPartExpr) *types.Type {
t := tostruct([]*ir.Field{
namedfield("F", types.Types[types.TUINTPTR]),
namedfield("R", n.Left().Type()),
namedfield("R", n.X.Type()),
})
t.SetNoalg(true)
return t
@ -526,13 +526,13 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
//
// Like walkclosure above.
if n.Left().Type().IsInterface() {
if n.X.Type().IsInterface() {
// Trigger panic for method on nil interface now.
// Otherwise it happens in the wrapper and is confusing.
n.SetLeft(cheapexpr(n.Left(), init))
n.SetLeft(walkexpr(n.Left(), nil))
n.X = cheapexpr(n.X, init)
n.X = walkexpr(n.X, nil)
tab := typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.Left()), ctxExpr)
tab := typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X), ctxExpr)
c := ir.NewUnaryExpr(base.Pos, ir.OCHECKNIL, tab)
c.SetTypecheck(1)
@ -543,7 +543,7 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(n.Esc())
clos.PtrList().Set2(ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func().Nname), n.Left())
clos.List.Set2(ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func.Nname), n.X)
addr := nodAddr(clos)
addr.SetEsc(n.Esc())
@ -556,7 +556,7 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
if !types.Identical(typ, x.Type()) {
panic("partial call type does not match order's assigned type")
}
addr.SetRight(x)
addr.Alloc = x
n.Prealloc = nil
}

View file

@ -163,8 +163,8 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
}
n := n.(*ir.UnaryExpr)
n.SetLeft(convlit(n.Left(), ot))
if n.Left().Type() == nil {
n.X = convlit(n.X, ot)
if n.X.Type() == nil {
n.SetType(nil)
return n
}
@ -181,13 +181,13 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
var l, r ir.Node
switch n := n.(type) {
case *ir.BinaryExpr:
n.SetLeft(convlit(n.Left(), ot))
n.SetRight(convlit(n.Right(), ot))
l, r = n.Left(), n.Right()
n.X = convlit(n.X, ot)
n.Y = convlit(n.Y, ot)
l, r = n.X, n.Y
case *ir.LogicalExpr:
n.SetLeft(convlit(n.Left(), ot))
n.SetRight(convlit(n.Right(), ot))
l, r = n.Left(), n.Right()
n.X = convlit(n.X, ot)
n.Y = convlit(n.Y, ot)
l, r = n.X, n.Y
}
if l.Type() == nil || r.Type() == nil {
@ -213,8 +213,8 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
n.SetLeft(convlit1(n.Left(), t, explicit, nil))
n.SetType(n.Left().Type())
n.X = convlit1(n.X, t, explicit, nil)
n.SetType(n.X.Type())
if n.Type() != nil && !n.Type().IsInteger() {
base.Errorf("invalid operation: %v (shift of type %v)", n, n.Type())
n.SetType(nil)
@ -452,7 +452,7 @@ func evalConst(n ir.Node) ir.Node {
switch n.Op() {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
n := n.(*ir.UnaryExpr)
nl := n.Left()
nl := n.X
if nl.Op() == ir.OLITERAL {
var prec uint
if n.Type().IsUnsigned() {
@ -463,7 +463,7 @@ func evalConst(n ir.Node) ir.Node {
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT:
n := n.(*ir.BinaryExpr)
nl, nr := n.Left(), n.Right()
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
rval := nr.Val()
@ -488,21 +488,21 @@ func evalConst(n ir.Node) ir.Node {
case ir.OOROR, ir.OANDAND:
n := n.(*ir.LogicalExpr)
nl, nr := n.Left(), n.Right()
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, constant.BinaryOp(nl.Val(), tokenForOp[n.Op()], nr.Val()))
}
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n := n.(*ir.BinaryExpr)
nl, nr := n.Left(), n.Right()
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origBoolConst(n, constant.Compare(nl.Val(), tokenForOp[n.Op()], nr.Val()))
}
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
nl, nr := n.Left(), n.Right()
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
// shiftBound from go/types; "so we can express smallestFloat64"
const shiftBound = 1023 - 1 + 52
@ -517,14 +517,14 @@ func evalConst(n ir.Node) ir.Node {
case ir.OCONV, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
nl := n.Left()
nl := n.X
if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type(), true))
}
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
nl := n.Left()
nl := n.X
if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
// set so n.Orig gets OCONV instead of OCONVNOP
n.SetOp(ir.OCONV)
@ -534,7 +534,7 @@ func evalConst(n ir.Node) ir.Node {
case ir.OADDSTR:
// Merge adjacent constants in the argument list.
n := n.(*ir.AddStringExpr)
s := n.List().Slice()
s := n.List.Slice()
need := 0
for i := 0; i < len(s); i++ {
if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
@ -564,7 +564,7 @@ func evalConst(n ir.Node) ir.Node {
}
nl := ir.Copy(n).(*ir.AddStringExpr)
nl.PtrList().Set(s[i:i2])
nl.List.Set(s[i:i2])
newList = append(newList, origConst(nl, constant.MakeString(strings.Join(strs, ""))))
i = i2 - 1
} else {
@ -573,12 +573,12 @@ func evalConst(n ir.Node) ir.Node {
}
nn := ir.Copy(n).(*ir.AddStringExpr)
nn.PtrList().Set(newList)
nn.List.Set(newList)
return nn
case ir.OCAP, ir.OLEN:
n := n.(*ir.UnaryExpr)
nl := n.Left()
nl := n.X
switch nl.Type().Kind() {
case types.TSTRING:
if ir.IsConst(nl, constant.String) {
@ -596,21 +596,21 @@ func evalConst(n ir.Node) ir.Node {
case ir.OREAL:
n := n.(*ir.UnaryExpr)
nl := n.Left()
nl := n.X
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Real(nl.Val()))
}
case ir.OIMAG:
n := n.(*ir.UnaryExpr)
nl := n.Left()
nl := n.X
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Imag(nl.Val()))
}
case ir.OCOMPLEX:
n := n.(*ir.BinaryExpr)
nl, nr := n.Left(), n.Right()
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, makeComplex(nl.Val(), nr.Val()))
}
@ -871,7 +871,7 @@ func (s *constSet) add(pos src.XPos, n ir.Node, what, where string) {
if conv := n; conv.Op() == ir.OCONVIFACE {
conv := conv.(*ir.ConvExpr)
if conv.Implicit() {
n = conv.Left()
n = conv.X
}
}

View file

@ -120,7 +120,7 @@ func declare(n *ir.Name, ctxt ir.Class) {
s.Lastlineno = base.Pos
s.Def = n
n.Vargen = int32(gen)
n.SetClass(ctxt)
n.Class_ = ctxt
if ctxt == ir.PFUNC {
n.Sym().SetFunc(true)
}
@ -137,9 +137,9 @@ func variter(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
if len(el) == 1 && len(vl) > 1 {
e := el[0]
as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as2.PtrRlist().Set1(e)
as2.Rhs.Set1(e)
for _, v := range vl {
as2.PtrList().Append(v)
as2.Lhs.Append(v)
declare(v, dclcontext)
v.Ntype = t
v.Defn = as2
@ -234,7 +234,7 @@ func oldname(s *types.Sym) ir.Node {
if c == nil || c.Curfn != Curfn {
// Do not have a closure var for the active closure yet; make one.
c = NewName(s)
c.SetClass(ir.PAUTOHEAP)
c.Class_ = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
c.Defn = n
@ -810,11 +810,11 @@ func makefuncsym(s *types.Sym) {
// setNodeNameFunc marks a node as a function.
func setNodeNameFunc(n *ir.Name) {
if n.Op() != ir.ONAME || n.Class() != ir.Pxxx {
if n.Op() != ir.ONAME || n.Class_ != ir.Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
n.SetClass(ir.PFUNC)
n.Class_ = ir.PFUNC
n.Sym().SetFunc(true)
}
@ -876,11 +876,11 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) {
return
}
n := nn.(*ir.CallExpr)
if n.Left() == nil || n.Left().Op() != ir.ONAME {
if n.X == nil || n.X.Op() != ir.ONAME {
return
}
fn := n.Left().(*ir.Name)
if fn.Class() != ir.PFUNC || fn.Name().Defn == nil {
fn := n.X.(*ir.Name)
if fn.Class_ != ir.PFUNC || fn.Name().Defn == nil {
return
}
if !isRuntimePkg(fn.Sym().Pkg) || fn.Sym().Name != "systemstack" {
@ -888,14 +888,14 @@ func (c *nowritebarrierrecChecker) findExtraCalls(nn ir.Node) {
}
var callee *ir.Func
arg := n.List().First()
arg := n.Args.First()
switch arg.Op() {
case ir.ONAME:
arg := arg.(*ir.Name)
callee = arg.Name().Defn.(*ir.Func)
case ir.OCLOSURE:
arg := arg.(*ir.ClosureExpr)
callee = arg.Func()
callee = arg.Func
default:
base.Fatalf("expected ONAME or OCLOSURE node, got %+v", arg)
}
@ -973,7 +973,7 @@ func (c *nowritebarrierrecChecker) check() {
q.PushRight(target.Nname)
}
for !q.Empty() {
fn := q.PopLeft().Func()
fn := q.PopLeft().Func
// Check fn.
if fn.WBPos.IsKnown() {

View file

@ -228,21 +228,21 @@ func (e *Escape) walkFunc(fn *ir.Func) {
if e.labels == nil {
e.labels = make(map[*types.Sym]labelState)
}
e.labels[n.Sym()] = nonlooping
e.labels[n.Label] = nonlooping
case ir.OGOTO:
// If we visited the label before the goto,
// then this is a looping label.
n := n.(*ir.BranchStmt)
if e.labels[n.Sym()] == nonlooping {
e.labels[n.Sym()] = looping
if e.labels[n.Label] == nonlooping {
e.labels[n.Label] = looping
}
}
})
e.curfn = fn
e.loopDepth = 1
e.block(fn.Body())
e.block(fn.Body)
if len(e.labels) != 0 {
base.FatalfAt(fn.Pos(), "leftover labels after walkFunc")
@ -304,18 +304,18 @@ func (e *Escape) stmt(n ir.Node) {
case ir.OBLOCK:
n := n.(*ir.BlockStmt)
e.stmts(n.List())
e.stmts(n.List)
case ir.ODCL:
// Record loop depth at declaration.
n := n.(*ir.Decl)
if !ir.IsBlank(n.Left()) {
e.dcl(n.Left())
if !ir.IsBlank(n.X) {
e.dcl(n.X)
}
case ir.OLABEL:
n := n.(*ir.LabelStmt)
switch e.labels[n.Sym()] {
switch e.labels[n.Label] {
case nonlooping:
if base.Flag.LowerM > 2 {
fmt.Printf("%v:%v non-looping label\n", base.FmtPos(base.Pos), n)
@ -328,127 +328,127 @@ func (e *Escape) stmt(n ir.Node) {
default:
base.Fatalf("label missing tag")
}
delete(e.labels, n.Sym())
delete(e.labels, n.Label)
case ir.OIF:
n := n.(*ir.IfStmt)
e.discard(n.Left())
e.block(n.Body())
e.block(n.Rlist())
e.discard(n.Cond)
e.block(n.Body)
e.block(n.Else)
case ir.OFOR, ir.OFORUNTIL:
n := n.(*ir.ForStmt)
e.loopDepth++
e.discard(n.Left())
e.stmt(n.Right())
e.block(n.Body())
e.discard(n.Cond)
e.stmt(n.Post)
e.block(n.Body)
e.loopDepth--
case ir.ORANGE:
// for List = range Right { Nbody }
n := n.(*ir.RangeStmt)
e.loopDepth++
ks := e.addrs(n.List())
e.block(n.Body())
ks := e.addrs(n.Vars)
e.block(n.Body)
e.loopDepth--
// Right is evaluated outside the loop.
k := e.discardHole()
if len(ks) >= 2 {
if n.Right().Type().IsArray() {
if n.X.Type().IsArray() {
k = ks[1].note(n, "range")
} else {
k = ks[1].deref(n, "range-deref")
}
}
e.expr(e.later(k), n.Right())
e.expr(e.later(k), n.X)
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
typesw := n.Left() != nil && n.Left().Op() == ir.OTYPESW
typesw := n.Tag != nil && n.Tag.Op() == ir.OTYPESW
var ks []EscHole
for _, cas := range n.List().Slice() { // cases
for _, cas := range n.Cases.Slice() { // cases
cas := cas.(*ir.CaseStmt)
if typesw && n.Left().(*ir.TypeSwitchGuard).Left() != nil {
cv := cas.Rlist().First()
if typesw && n.Tag.(*ir.TypeSwitchGuard).Tag != nil {
cv := cas.Vars.First()
k := e.dcl(cv) // type switch variables have no ODCL.
if cv.Type().HasPointers() {
ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
}
}
e.discards(cas.List())
e.block(cas.Body())
e.discards(cas.List)
e.block(cas.Body)
}
if typesw {
e.expr(e.teeHole(ks...), n.Left().(*ir.TypeSwitchGuard).Right())
e.expr(e.teeHole(ks...), n.Tag.(*ir.TypeSwitchGuard).X)
} else {
e.discard(n.Left())
e.discard(n.Tag)
}
case ir.OSELECT:
n := n.(*ir.SelectStmt)
for _, cas := range n.List().Slice() {
for _, cas := range n.Cases.Slice() {
cas := cas.(*ir.CaseStmt)
e.stmt(cas.Left())
e.block(cas.Body())
e.stmt(cas.Comm)
e.block(cas.Body)
}
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
e.assign(n.List().First(), n.Rlist().First(), "selrecv", n)
e.assign(n.List().Second(), nil, "selrecv", n)
e.assign(n.Lhs.First(), n.Rhs.First(), "selrecv", n)
e.assign(n.Lhs.Second(), nil, "selrecv", n)
case ir.ORECV:
// TODO(mdempsky): Consider e.discard(n.Left).
n := n.(*ir.UnaryExpr)
e.exprSkipInit(e.discardHole(), n) // already visited n.Ninit
case ir.OSEND:
n := n.(*ir.SendStmt)
e.discard(n.Left())
e.assignHeap(n.Right(), "send", n)
e.discard(n.Chan)
e.assignHeap(n.Value, "send", n)
case ir.OAS:
n := n.(*ir.AssignStmt)
e.assign(n.Left(), n.Right(), "assign", n)
e.assign(n.X, n.Y, "assign", n)
case ir.OASOP:
n := n.(*ir.AssignOpStmt)
e.assign(n.Left(), n.Right(), "assign", n)
e.assign(n.X, n.Y, "assign", n)
case ir.OAS2:
n := n.(*ir.AssignListStmt)
for i, nl := range n.List().Slice() {
e.assign(nl, n.Rlist().Index(i), "assign-pair", n)
for i, nl := range n.Lhs.Slice() {
e.assign(nl, n.Rhs.Index(i), "assign-pair", n)
}
case ir.OAS2DOTTYPE: // v, ok = x.(type)
n := n.(*ir.AssignListStmt)
e.assign(n.List().First(), n.Rlist().First(), "assign-pair-dot-type", n)
e.assign(n.List().Second(), nil, "assign-pair-dot-type", n)
e.assign(n.Lhs.First(), n.Rhs.First(), "assign-pair-dot-type", n)
e.assign(n.Lhs.Second(), nil, "assign-pair-dot-type", n)
case ir.OAS2MAPR: // v, ok = m[k]
n := n.(*ir.AssignListStmt)
e.assign(n.List().First(), n.Rlist().First(), "assign-pair-mapr", n)
e.assign(n.List().Second(), nil, "assign-pair-mapr", n)
e.assign(n.Lhs.First(), n.Rhs.First(), "assign-pair-mapr", n)
e.assign(n.Lhs.Second(), nil, "assign-pair-mapr", n)
case ir.OAS2RECV: // v, ok = <-ch
n := n.(*ir.AssignListStmt)
e.assign(n.List().First(), n.Rlist().First(), "assign-pair-receive", n)
e.assign(n.List().Second(), nil, "assign-pair-receive", n)
e.assign(n.Lhs.First(), n.Rhs.First(), "assign-pair-receive", n)
e.assign(n.Lhs.Second(), nil, "assign-pair-receive", n)
case ir.OAS2FUNC:
n := n.(*ir.AssignListStmt)
e.stmts(n.Rlist().First().Init())
e.call(e.addrs(n.List()), n.Rlist().First(), nil)
e.stmts(n.Rhs.First().Init())
e.call(e.addrs(n.Lhs), n.Rhs.First(), nil)
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
results := e.curfn.Type().Results().FieldSlice()
for i, v := range n.List().Slice() {
for i, v := range n.Results.Slice() {
e.assign(ir.AsNode(results[i].Nname), v, "return", n)
}
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OCLOSE, ir.OCOPY, ir.ODELETE, ir.OPANIC, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
e.call(nil, n, nil)
case ir.OGO, ir.ODEFER:
n := n.(*ir.GoDeferStmt)
e.stmts(n.Left().Init())
e.call(nil, n.Left(), n)
e.stmts(n.Call.Init())
e.call(nil, n.Call, n)
case ir.ORETJMP:
// TODO(mdempsky): What do? esc.go just ignores it.
@ -491,7 +491,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
uintptrEscapesHack := k.uintptrEscapesHack
k.uintptrEscapesHack = false
if uintptrEscapesHack && n.Op() == ir.OCONVNOP && n.(*ir.ConvExpr).Left().Type().IsUnsafePtr() {
if uintptrEscapesHack && n.Op() == ir.OCONVNOP && n.(*ir.ConvExpr).X.Type().IsUnsafePtr() {
// nop
} else if k.derefs >= 0 && !n.Type().HasPointers() {
k = e.discardHole()
@ -506,7 +506,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.ONAME:
n := n.(*ir.Name)
if n.Class() == ir.PFUNC || n.Class() == ir.PEXTERN {
if n.Class_ == ir.PFUNC || n.Class_ == ir.PEXTERN {
return
}
e.flow(k, e.oldLoc(n))
@ -517,46 +517,46 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
n := n.(*ir.UnaryExpr)
e.discard(n.Left())
e.discard(n.X)
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n := n.(*ir.BinaryExpr)
e.discard(n.Left())
e.discard(n.Right())
e.discard(n.X)
e.discard(n.Y)
case ir.OANDAND, ir.OOROR:
n := n.(*ir.LogicalExpr)
e.discard(n.Left())
e.discard(n.Right())
e.discard(n.X)
e.discard(n.Y)
case ir.OADDR:
n := n.(*ir.AddrExpr)
e.expr(k.addr(n, "address-of"), n.Left()) // "address-of"
e.expr(k.addr(n, "address-of"), n.X) // "address-of"
case ir.ODEREF:
n := n.(*ir.StarExpr)
e.expr(k.deref(n, "indirection"), n.Left()) // "indirection"
e.expr(k.deref(n, "indirection"), n.X) // "indirection"
case ir.ODOT, ir.ODOTMETH, ir.ODOTINTER:
n := n.(*ir.SelectorExpr)
e.expr(k.note(n, "dot"), n.Left())
e.expr(k.note(n, "dot"), n.X)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
e.expr(k.deref(n, "dot of pointer"), n.Left()) // "dot of pointer"
e.expr(k.deref(n, "dot of pointer"), n.X) // "dot of pointer"
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
e.expr(k.dotType(n.Type(), n, "dot"), n.Left())
e.expr(k.dotType(n.Type(), n, "dot"), n.X)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
if n.Left().Type().IsArray() {
e.expr(k.note(n, "fixed-array-index-of"), n.Left())
if n.X.Type().IsArray() {
e.expr(k.note(n, "fixed-array-index-of"), n.X)
} else {
// TODO(mdempsky): Fix why reason text.
e.expr(k.deref(n, "dot of pointer"), n.Left())
e.expr(k.deref(n, "dot of pointer"), n.X)
}
e.discard(n.Right())
e.discard(n.Index)
case ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
e.discard(n.Left())
e.discard(n.Right())
e.discard(n.X)
e.discard(n.Index)
case ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR:
n := n.(*ir.SliceExpr)
e.expr(k.note(n, "slice"), n.Left())
e.expr(k.note(n, "slice"), n.X)
low, high, max := n.SliceBounds()
e.discard(low)
e.discard(high)
@ -564,29 +564,29 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
if checkPtr(e.curfn, 2) && n.Type().IsUnsafePtr() && n.Left().Type().IsPtr() {
if checkPtr(e.curfn, 2) && n.Type().IsUnsafePtr() && n.X.Type().IsPtr() {
// When -d=checkptr=2 is enabled, treat
// conversions to unsafe.Pointer as an
// escaping operation. This allows better
// runtime instrumentation, since we can more
// easily detect object boundaries on the heap
// than the stack.
e.assignHeap(n.Left(), "conversion to unsafe.Pointer", n)
} else if n.Type().IsUnsafePtr() && n.Left().Type().IsUintptr() {
e.unsafeValue(k, n.Left())
e.assignHeap(n.X, "conversion to unsafe.Pointer", n)
} else if n.Type().IsUnsafePtr() && n.X.Type().IsUintptr() {
e.unsafeValue(k, n.X)
} else {
e.expr(k, n.Left())
e.expr(k, n.X)
}
case ir.OCONVIFACE:
n := n.(*ir.ConvExpr)
if !n.Left().Type().IsInterface() && !isdirectiface(n.Left().Type()) {
if !n.X.Type().IsInterface() && !isdirectiface(n.X.Type()) {
k = e.spill(k, n)
}
e.expr(k.note(n, "interface-converted"), n.Left())
e.expr(k.note(n, "interface-converted"), n.X)
case ir.ORECV:
n := n.(*ir.UnaryExpr)
e.discard(n.Left())
e.discard(n.X)
case ir.OCALLMETH, ir.OCALLFUNC, ir.OCALLINTER, ir.OLEN, ir.OCAP, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCOPY:
e.call([]EscHole{k}, n, nil)
@ -598,15 +598,15 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.OMAKESLICE:
n := n.(*ir.MakeExpr)
e.spill(k, n)
e.discard(n.Left())
e.discard(n.Right())
e.discard(n.Len)
e.discard(n.Cap)
case ir.OMAKECHAN:
n := n.(*ir.MakeExpr)
e.discard(n.Left())
e.discard(n.Len)
case ir.OMAKEMAP:
n := n.(*ir.MakeExpr)
e.spill(k, n)
e.discard(n.Left())
e.discard(n.Len)
case ir.ORECOVER:
// nop
@ -633,17 +633,17 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
name, _ := m.Nname.(*ir.Name)
paramK := e.tagHole(ks, name, m.Type.Recv())
e.expr(e.teeHole(paramK, closureK), n.Left())
e.expr(e.teeHole(paramK, closureK), n.X)
case ir.OPTRLIT:
n := n.(*ir.AddrExpr)
e.expr(e.spill(k, n), n.Left())
e.expr(e.spill(k, n), n.X)
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
for _, elt := range n.List().Slice() {
for _, elt := range n.List.Slice() {
if elt.Op() == ir.OKEY {
elt = elt.(*ir.KeyExpr).Right()
elt = elt.(*ir.KeyExpr).Value
}
e.expr(k.note(n, "array literal element"), elt)
}
@ -653,17 +653,17 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
k = e.spill(k, n)
k.uintptrEscapesHack = uintptrEscapesHack // for ...uintptr parameters
for _, elt := range n.List().Slice() {
for _, elt := range n.List.Slice() {
if elt.Op() == ir.OKEY {
elt = elt.(*ir.KeyExpr).Right()
elt = elt.(*ir.KeyExpr).Value
}
e.expr(k.note(n, "slice-literal-element"), elt)
}
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
for _, elt := range n.List().Slice() {
e.expr(k.note(n, "struct literal element"), elt.(*ir.StructKeyExpr).Left())
for _, elt := range n.List.Slice() {
e.expr(k.note(n, "struct literal element"), elt.(*ir.StructKeyExpr).Value)
}
case ir.OMAPLIT:
@ -671,10 +671,10 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
e.spill(k, n)
// Map keys and values are always stored in the heap.
for _, elt := range n.List().Slice() {
for _, elt := range n.List.Slice() {
elt := elt.(*ir.KeyExpr)
e.assignHeap(elt.Left(), "map literal key", n)
e.assignHeap(elt.Right(), "map literal value", n)
e.assignHeap(elt.Key, "map literal key", n)
e.assignHeap(elt.Value, "map literal value", n)
}
case ir.OCLOSURE:
@ -682,7 +682,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
k = e.spill(k, n)
// Link addresses of captured variables to closure.
for _, v := range n.Func().ClosureVars {
for _, v := range n.Func.ClosureVars {
k := k
if !v.Byval() {
k = k.addr(v, "reference")
@ -694,7 +694,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
case ir.ORUNES2STR, ir.OBYTES2STR, ir.OSTR2RUNES, ir.OSTR2BYTES, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
e.spill(k, n)
e.discard(n.Left())
e.discard(n.X)
case ir.OADDSTR:
n := n.(*ir.AddStringExpr)
@ -702,7 +702,7 @@ func (e *Escape) exprSkipInit(k EscHole, n ir.Node) {
// Arguments of OADDSTR never escape;
// runtime.concatstrings makes sure of that.
e.discards(n.List())
e.discards(n.List)
}
}
@ -718,31 +718,31 @@ func (e *Escape) unsafeValue(k EscHole, n ir.Node) {
switch n.Op() {
case ir.OCONV, ir.OCONVNOP:
n := n.(*ir.ConvExpr)
if n.Left().Type().IsUnsafePtr() {
e.expr(k, n.Left())
if n.X.Type().IsUnsafePtr() {
e.expr(k, n.X)
} else {
e.discard(n.Left())
e.discard(n.X)
}
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
if isReflectHeaderDataField(n) {
e.expr(k.deref(n, "reflect.Header.Data"), n.Left())
e.expr(k.deref(n, "reflect.Header.Data"), n.X)
} else {
e.discard(n.Left())
e.discard(n.X)
}
case ir.OPLUS, ir.ONEG, ir.OBITNOT:
n := n.(*ir.UnaryExpr)
e.unsafeValue(k, n.Left())
e.unsafeValue(k, n.X)
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.ODIV, ir.OMOD, ir.OAND, ir.OANDNOT:
n := n.(*ir.BinaryExpr)
e.unsafeValue(k, n.Left())
e.unsafeValue(k, n.Right())
e.unsafeValue(k, n.X)
e.unsafeValue(k, n.Y)
case ir.OLSH, ir.ORSH:
n := n.(*ir.BinaryExpr)
e.unsafeValue(k, n.Left())
e.unsafeValue(k, n.X)
// RHS need not be uintptr-typed (#32959) and can't meaningfully
// flow pointers anyway.
e.discard(n.Right())
e.discard(n.Y)
default:
e.exprSkipInit(e.discardHole(), n)
}
@ -775,7 +775,7 @@ func (e *Escape) addr(n ir.Node) EscHole {
base.Fatalf("unexpected addr: %v", n)
case ir.ONAME:
n := n.(*ir.Name)
if n.Class() == ir.PEXTERN {
if n.Class_ == ir.PEXTERN {
break
}
k = e.oldLoc(n).asHole()
@ -784,21 +784,21 @@ func (e *Escape) addr(n ir.Node) EscHole {
e.addr(n.Name_)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
k = e.addr(n.Left())
k = e.addr(n.X)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
e.discard(n.Right())
if n.Left().Type().IsArray() {
k = e.addr(n.Left())
e.discard(n.Index)
if n.X.Type().IsArray() {
k = e.addr(n.X)
} else {
e.discard(n.Left())
e.discard(n.X)
}
case ir.ODEREF, ir.ODOTPTR:
e.discard(n)
case ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
e.discard(n.Left())
e.assignHeap(n.Right(), "key of map put", n)
e.discard(n.X)
e.assignHeap(n.Index, "key of map put", n)
}
if !n.Type().HasPointers() {
@ -876,17 +876,17 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
var fn *ir.Name
switch call.Op() {
case ir.OCALLFUNC:
switch v := staticValue(call.Left()); {
case v.Op() == ir.ONAME && v.(*ir.Name).Class() == ir.PFUNC:
switch v := staticValue(call.X); {
case v.Op() == ir.ONAME && v.(*ir.Name).Class_ == ir.PFUNC:
fn = v.(*ir.Name)
case v.Op() == ir.OCLOSURE:
fn = v.(*ir.ClosureExpr).Func().Nname
fn = v.(*ir.ClosureExpr).Func.Nname
}
case ir.OCALLMETH:
fn = methodExprName(call.Left())
fn = methodExprName(call.X)
}
fntype := call.Left().Type()
fntype := call.X.Type()
if fn != nil {
fntype = fn.Type()
}
@ -898,20 +898,20 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
}
if r := fntype.Recv(); r != nil {
argument(e.tagHole(ks, fn, r), call.Left().(*ir.SelectorExpr).Left())
argument(e.tagHole(ks, fn, r), call.X.(*ir.SelectorExpr).X)
} else {
// Evaluate callee function expression.
argument(e.discardHole(), call.Left())
argument(e.discardHole(), call.X)
}
args := call.List().Slice()
args := call.Args.Slice()
for i, param := range fntype.Params().FieldSlice() {
argument(e.tagHole(ks, fn, param), args[i])
}
case ir.OAPPEND:
call := call.(*ir.CallExpr)
args := call.List().Slice()
args := call.Args.Slice()
// Appendee slice may flow directly to the result, if
// it has enough capacity. Alternatively, a new heap
@ -923,7 +923,7 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
}
argument(appendeeK, args[0])
if call.IsDDD() {
if call.IsDDD {
appendedK := e.discardHole()
if args[1].Type().IsSlice() && args[1].Type().Elem().HasPointers() {
appendedK = e.heapHole().deref(call, "appended slice...")
@ -937,30 +937,30 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
case ir.OCOPY:
call := call.(*ir.BinaryExpr)
argument(e.discardHole(), call.Left())
argument(e.discardHole(), call.X)
copiedK := e.discardHole()
if call.Right().Type().IsSlice() && call.Right().Type().Elem().HasPointers() {
if call.Y.Type().IsSlice() && call.Y.Type().Elem().HasPointers() {
copiedK = e.heapHole().deref(call, "copied slice")
}
argument(copiedK, call.Right())
argument(copiedK, call.Y)
case ir.OPANIC:
call := call.(*ir.UnaryExpr)
argument(e.heapHole(), call.Left())
argument(e.heapHole(), call.X)
case ir.OCOMPLEX:
call := call.(*ir.BinaryExpr)
argument(e.discardHole(), call.Left())
argument(e.discardHole(), call.Right())
argument(e.discardHole(), call.X)
argument(e.discardHole(), call.Y)
case ir.ODELETE, ir.OPRINT, ir.OPRINTN, ir.ORECOVER:
call := call.(*ir.CallExpr)
for _, arg := range call.List().Slice() {
for _, arg := range call.Args.Slice() {
argument(e.discardHole(), arg)
}
case ir.OLEN, ir.OCAP, ir.OREAL, ir.OIMAG, ir.OCLOSE:
call := call.(*ir.UnaryExpr)
argument(e.discardHole(), call.Left())
argument(e.discardHole(), call.X)
}
}
@ -1557,7 +1557,7 @@ func (e *Escape) finish(fns []*ir.Func) {
}
func (l *EscLocation) isName(c ir.Class) bool {
return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class() == c
return l.n != nil && l.n.Op() == ir.ONAME && l.n.(*ir.Name).Class_ == c
}
const numEscResults = 7
@ -1726,10 +1726,10 @@ func isSliceSelfAssign(dst, src ir.Node) bool {
return false
case ir.ODEREF:
dst := dst.(*ir.StarExpr)
dstX = dst.Left()
dstX = dst.X
case ir.ODOTPTR:
dst := dst.(*ir.SelectorExpr)
dstX = dst.Left()
dstX = dst.X
}
if dstX.Op() != ir.ONAME {
return false
@ -1749,7 +1749,7 @@ func isSliceSelfAssign(dst, src ir.Node) bool {
// For slicing an array (not pointer to array), there is an implicit OADDR.
// We check that to determine non-pointer array slicing.
src := src.(*ir.SliceExpr)
if src.Left().Op() == ir.OADDR {
if src.X.Op() == ir.OADDR {
return false
}
default:
@ -1757,15 +1757,15 @@ func isSliceSelfAssign(dst, src ir.Node) bool {
}
// slice is applied to ONAME dereference.
var baseX ir.Node
switch base := src.(*ir.SliceExpr).Left(); base.Op() {
switch base := src.(*ir.SliceExpr).X; base.Op() {
default:
return false
case ir.ODEREF:
base := base.(*ir.StarExpr)
baseX = base.Left()
baseX = base.X
case ir.ODOTPTR:
base := base.(*ir.SelectorExpr)
baseX = base.Left()
baseX = base.X
}
if baseX.Op() != ir.ONAME {
return false
@ -1801,14 +1801,14 @@ func isSelfAssign(dst, src ir.Node) bool {
// Safe trailing accessors that are permitted to differ.
dst := dst.(*ir.SelectorExpr)
src := src.(*ir.SelectorExpr)
return samesafeexpr(dst.Left(), src.Left())
return samesafeexpr(dst.X, src.X)
case ir.OINDEX:
dst := dst.(*ir.IndexExpr)
src := src.(*ir.IndexExpr)
if mayAffectMemory(dst.Right()) || mayAffectMemory(src.Right()) {
if mayAffectMemory(dst.Index) || mayAffectMemory(src.Index) {
return false
}
return samesafeexpr(dst.Left(), src.Left())
return samesafeexpr(dst.X, src.X)
default:
return false
}
@ -1834,27 +1834,27 @@ func mayAffectMemory(n ir.Node) bool {
case ir.OADD, ir.OSUB, ir.OOR, ir.OXOR, ir.OMUL, ir.OLSH, ir.ORSH, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OMOD:
n := n.(*ir.BinaryExpr)
return mayAffectMemory(n.Left()) || mayAffectMemory(n.Right())
return mayAffectMemory(n.X) || mayAffectMemory(n.Y)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
return mayAffectMemory(n.Left()) || mayAffectMemory(n.Right())
return mayAffectMemory(n.X) || mayAffectMemory(n.Index)
case ir.OCONVNOP, ir.OCONV:
n := n.(*ir.ConvExpr)
return mayAffectMemory(n.Left())
return mayAffectMemory(n.X)
case ir.OLEN, ir.OCAP, ir.ONOT, ir.OBITNOT, ir.OPLUS, ir.ONEG, ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
n := n.(*ir.UnaryExpr)
return mayAffectMemory(n.Left())
return mayAffectMemory(n.X)
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
return mayAffectMemory(n.Left())
return mayAffectMemory(n.X)
case ir.ODEREF:
n := n.(*ir.StarExpr)
return mayAffectMemory(n.Left())
return mayAffectMemory(n.X)
default:
return true
@ -1871,7 +1871,7 @@ func heapAllocReason(n ir.Node) string {
// Parameters are always passed via the stack.
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
return ""
}
}
@ -1893,9 +1893,9 @@ func heapAllocReason(n ir.Node) string {
if n.Op() == ir.OMAKESLICE {
n := n.(*ir.MakeExpr)
r := n.Right()
r := n.Cap
if r == nil {
r = n.Left()
r = n.Len
}
if !smallintconst(r) {
return "non-constant size"
@ -1928,7 +1928,7 @@ func addrescapes(n ir.Node) {
// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
// on PPARAM it means something different.
if n.Class() == ir.PAUTO && n.Esc() == EscNever {
if n.Class_ == ir.PAUTO && n.Esc() == EscNever {
break
}
@ -1938,7 +1938,7 @@ func addrescapes(n ir.Node) {
break
}
if n.Class() != ir.PPARAM && n.Class() != ir.PPARAMOUT && n.Class() != ir.PAUTO {
if n.Class_ != ir.PPARAM && n.Class_ != ir.PPARAMOUT && n.Class_ != ir.PAUTO {
break
}
@ -1969,18 +1969,18 @@ func addrescapes(n ir.Node) {
// is always a heap pointer anyway.
case ir.ODOT:
n := n.(*ir.SelectorExpr)
addrescapes(n.Left())
addrescapes(n.X)
case ir.OINDEX:
n := n.(*ir.IndexExpr)
if !n.Left().Type().IsSlice() {
addrescapes(n.Left())
if !n.X.Type().IsSlice() {
addrescapes(n.X)
}
case ir.OPAREN:
n := n.(*ir.ParenExpr)
addrescapes(n.Left())
addrescapes(n.X)
case ir.OCONVNOP:
n := n.(*ir.ConvExpr)
addrescapes(n.Left())
addrescapes(n.X)
}
}
@ -1992,7 +1992,7 @@ func moveToHeap(n *ir.Name) {
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", n)
}
if n.Class() == ir.PAUTOHEAP {
if n.Class_ == ir.PAUTOHEAP {
ir.Dump("n", n)
base.Fatalf("double move to heap")
}
@ -2011,7 +2011,7 @@ func moveToHeap(n *ir.Name) {
// Parameters have a local stack copy used at function start/end
// in addition to the copy in the heap that may live longer than
// the function.
if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
if n.FrameOffset() == types.BADWIDTH {
base.Fatalf("addrescapes before param assignment")
}
@ -2023,9 +2023,9 @@ func moveToHeap(n *ir.Name) {
stackcopy := NewName(n.Sym())
stackcopy.SetType(n.Type())
stackcopy.SetFrameOffset(n.FrameOffset())
stackcopy.SetClass(n.Class())
stackcopy.Class_ = n.Class_
stackcopy.Heapaddr = heapaddr
if n.Class() == ir.PPARAMOUT {
if n.Class_ == ir.PPARAMOUT {
// Make sure the pointer to the heap copy is kept live throughout the function.
// The function could panic at any point, and then a defer could recover.
// Thus, we need the pointer to the heap copy always available so the
@ -2047,7 +2047,7 @@ func moveToHeap(n *ir.Name) {
}
// Parameters are before locals, so can stop early.
// This limits the search even in functions with many local variables.
if d.Class() == ir.PAUTO {
if d.Class_ == ir.PAUTO {
break
}
}
@ -2058,7 +2058,7 @@ func moveToHeap(n *ir.Name) {
}
// Modify n in place so that uses of n now mean indirection of the heapaddr.
n.SetClass(ir.PAUTOHEAP)
n.Class_ = ir.PAUTOHEAP
n.SetFrameOffset(0)
n.Heapaddr = heapaddr
n.SetEsc(EscHeap)
@ -2084,7 +2084,7 @@ func (e *Escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
return fmt.Sprintf("arg#%d", narg)
}
if fn.Body().Len() == 0 {
if fn.Body.Len() == 0 {
// Assume that uintptr arguments must be held live across the call.
// This is most important for syscall.Syscall.
// See golang.org/issue/13372.
@ -2106,7 +2106,7 @@ func (e *Escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
// External functions are assumed unsafe, unless
// //go:noescape is given before the declaration.
if fn.Func().Pragma&ir.Noescape != 0 {
if fn.Pragma&ir.Noescape != 0 {
if base.Flag.LowerM != 0 && f.Sym != nil {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
@ -2120,7 +2120,7 @@ func (e *Escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
return esc.Encode()
}
if fn.Func().Pragma&ir.UintptrEscapes != 0 {
if fn.Pragma&ir.UintptrEscapes != 0 {
if f.Type.IsUintptr() {
if base.Flag.LowerM != 0 {
base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())

View file

@ -83,7 +83,7 @@ func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Cl
}
n := ir.NewDeclNameAt(pos, op, s)
n.SetClass(ctxt) // TODO(mdempsky): Move this into NewDeclNameAt too?
n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
s.SetPkgDef(n)
s.Importdef = ipkg
return n

View file

@ -35,7 +35,7 @@ func isParamStackCopy(n ir.Node) bool {
return false
}
name := n.(*ir.Name)
return (name.Class() == ir.PPARAM || name.Class() == ir.PPARAMOUT) && name.Heapaddr != nil
return (name.Class_ == ir.PPARAM || name.Class_ == ir.PPARAMOUT) && name.Heapaddr != nil
}
// isParamHeapCopy reports whether this is the on-heap copy of
@ -45,7 +45,7 @@ func isParamHeapCopy(n ir.Node) bool {
return false
}
name := n.(*ir.Name)
return name.Class() == ir.PAUTOHEAP && name.Name().Stackcopy != nil
return name.Class_ == ir.PAUTOHEAP && name.Name().Stackcopy != nil
}
// autotmpname returns the name for an autotmp variable numbered n.
@ -79,7 +79,7 @@ func tempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
n := ir.NewNameAt(pos, s)
s.Def = n
n.SetType(t)
n.SetClass(ir.PAUTO)
n.Class_ = ir.PAUTO
n.SetEsc(EscNever)
n.Curfn = curfn
n.SetUsed(true)

View file

@ -270,16 +270,16 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
tail = ir.NewBranchStmt(base.Pos, ir.ORETJMP, f.Nname.Sym())
} else {
call := ir.NewCallExpr(base.Pos, ir.OCALL, f.Nname, nil)
call.PtrList().Set(paramNnames(tfn.Type()))
call.SetIsDDD(tfn.Type().IsVariadic())
call.Args.Set(paramNnames(tfn.Type()))
call.IsDDD = tfn.Type().IsVariadic()
tail = call
if tfn.Type().NumResults() > 0 {
n := ir.NewReturnStmt(base.Pos, nil)
n.PtrList().Set1(call)
n.Results.Set1(call)
tail = n
}
}
fn.PtrBody().Append(tail)
fn.Body.Append(tail)
funcbody()
if base.Debug.DclStack != 0 {
@ -288,7 +288,7 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
typecheckFunc(fn)
Curfn = fn
typecheckslice(fn.Body().Slice(), ctxStmt)
typecheckslice(fn.Body.Slice(), ctxStmt)
escapeFuncs([]*ir.Func{fn}, false)

View file

@ -429,7 +429,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
switch n.Op() {
case ir.ONAME:
switch n.Class() {
switch n.Class_ {
case ir.PEXTERN:
// Variable.
w.tag('V')
@ -449,7 +449,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
w.funcExt(n)
default:
base.Fatalf("unexpected class: %v, %v", n, n.Class())
base.Fatalf("unexpected class: %v, %v", n, n.Class_)
}
case ir.OLITERAL:
@ -528,7 +528,7 @@ func (p *iexporter) doInline(f *ir.Name) {
w := p.newWriter()
w.setPkg(fnpkg(f), false)
w.stmtList(ir.AsNodes(f.Func().Inl.Body))
w.stmtList(ir.AsNodes(f.Func.Inl.Body))
w.finish("inl", p.inlineIndex, f.Sym())
}
@ -983,14 +983,14 @@ func (w *exportWriter) funcExt(n *ir.Name) {
}
// Inline body.
if n.Func().Inl != nil {
w.uint64(1 + uint64(n.Func().Inl.Cost))
if n.Func().ExportInline() {
if n.Func.Inl != nil {
w.uint64(1 + uint64(n.Func.Inl.Cost))
if n.Func.ExportInline() {
w.p.doInline(n)
}
// Endlineno for inlined function.
w.pos(n.Func().Endlineno)
w.pos(n.Func.Endlineno)
} else {
w.uint64(0)
}
@ -1068,27 +1068,27 @@ func (w *exportWriter) stmt(n ir.Node) {
// generate OBLOCK nodes except to denote an empty
// function body, although that may change.)
n := n.(*ir.BlockStmt)
for _, n := range n.List().Slice() {
for _, n := range n.List.Slice() {
w.stmt(n)
}
case ir.ODCL:
n := n.(*ir.Decl)
w.op(ir.ODCL)
w.pos(n.Left().Pos())
w.localName(n.Left().(*ir.Name))
w.typ(n.Left().Type())
w.pos(n.X.Pos())
w.localName(n.X.(*ir.Name))
w.typ(n.X.Type())
case ir.OAS:
// Don't export "v = <N>" initializing statements, hope they're always
// preceded by the DCL which will be re-parsed and typecheck to reproduce
// the "v = <N>" again.
n := n.(*ir.AssignStmt)
if n.Right() != nil {
if n.Y != nil {
w.op(ir.OAS)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.expr(n.X)
w.expr(n.Y)
}
case ir.OASOP:
@ -1096,23 +1096,23 @@ func (w *exportWriter) stmt(n ir.Node) {
w.op(ir.OASOP)
w.pos(n.Pos())
w.op(n.AsOp)
w.expr(n.Left())
if w.bool(!n.Implicit()) {
w.expr(n.Right())
w.expr(n.X)
if w.bool(!n.IncDec) {
w.expr(n.Y)
}
case ir.OAS2, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
w.op(ir.OAS2)
w.pos(n.Pos())
w.exprList(n.List())
w.exprList(n.Rlist())
w.exprList(n.Lhs)
w.exprList(n.Rhs)
case ir.ORETURN:
n := n.(*ir.ReturnStmt)
w.op(ir.ORETURN)
w.pos(n.Pos())
w.exprList(n.List())
w.exprList(n.Results)
// case ORETJMP:
// unreachable - generated by compiler for trampolin routines
@ -1121,32 +1121,32 @@ func (w *exportWriter) stmt(n ir.Node) {
n := n.(*ir.GoDeferStmt)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Call)
case ir.OIF:
n := n.(*ir.IfStmt)
w.op(ir.OIF)
w.pos(n.Pos())
w.stmtList(n.Init())
w.expr(n.Left())
w.stmtList(n.Body())
w.stmtList(n.Rlist())
w.expr(n.Cond)
w.stmtList(n.Body)
w.stmtList(n.Else)
case ir.OFOR:
n := n.(*ir.ForStmt)
w.op(ir.OFOR)
w.pos(n.Pos())
w.stmtList(n.Init())
w.exprsOrNil(n.Left(), n.Right())
w.stmtList(n.Body())
w.exprsOrNil(n.Cond, n.Post)
w.stmtList(n.Body)
case ir.ORANGE:
n := n.(*ir.RangeStmt)
w.op(ir.ORANGE)
w.pos(n.Pos())
w.stmtList(n.List())
w.expr(n.Right())
w.stmtList(n.Body())
w.stmtList(n.Vars)
w.expr(n.X)
w.stmtList(n.Body)
case ir.OSELECT:
n := n.(*ir.SelectStmt)
@ -1161,7 +1161,7 @@ func (w *exportWriter) stmt(n ir.Node) {
w.op(n.Op())
w.pos(n.Pos())
w.stmtList(n.Init())
w.exprsOrNil(n.Left(), nil)
w.exprsOrNil(n.Tag, nil)
w.caseList(n)
// case OCASE:
@ -1191,11 +1191,11 @@ func isNamedTypeSwitch(n ir.Node) bool {
return false
}
sw := n.(*ir.SwitchStmt)
if sw.Left() == nil || sw.Left().Op() != ir.OTYPESW {
if sw.Tag == nil || sw.Tag.Op() != ir.OTYPESW {
return false
}
guard := sw.Left().(*ir.TypeSwitchGuard)
return guard.Left() != nil
guard := sw.Tag.(*ir.TypeSwitchGuard)
return guard.Tag != nil
}
func (w *exportWriter) caseList(sw ir.Node) {
@ -1203,19 +1203,19 @@ func (w *exportWriter) caseList(sw ir.Node) {
var cases []ir.Node
if sw.Op() == ir.OSWITCH {
cases = sw.(*ir.SwitchStmt).List().Slice()
cases = sw.(*ir.SwitchStmt).Cases.Slice()
} else {
cases = sw.(*ir.SelectStmt).List().Slice()
cases = sw.(*ir.SelectStmt).Cases.Slice()
}
w.uint64(uint64(len(cases)))
for _, cas := range cases {
cas := cas.(*ir.CaseStmt)
w.pos(cas.Pos())
w.stmtList(cas.List())
w.stmtList(cas.List)
if namedTypeSwitch {
w.localName(cas.Rlist().First().(*ir.Name))
w.localName(cas.Vars.First().(*ir.Name))
}
w.stmtList(cas.Body())
w.stmtList(cas.Body)
}
}
@ -1230,21 +1230,21 @@ func simplifyForExport(n ir.Node) ir.Node {
switch n.Op() {
case ir.OPAREN:
n := n.(*ir.ParenExpr)
return simplifyForExport(n.Left())
return simplifyForExport(n.X)
case ir.ODEREF:
n := n.(*ir.StarExpr)
if n.Implicit() {
return simplifyForExport(n.Left())
return simplifyForExport(n.X)
}
case ir.OADDR:
n := n.(*ir.AddrExpr)
if n.Implicit() {
return simplifyForExport(n.Left())
return simplifyForExport(n.X)
}
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
if n.Implicit() {
return simplifyForExport(n.Left())
return simplifyForExport(n.X)
}
}
return n
@ -1283,7 +1283,7 @@ func (w *exportWriter) expr(n ir.Node) {
case ir.ONAME:
// Package scope name.
n := n.(*ir.Name)
if (n.Class() == ir.PEXTERN || n.Class() == ir.PFUNC) && !ir.IsBlank(n) {
if (n.Class_ == ir.PEXTERN || n.Class_ == ir.PFUNC) && !ir.IsBlank(n) {
w.op(ir.ONONAME)
w.qualifiedIdent(n)
break
@ -1305,14 +1305,14 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OTYPESW)
w.pos(n.Pos())
var s *types.Sym
if n.Left() != nil {
if n.Left().Op() != ir.ONONAME {
base.Fatalf("expected ONONAME, got %v", n.Left())
if n.Tag != nil {
if n.Tag.Op() != ir.ONONAME {
base.Fatalf("expected ONONAME, got %v", n.Tag)
}
s = n.Left().Sym()
s = n.Tag.Sym()
}
w.localIdent(s, 0) // declared pseudo-variable, if any
w.exprsOrNil(n.Right(), nil)
w.exprsOrNil(n.X, nil)
// case OTARRAY, OTMAP, OTCHAN, OTSTRUCT, OTINTER, OTFUNC:
// should have been resolved by typechecking - handled by default case
@ -1327,27 +1327,27 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.AddrExpr)
w.op(ir.OADDR)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
w.op(ir.OSTRUCTLIT)
w.pos(n.Pos())
w.typ(n.Type())
w.fieldList(n.List()) // special handling of field names
w.fieldList(n.List) // special handling of field names
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
n := n.(*ir.CompLitExpr)
w.op(ir.OCOMPLIT)
w.pos(n.Pos())
w.typ(n.Type())
w.exprList(n.List())
w.exprList(n.List)
case ir.OKEY:
n := n.(*ir.KeyExpr)
w.op(ir.OKEY)
w.pos(n.Pos())
w.exprsOrNil(n.Left(), n.Right())
w.exprsOrNil(n.Key, n.Value)
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
@ -1357,35 +1357,35 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.CallPartExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
w.expr(n.Left())
w.selector(n.Sym())
w.expr(n.X)
w.selector(n.Method.Sym)
case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
n := n.(*ir.SelectorExpr)
w.op(ir.OXDOT)
w.pos(n.Pos())
w.expr(n.Left())
w.selector(n.Sym())
w.expr(n.X)
w.selector(n.Sel)
case ir.ODOTTYPE, ir.ODOTTYPE2:
n := n.(*ir.TypeAssertExpr)
w.op(ir.ODOTTYPE)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
w.typ(n.Type())
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
w.op(ir.OINDEX)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.expr(n.X)
w.expr(n.Index)
case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
n := n.(*ir.SliceExpr)
w.op(ir.OSLICE)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
low, high, _ := n.SliceBounds()
w.exprsOrNil(low, high)
@ -1393,7 +1393,7 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.SliceExpr)
w.op(ir.OSLICE3)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
low, high, max := n.SliceBounds()
w.exprsOrNil(low, high)
w.expr(max)
@ -1403,33 +1403,33 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.BinaryExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.expr(n.X)
w.expr(n.Y)
w.op(ir.OEND)
case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
w.op(ir.OCONV)
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
w.typ(n.Type())
case ir.OREAL, ir.OIMAG, ir.OCAP, ir.OCLOSE, ir.OLEN, ir.ONEW, ir.OPANIC:
n := n.(*ir.UnaryExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
w.op(ir.OEND)
case ir.OAPPEND, ir.ODELETE, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := n.(*ir.CallExpr)
w.op(n.Op())
w.pos(n.Pos())
w.exprList(n.List()) // emits terminating OEND
w.exprList(n.Args) // emits terminating OEND
// only append() calls may contain '...' arguments
if n.Op() == ir.OAPPEND {
w.bool(n.IsDDD())
} else if n.IsDDD() {
w.bool(n.IsDDD)
} else if n.IsDDD {
base.Fatalf("exporter: unexpected '...' with %v call", n.Op())
}
@ -1438,9 +1438,9 @@ func (w *exportWriter) expr(n ir.Node) {
w.op(ir.OCALL)
w.pos(n.Pos())
w.stmtList(n.Init())
w.expr(n.Left())
w.exprList(n.List())
w.bool(n.IsDDD())
w.expr(n.X)
w.exprList(n.Args)
w.bool(n.IsDDD)
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
n := n.(*ir.MakeExpr)
@ -1451,12 +1451,12 @@ func (w *exportWriter) expr(n ir.Node) {
default:
// empty list
w.op(ir.OEND)
case n.Right() != nil:
w.expr(n.Left())
w.expr(n.Right())
case n.Cap != nil:
w.expr(n.Len)
w.expr(n.Cap)
w.op(ir.OEND)
case n.Left() != nil && (n.Op() == ir.OMAKESLICE || !n.Left().Type().IsUntyped()):
w.expr(n.Left())
case n.Len != nil && (n.Op() == ir.OMAKESLICE || !n.Len.Type().IsUntyped()):
w.expr(n.Len)
w.op(ir.OEND)
}
@ -1465,26 +1465,26 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.UnaryExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
case ir.OADDR:
n := n.(*ir.AddrExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
case ir.ODEREF:
n := n.(*ir.StarExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.X)
case ir.OSEND:
n := n.(*ir.SendStmt)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.expr(n.Chan)
w.expr(n.Value)
// binary expressions
case ir.OADD, ir.OAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
@ -1492,21 +1492,21 @@ func (w *exportWriter) expr(n ir.Node) {
n := n.(*ir.BinaryExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.expr(n.X)
w.expr(n.Y)
case ir.OANDAND, ir.OOROR:
n := n.(*ir.LogicalExpr)
w.op(n.Op())
w.pos(n.Pos())
w.expr(n.Left())
w.expr(n.Right())
w.expr(n.X)
w.expr(n.Y)
case ir.OADDSTR:
n := n.(*ir.AddStringExpr)
w.op(ir.OADDSTR)
w.pos(n.Pos())
w.exprList(n.List())
w.exprList(n.List)
case ir.ODCLCONST:
// if exporting, DCLCONST should just be removed as its usage
@ -1543,8 +1543,8 @@ func (w *exportWriter) fieldList(list ir.Nodes) {
w.uint64(uint64(list.Len()))
for _, n := range list.Slice() {
n := n.(*ir.StructKeyExpr)
w.selector(n.Sym())
w.expr(n.Left())
w.selector(n.Field)
w.expr(n.Value)
}
}
@ -1557,7 +1557,7 @@ func (w *exportWriter) localName(n *ir.Name) {
// PPARAM/PPARAMOUT, because we only want to include vargen in
// non-param names.
var v int32
if n.Class() == ir.PAUTO || (n.Class() == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
if n.Class_ == ir.PAUTO || (n.Class_ == ir.PAUTOHEAP && n.Name().Stackcopy == nil) {
v = n.Name().Vargen
}

View file

@ -329,7 +329,7 @@ func (r *importReader) doDecl(sym *types.Sym) *ir.Name {
fn.SetType(mtyp)
m := newFuncNameAt(mpos, methodSym(recv.Type, msym), fn)
m.SetType(mtyp)
m.SetClass(ir.PFUNC)
m.Class_ = ir.PFUNC
// methodSym already marked m.Sym as a function.
f := types.NewField(mpos, msym, mtyp)
@ -643,10 +643,10 @@ func (r *importReader) funcExt(n *ir.Name) {
// Inline body.
if u := r.uint64(); u > 0 {
n.Func().Inl = &ir.Inline{
n.Func.Inl = &ir.Inline{
Cost: int32(u - 1),
}
n.Func().Endlineno = r.pos()
n.Func.Endlineno = r.pos()
}
}
@ -757,7 +757,7 @@ func (r *importReader) stmtList() []ir.Node {
// Inline them into the statement list.
if n.Op() == ir.OBLOCK {
n := n.(*ir.BlockStmt)
list = append(list, n.List().Slice()...)
list = append(list, n.List.Slice()...)
} else {
list = append(list, n)
}
@ -772,17 +772,17 @@ func (r *importReader) caseList(sw ir.Node) []ir.Node {
cases := make([]ir.Node, r.uint64())
for i := range cases {
cas := ir.NewCaseStmt(r.pos(), nil, nil)
cas.PtrList().Set(r.stmtList())
cas.List.Set(r.stmtList())
if namedTypeSwitch {
// Note: per-case variables will have distinct, dotted
// names after import. That's okay: swt.go only needs
// Sym for diagnostics anyway.
caseVar := ir.NewNameAt(cas.Pos(), r.ident())
declare(caseVar, dclcontext)
cas.PtrRlist().Set1(caseVar)
caseVar.Defn = sw.(*ir.SwitchStmt).Left()
cas.Vars.Set1(caseVar)
caseVar.Defn = sw.(*ir.SwitchStmt).Tag
}
cas.PtrBody().Set(r.stmtList())
cas.Body.Set(r.stmtList())
cases[i] = cas
}
return cases
@ -867,7 +867,7 @@ func (r *importReader) node() ir.Node {
savedlineno := base.Pos
base.Pos = r.pos()
n := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
n.PtrList().Set(r.elemList()) // special handling of field names
n.List.Set(r.elemList()) // special handling of field names
base.Pos = savedlineno
return n
@ -876,7 +876,7 @@ func (r *importReader) node() ir.Node {
case ir.OCOMPLIT:
n := ir.NewCompLitExpr(r.pos(), ir.OCOMPLIT, ir.TypeNode(r.typ()).(ir.Ntype), nil)
n.PtrList().Set(r.exprList())
n.List.Set(r.exprList())
return n
case ir.OKEY:
@ -931,9 +931,9 @@ func (r *importReader) node() ir.Node {
case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := builtinCall(r.pos(), op)
n.PtrList().Set(r.exprList())
n.Args.Set(r.exprList())
if op == ir.OAPPEND {
n.SetIsDDD(r.bool())
n.IsDDD = r.bool()
}
return n
@ -943,15 +943,15 @@ func (r *importReader) node() ir.Node {
case ir.OCALL:
n := ir.NewCallExpr(r.pos(), ir.OCALL, nil, nil)
n.PtrInit().Set(r.stmtList())
n.SetLeft(r.expr())
n.PtrList().Set(r.exprList())
n.SetIsDDD(r.bool())
n.X = r.expr()
n.Args.Set(r.exprList())
n.IsDDD = r.bool()
return n
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
n := builtinCall(r.pos(), ir.OMAKE)
n.PtrList().Append(ir.TypeNode(r.typ()))
n.PtrList().Append(r.exprList()...)
n.Args.Append(ir.TypeNode(r.typ()))
n.Args.Append(r.exprList()...)
return n
// unary expressions
@ -1006,13 +1006,13 @@ func (r *importReader) node() ir.Node {
case ir.OASOP:
n := ir.NewAssignOpStmt(r.pos(), ir.OXXX, nil, nil)
n.SetSubOp(r.op())
n.SetLeft(r.expr())
n.AsOp = r.op()
n.X = r.expr()
if !r.bool() {
n.SetRight(nodintconst(1))
n.SetImplicit(true)
n.Y = nodintconst(1)
n.IncDec = true
} else {
n.SetRight(r.expr())
n.Y = r.expr()
}
return n
@ -1021,13 +1021,13 @@ func (r *importReader) node() ir.Node {
case ir.OAS2:
n := ir.NewAssignListStmt(r.pos(), ir.OAS2, nil, nil)
n.PtrList().Set(r.exprList())
n.PtrRlist().Set(r.exprList())
n.Lhs.Set(r.exprList())
n.Rhs.Set(r.exprList())
return n
case ir.ORETURN:
n := ir.NewReturnStmt(r.pos(), nil)
n.PtrList().Set(r.exprList())
n.Results.Set(r.exprList())
return n
// case ORETJMP:
@ -1039,40 +1039,40 @@ func (r *importReader) node() ir.Node {
case ir.OIF:
n := ir.NewIfStmt(r.pos(), nil, nil, nil)
n.PtrInit().Set(r.stmtList())
n.SetLeft(r.expr())
n.PtrBody().Set(r.stmtList())
n.PtrRlist().Set(r.stmtList())
n.Cond = r.expr()
n.Body.Set(r.stmtList())
n.Else.Set(r.stmtList())
return n
case ir.OFOR:
n := ir.NewForStmt(r.pos(), nil, nil, nil, nil)
n.PtrInit().Set(r.stmtList())
left, right := r.exprsOrNil()
n.SetLeft(left)
n.SetRight(right)
n.PtrBody().Set(r.stmtList())
n.Cond = left
n.Post = right
n.Body.Set(r.stmtList())
return n
case ir.ORANGE:
n := ir.NewRangeStmt(r.pos(), nil, nil, nil)
n.PtrList().Set(r.stmtList())
n.SetRight(r.expr())
n.PtrBody().Set(r.stmtList())
n.Vars.Set(r.stmtList())
n.X = r.expr()
n.Body.Set(r.stmtList())
return n
case ir.OSELECT:
n := ir.NewSelectStmt(r.pos(), nil)
n.PtrInit().Set(r.stmtList())
r.exprsOrNil() // TODO(rsc): Delete (and fix exporter). These are always nil.
n.PtrList().Set(r.caseList(n))
n.Cases.Set(r.caseList(n))
return n
case ir.OSWITCH:
n := ir.NewSwitchStmt(r.pos(), nil, nil)
n.PtrInit().Set(r.stmtList())
left, _ := r.exprsOrNil()
n.SetLeft(left)
n.PtrList().Set(r.caseList(n))
n.Tag = left
n.Cases.Set(r.caseList(n))
return n
// case OCASE:

View file

@ -45,7 +45,7 @@ func fninit() *ir.Name {
if n.Op() == ir.ONONAME {
continue
}
if n.Op() != ir.ONAME || n.(*ir.Name).Class() != ir.PEXTERN {
if n.Op() != ir.ONAME || n.(*ir.Name).Class_ != ir.PEXTERN {
base.Fatalf("bad inittask: %v", n)
}
deps = append(deps, n.(*ir.Name).Sym().Linksym())
@ -62,7 +62,7 @@ func fninit() *ir.Name {
fn.Dcl = append(fn.Dcl, initTodo.Dcl...)
initTodo.Dcl = nil
fn.PtrBody().Set(nf)
fn.Body.Set(nf)
funcbody()
typecheckFunc(fn)
@ -83,8 +83,8 @@ func fninit() *ir.Name {
// Record user init functions.
for _, fn := range Target.Inits {
// Skip init functions with empty bodies.
if fn.Body().Len() == 1 {
if stmt := fn.Body().First(); stmt.Op() == ir.OBLOCK && stmt.(*ir.BlockStmt).List().Len() == 0 {
if fn.Body.Len() == 1 {
if stmt := fn.Body.First(); stmt.Op() == ir.OBLOCK && stmt.(*ir.BlockStmt).List.Len() == 0 {
continue
}
}
@ -99,7 +99,7 @@ func fninit() *ir.Name {
sym := lookup(".inittask")
task := NewName(sym)
task.SetType(types.Types[types.TUINT8]) // fake type
task.SetClass(ir.PEXTERN)
task.Class_ = ir.PEXTERN
sym.Def = task
lsym := sym.Linksym()
ot := 0

View file

@ -139,7 +139,7 @@ func (o *InitOrder) processAssign(n ir.Node) {
defn := dep.Defn
// Skip dependencies on functions (PFUNC) and
// variables already initialized (InitDone).
if dep.Class() != ir.PEXTERN || o.order[defn] == orderDone {
if dep.Class_ != ir.PEXTERN || o.order[defn] == orderDone {
continue
}
o.order[n]++
@ -203,7 +203,7 @@ func (o *InitOrder) findInitLoopAndExit(n *ir.Name, path *[]*ir.Name) {
*path = append(*path, n)
for _, ref := range refers {
// Short-circuit variables that were initialized.
if ref.Class() == ir.PEXTERN && o.order[ref.Defn] == orderDone {
if ref.Class_ == ir.PEXTERN && o.order[ref.Defn] == orderDone {
continue
}
@ -220,7 +220,7 @@ func reportInitLoopAndExit(l []*ir.Name) {
// the start.
i := -1
for j, n := range l {
if n.Class() == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
if n.Class_ == ir.PEXTERN && (i == -1 || n.Pos().Before(l[i].Pos())) {
i = j
}
}
@ -255,13 +255,13 @@ func collectDeps(n ir.Node, transitive bool) ir.NameSet {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
d.inspect(n.Right())
d.inspect(n.Y)
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
d.inspect(n.Rlist().First())
d.inspect(n.Rhs.First())
case ir.ODCLFUNC:
n := n.(*ir.Func)
d.inspectList(n.Body())
d.inspectList(n.Body)
default:
base.Fatalf("unexpected Op: %v", n.Op())
}
@ -294,14 +294,14 @@ func (d *initDeps) visit(n ir.Node) {
case ir.ONAME:
n := n.(*ir.Name)
switch n.Class() {
switch n.Class_ {
case ir.PEXTERN, ir.PFUNC:
d.foundDep(n)
}
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
d.inspectList(n.Func().Body())
d.inspectList(n.Func.Body)
case ir.ODOTMETH, ir.OCALLPART:
d.foundDep(methodExprName(n))
@ -327,8 +327,8 @@ func (d *initDeps) foundDep(n *ir.Name) {
return
}
d.seen.Add(n)
if d.transitive && n.Class() == ir.PFUNC {
d.inspectList(n.Defn.(*ir.Func).Body())
if d.transitive && n.Class_ == ir.PFUNC {
d.inspectList(n.Defn.(*ir.Func).Body)
}
}
@ -360,10 +360,10 @@ func firstLHS(n ir.Node) *ir.Name {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
return n.Left().Name()
return n.X.Name()
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2RECV, ir.OAS2MAPR:
n := n.(*ir.AssignListStmt)
return n.List().First().Name()
return n.Lhs.First().Name()
}
base.Fatalf("unexpected Op: %v", n.Op())

View file

@ -196,7 +196,7 @@ func caninl(fn *ir.Func) {
}
// If fn has no body (is defined outside of Go), cannot inline it.
if fn.Body().Len() == 0 {
if fn.Body.Len() == 0 {
reason = "no function body"
return
}
@ -206,10 +206,10 @@ func caninl(fn *ir.Func) {
}
n := fn.Nname
if n.Func().InlinabilityChecked() {
if n.Func.InlinabilityChecked() {
return
}
defer n.Func().SetInlinabilityChecked(true)
defer n.Func.SetInlinabilityChecked(true)
cc := int32(inlineExtraCallCost)
if base.Flag.LowerL == 4 {
@ -235,14 +235,14 @@ func caninl(fn *ir.Func) {
return
}
n.Func().Inl = &ir.Inline{
n.Func.Inl = &ir.Inline{
Cost: inlineMaxBudget - visitor.budget,
Dcl: pruneUnusedAutos(n.Defn.(*ir.Func).Func().Dcl, &visitor),
Body: ir.DeepCopyList(src.NoXPos, fn.Body().Slice()),
Dcl: pruneUnusedAutos(n.Defn.(*ir.Func).Dcl, &visitor),
Body: ir.DeepCopyList(src.NoXPos, fn.Body.Slice()),
}
if base.Flag.LowerM > 1 {
fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, inlineMaxBudget-visitor.budget, fn.Type(), ir.AsNodes(n.Func().Inl.Body))
fmt.Printf("%v: can inline %v with cost %d as: %v { %v }\n", ir.Line(fn), n, inlineMaxBudget-visitor.budget, fn.Type(), ir.AsNodes(n.Func.Inl.Body))
} else if base.Flag.LowerM != 0 {
fmt.Printf("%v: can inline %v\n", ir.Line(fn), n)
}
@ -257,10 +257,10 @@ func inlFlood(n *ir.Name, exportsym func(*ir.Name)) {
if n == nil {
return
}
if n.Op() != ir.ONAME || n.Class() != ir.PFUNC {
base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class())
if n.Op() != ir.ONAME || n.Class_ != ir.PFUNC {
base.Fatalf("inlFlood: unexpected %v, %v, %v", n, n.Op(), n.Class_)
}
fn := n.Func()
fn := n.Func
if fn == nil {
base.Fatalf("inlFlood: missing Func on %v", n)
}
@ -285,7 +285,7 @@ func inlFlood(n *ir.Name, exportsym func(*ir.Name)) {
case ir.ONAME:
n := n.(*ir.Name)
switch n.Class() {
switch n.Class_ {
case ir.PFUNC:
inlFlood(n, exportsym)
exportsym(n)
@ -348,9 +348,9 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
// because getcaller{pc,sp} expect a pointer to the caller's first argument.
//
// runtime.throw is a "cheap call" like panic in normal code.
if n.Left().Op() == ir.ONAME {
name := n.Left().(*ir.Name)
if name.Class() == ir.PFUNC && isRuntimePkg(name.Sym().Pkg) {
if n.X.Op() == ir.ONAME {
name := n.X.(*ir.Name)
if name.Class_ == ir.PFUNC && isRuntimePkg(name.Sym().Pkg) {
fn := name.Sym().Name
if fn == "getcallerpc" || fn == "getcallersp" {
return errors.New("call to " + fn)
@ -367,7 +367,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
break
}
if fn := inlCallee(n.Left()); fn != nil && fn.Inl != nil {
if fn := inlCallee(n.X); fn != nil && fn.Inl != nil {
v.budget -= fn.Inl.Cost
break
}
@ -378,12 +378,12 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
// Call is okay if inlinable and we have the budget for the body.
case ir.OCALLMETH:
n := n.(*ir.CallExpr)
t := n.Left().Type()
t := n.X.Type()
if t == nil {
base.Fatalf("no function type for [%p] %+v\n", n.Left(), n.Left())
base.Fatalf("no function type for [%p] %+v\n", n.X, n.X)
}
if isRuntimePkg(n.Left().Sym().Pkg) {
fn := n.Left().Sym().Name
if isRuntimePkg(n.X.Sym().Pkg) {
fn := n.X.Sym().Name
if fn == "heapBits.nextArena" {
// Special case: explicitly allow
// mid-stack inlining of
@ -393,7 +393,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
break
}
}
if inlfn := methodExprName(n.Left()).Func(); inlfn.Inl != nil {
if inlfn := methodExprName(n.X).Func; inlfn.Inl != nil {
v.budget -= inlfn.Inl.Cost
break
}
@ -431,35 +431,35 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
case ir.OFOR, ir.OFORUNTIL:
n := n.(*ir.ForStmt)
if n.Sym() != nil {
if n.Label != nil {
return errors.New("labeled control")
}
case ir.OSWITCH:
n := n.(*ir.SwitchStmt)
if n.Sym() != nil {
if n.Label != nil {
return errors.New("labeled control")
}
// case ir.ORANGE, ir.OSELECT in "unhandled" above
case ir.OBREAK, ir.OCONTINUE:
n := n.(*ir.BranchStmt)
if n.Sym() != nil {
if n.Label != nil {
// Should have short-circuited due to labeled control error above.
base.Fatalf("unexpected labeled break/continue: %v", n)
}
case ir.OIF:
n := n.(*ir.IfStmt)
if ir.IsConst(n.Left(), constant.Bool) {
if ir.IsConst(n.Cond, constant.Bool) {
// This if and the condition cost nothing.
// TODO(rsc): It seems strange that we visit the dead branch.
if err := ir.DoList(n.Init(), v.do); err != nil {
return err
}
if err := ir.DoList(n.Body(), v.do); err != nil {
if err := ir.DoList(n.Body, v.do); err != nil {
return err
}
if err := ir.DoList(n.Rlist(), v.do); err != nil {
if err := ir.DoList(n.Else, v.do); err != nil {
return err
}
return nil
@ -467,7 +467,7 @@ func (v *hairyVisitor) doNode(n ir.Node) error {
case ir.ONAME:
n := n.(*ir.Name)
if n.Class() == ir.PAUTO {
if n.Class_ == ir.PAUTO {
v.usedLocals[n] = true
}
@ -526,8 +526,8 @@ func inlcalls(fn *ir.Func) {
// Turn an OINLCALL into a statement.
func inlconv2stmt(inlcall *ir.InlinedCallExpr) ir.Node {
n := ir.NewBlockStmt(inlcall.Pos(), nil)
n.SetList(inlcall.Init())
n.PtrList().AppendNodes(inlcall.PtrBody())
n.List = inlcall.Init()
n.List.AppendNodes(&inlcall.Body)
return n
}
@ -535,8 +535,8 @@ func inlconv2stmt(inlcall *ir.InlinedCallExpr) ir.Node {
// The result of inlconv2expr MUST be assigned back to n, e.g.
// n.Left = inlconv2expr(n.Left)
func inlconv2expr(n *ir.InlinedCallExpr) ir.Node {
r := n.Rlist().First()
return initExpr(append(n.Init().Slice(), n.Body().Slice()...), r)
r := n.ReturnVars.First()
return initExpr(append(n.Init().Slice(), n.Body.Slice()...), r)
}
// Turn the rlist (with the return values) of the OINLCALL in
@ -545,12 +545,12 @@ func inlconv2expr(n *ir.InlinedCallExpr) ir.Node {
// order will be preserved. Used in return, oas2func and call
// statements.
func inlconv2list(n *ir.InlinedCallExpr) []ir.Node {
if n.Op() != ir.OINLCALL || n.Rlist().Len() == 0 {
if n.Op() != ir.OINLCALL || n.ReturnVars.Len() == 0 {
base.Fatalf("inlconv2list %+v\n", n)
}
s := n.Rlist().Slice()
s[0] = initExpr(append(n.Init().Slice(), n.Body().Slice()...), s[0])
s := n.ReturnVars.Slice()
s[0] = initExpr(append(n.Init().Slice(), n.Body.Slice()...), s[0])
return s
}
@ -575,10 +575,10 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
switch n.Op() {
case ir.ODEFER, ir.OGO:
n := n.(*ir.GoDeferStmt)
switch call := n.Left(); call.Op() {
switch call := n.Call; call.Op() {
case ir.OCALLFUNC, ir.OCALLMETH:
call := call.(*ir.CallExpr)
call.SetNoInline(true)
call.NoInline = true
}
// TODO do them here (or earlier),
@ -589,7 +589,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
// Prevent inlining some reflect.Value methods when using checkptr,
// even when package reflect was compiled without it (#35073).
n := n.(*ir.CallExpr)
if s := n.Left().Sym(); base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
if s := n.X.Sym(); base.Debug.Checkptr != 0 && isReflectPkg(s.Pkg) && (s.Name == "Value.UnsafeAddr" || s.Name == "Value.Pointer") {
return n
}
}
@ -600,8 +600,8 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
if as := n; as.Op() == ir.OAS2FUNC {
as := as.(*ir.AssignListStmt)
if as.Rlist().First().Op() == ir.OINLCALL {
as.PtrRlist().Set(inlconv2list(as.Rlist().First().(*ir.InlinedCallExpr)))
if as.Rhs.First().Op() == ir.OINLCALL {
as.Rhs.Set(inlconv2list(as.Rhs.First().(*ir.InlinedCallExpr)))
as.SetOp(ir.OAS2)
as.SetTypecheck(0)
n = typecheck(as, ctxStmt)
@ -614,7 +614,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
switch n.Op() {
case ir.OCALLFUNC, ir.OCALLMETH:
n := n.(*ir.CallExpr)
if n.NoInline() {
if n.NoInline {
return n
}
}
@ -624,27 +624,27 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
case ir.OCALLFUNC:
call = n.(*ir.CallExpr)
if base.Flag.LowerM > 3 {
fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.Left())
fmt.Printf("%v:call to func %+v\n", ir.Line(n), call.X)
}
if IsIntrinsicCall(call) {
break
}
if fn := inlCallee(call.Left()); fn != nil && fn.Inl != nil {
if fn := inlCallee(call.X); fn != nil && fn.Inl != nil {
n = mkinlcall(call, fn, maxCost, inlMap, edit)
}
case ir.OCALLMETH:
call = n.(*ir.CallExpr)
if base.Flag.LowerM > 3 {
fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.Left().(*ir.SelectorExpr).Sel)
fmt.Printf("%v:call to meth %v\n", ir.Line(n), call.X.(*ir.SelectorExpr).Sel)
}
// typecheck should have resolved ODOTMETH->type, whose nname points to the actual function.
if call.Left().Type() == nil {
base.Fatalf("no function type for [%p] %+v\n", call.Left(), call.Left())
if call.X.Type() == nil {
base.Fatalf("no function type for [%p] %+v\n", call.X, call.X)
}
n = mkinlcall(call, methodExprName(call.Left()).Func(), maxCost, inlMap, edit)
n = mkinlcall(call, methodExprName(call.X).Func, maxCost, inlMap, edit)
}
base.Pos = lno
@ -681,15 +681,15 @@ func inlCallee(fn ir.Node) *ir.Func {
if n == nil || !types.Identical(n.Type().Recv().Type, fn.T) {
return nil
}
return n.Func()
return n.Func
case ir.ONAME:
fn := fn.(*ir.Name)
if fn.Class() == ir.PFUNC {
return fn.Func()
if fn.Class_ == ir.PFUNC {
return fn.Func
}
case ir.OCLOSURE:
fn := fn.(*ir.ClosureExpr)
c := fn.Func()
c := fn.Func
caninl(c)
return c
}
@ -699,7 +699,7 @@ func inlCallee(fn ir.Node) *ir.Func {
func staticValue(n ir.Node) ir.Node {
for {
if n.Op() == ir.OCONVNOP {
n = n.(*ir.ConvExpr).Left()
n = n.(*ir.ConvExpr).X
continue
}
@ -719,7 +719,7 @@ func staticValue1(nn ir.Node) ir.Node {
return nil
}
n := nn.(*ir.Name)
if n.Class() != ir.PAUTO || n.Name().Addrtaken() {
if n.Class_ != ir.PAUTO || n.Name().Addrtaken() {
return nil
}
@ -733,12 +733,12 @@ FindRHS:
switch defn.Op() {
case ir.OAS:
defn := defn.(*ir.AssignStmt)
rhs = defn.Right()
rhs = defn.Y
case ir.OAS2:
defn := defn.(*ir.AssignListStmt)
for i, lhs := range defn.List().Slice() {
for i, lhs := range defn.Lhs.Slice() {
if lhs == n {
rhs = defn.Rlist().Index(i)
rhs = defn.Rhs.Index(i)
break FindRHS
}
}
@ -775,12 +775,12 @@ func reassigned(name *ir.Name) bool {
switch n.Op() {
case ir.OAS:
n := n.(*ir.AssignStmt)
if n.Left() == name && n != name.Defn {
if n.X == name && n != name.Defn {
return true
}
case ir.OAS2, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2DOTTYPE, ir.OAS2RECV, ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
for _, p := range n.List().Slice() {
for _, p := range n.Lhs.Slice() {
if p == name && n != name.Defn {
return true
}
@ -887,11 +887,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// inlconv2expr or inlconv2list). Make sure to preserve these,
// if necessary (#42703).
if n.Op() == ir.OCALLFUNC {
callee := n.Left()
callee := n.X
for callee.Op() == ir.OCONVNOP {
conv := callee.(*ir.ConvExpr)
ninit.AppendNodes(conv.PtrInit())
callee = conv.Left()
callee = conv.X
}
if callee.Op() != ir.ONAME && callee.Op() != ir.OCLOSURE && callee.Op() != ir.OMETHEXPR {
base.Fatalf("unexpected callee expression: %v", callee)
@ -944,7 +944,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
if ln.Op() != ir.ONAME {
continue
}
if ln.Class() == ir.PPARAMOUT { // return values handled below.
if ln.Class_ == ir.PPARAMOUT { // return values handled below.
continue
}
if isParamStackCopy(ln) { // ignore the on-stack copy of a parameter that moved to the heap
@ -957,7 +957,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
inlf := typecheck(inlvar(ln), ctxExpr)
inlvars[ln] = inlf
if base.Flag.GenDwarfInl > 0 {
if ln.Class() == ir.PPARAM {
if ln.Class_ == ir.PPARAM {
inlf.Name().SetInlFormal(true)
} else {
inlf.Name().SetInlLocal(true)
@ -1010,54 +1010,54 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// Assign arguments to the parameters' temp names.
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as.SetColas(true)
as.Def = true
if n.Op() == ir.OCALLMETH {
sel := n.Left().(*ir.SelectorExpr)
if sel.Left() == nil {
sel := n.X.(*ir.SelectorExpr)
if sel.X == nil {
base.Fatalf("method call without receiver: %+v", n)
}
as.PtrRlist().Append(sel.Left())
as.Rhs.Append(sel.X)
}
as.PtrRlist().Append(n.List().Slice()...)
as.Rhs.Append(n.Args.Slice()...)
// For non-dotted calls to variadic functions, we assign the
// variadic parameter's temp name separately.
var vas *ir.AssignStmt
if recv := fn.Type().Recv(); recv != nil {
as.PtrList().Append(inlParam(recv, as, inlvars))
as.Lhs.Append(inlParam(recv, as, inlvars))
}
for _, param := range fn.Type().Params().Fields().Slice() {
// For ordinary parameters or variadic parameters in
// dotted calls, just add the variable to the
// assignment list, and we're done.
if !param.IsDDD() || n.IsDDD() {
as.PtrList().Append(inlParam(param, as, inlvars))
if !param.IsDDD() || n.IsDDD {
as.Lhs.Append(inlParam(param, as, inlvars))
continue
}
// Otherwise, we need to collect the remaining values
// to pass as a slice.
x := as.List().Len()
for as.List().Len() < as.Rlist().Len() {
as.PtrList().Append(argvar(param.Type, as.List().Len()))
x := as.Lhs.Len()
for as.Lhs.Len() < as.Rhs.Len() {
as.Lhs.Append(argvar(param.Type, as.Lhs.Len()))
}
varargs := as.List().Slice()[x:]
varargs := as.Lhs.Slice()[x:]
vas = ir.NewAssignStmt(base.Pos, nil, nil)
vas.SetLeft(inlParam(param, vas, inlvars))
vas.X = inlParam(param, vas, inlvars)
if len(varargs) == 0 {
vas.SetRight(nodnil())
vas.Right().SetType(param.Type)
vas.Y = nodnil()
vas.Y.SetType(param.Type)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type).(ir.Ntype), nil)
lit.PtrList().Set(varargs)
vas.SetRight(lit)
lit.List.Set(varargs)
vas.Y = lit
}
}
if as.Rlist().Len() != 0 {
if as.Rhs.Len() != 0 {
ninit.Append(typecheck(as, ctxStmt))
}
@ -1093,7 +1093,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// Note issue 28603.
inlMark := ir.NewInlineMarkStmt(base.Pos, types.BADWIDTH)
inlMark.SetPos(n.Pos().WithIsStmt())
inlMark.SetOffset(int64(newIndex))
inlMark.Index = int64(newIndex)
ninit.Append(inlMark)
if base.Flag.GenDwarfInl > 0 {
@ -1130,8 +1130,8 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
call := ir.NewInlinedCallExpr(base.Pos, nil, nil)
call.PtrInit().Set(ninit.Slice())
call.PtrBody().Set(body)
call.PtrRlist().Set(retvars)
call.Body.Set(body)
call.ReturnVars.Set(retvars)
call.SetType(n.Type())
call.SetTypecheck(1)
@ -1160,7 +1160,7 @@ func inlvar(var_ ir.Node) ir.Node {
n := NewName(var_.Sym())
n.SetType(var_.Type())
n.SetClass(ir.PAUTO)
n.Class_ = ir.PAUTO
n.SetUsed(true)
n.Curfn = Curfn // the calling function, not the called one
n.SetAddrtaken(var_.Name().Addrtaken())
@ -1173,7 +1173,7 @@ func inlvar(var_ ir.Node) ir.Node {
func retvar(t *types.Field, i int) ir.Node {
n := NewName(lookupN("~R", i))
n.SetType(t.Type)
n.SetClass(ir.PAUTO)
n.Class_ = ir.PAUTO
n.SetUsed(true)
n.Curfn = Curfn // the calling function, not the called one
Curfn.Dcl = append(Curfn.Dcl, n)
@ -1185,7 +1185,7 @@ func retvar(t *types.Field, i int) ir.Node {
func argvar(t *types.Type, i int) ir.Node {
n := NewName(lookupN("~arg", i))
n.SetType(t.Elem())
n.SetClass(ir.PAUTO)
n.Class_ = ir.PAUTO
n.SetUsed(true)
n.Curfn = Curfn // the calling function, not the called one
Curfn.Dcl = append(Curfn.Dcl, n)
@ -1277,19 +1277,19 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
// this return is guaranteed to belong to the current inlined function.
n := n.(*ir.ReturnStmt)
init := subst.list(n.Init())
if len(subst.retvars) != 0 && n.List().Len() != 0 {
if len(subst.retvars) != 0 && n.Results.Len() != 0 {
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
// Make a shallow copy of retvars.
// Otherwise OINLCALL.Rlist will be the same list,
// and later walk and typecheck may clobber it.
for _, n := range subst.retvars {
as.PtrList().Append(n)
as.Lhs.Append(n)
}
as.PtrRlist().Set(subst.list(n.List()))
as.Rhs.Set(subst.list(n.Results))
if subst.delayretvars {
for _, n := range as.List().Slice() {
for _, n := range as.Lhs.Slice() {
as.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
n.Name().Defn = as
}
@ -1306,8 +1306,8 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m := ir.Copy(n).(*ir.BranchStmt)
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
p := fmt.Sprintf("%s·%d", n.Sym().Name, inlgen)
m.SetSym(lookup(p))
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
m.Label = lookup(p)
return m
case ir.OLABEL:
@ -1315,8 +1315,8 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m := ir.Copy(n).(*ir.LabelStmt)
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
p := fmt.Sprintf("%s·%d", n.Sym().Name, inlgen)
m.SetSym(lookup(p))
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
m.Label = lookup(p)
return m
}
@ -1345,7 +1345,7 @@ func (subst *inlsubst) updatedPos(xpos src.XPos) src.XPos {
func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
s := make([]*ir.Name, 0, len(ll))
for _, n := range ll {
if n.Class() == ir.PAUTO {
if n.Class_ == ir.PAUTO {
if _, found := vis.usedLocals[n]; !found {
continue
}
@ -1359,7 +1359,7 @@ func pruneUnusedAutos(ll []*ir.Name, vis *hairyVisitor) []*ir.Name {
// concrete-type method calls where applicable.
func devirtualize(fn *ir.Func) {
Curfn = fn
ir.VisitList(fn.Body(), func(n ir.Node) {
ir.VisitList(fn.Body, func(n ir.Node) {
if n.Op() == ir.OCALLINTER {
devirtualizeCall(n.(*ir.CallExpr))
}
@ -1367,21 +1367,21 @@ func devirtualize(fn *ir.Func) {
}
func devirtualizeCall(call *ir.CallExpr) {
sel := call.Left().(*ir.SelectorExpr)
r := staticValue(sel.Left())
sel := call.X.(*ir.SelectorExpr)
r := staticValue(sel.X)
if r.Op() != ir.OCONVIFACE {
return
}
recv := r.(*ir.ConvExpr)
typ := recv.Left().Type()
typ := recv.X.Type()
if typ.IsInterface() {
return
}
dt := ir.NewTypeAssertExpr(sel.Pos(), sel.Left(), nil)
dt := ir.NewTypeAssertExpr(sel.Pos(), sel.X, nil)
dt.SetType(typ)
x := typecheck(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sym()), ctxExpr|ctxCallee)
x := typecheck(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel), ctxExpr|ctxCallee)
switch x.Op() {
case ir.ODOTMETH:
x := x.(*ir.SelectorExpr)
@ -1389,7 +1389,7 @@ func devirtualizeCall(call *ir.CallExpr) {
base.WarnfAt(call.Pos(), "devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLMETH)
call.SetLeft(x)
call.X = x
case ir.ODOTINTER:
// Promoted method from embedded interface-typed field (#42279).
x := x.(*ir.SelectorExpr)
@ -1397,7 +1397,7 @@ func devirtualizeCall(call *ir.CallExpr) {
base.WarnfAt(call.Pos(), "partially devirtualizing %v to %v", sel, typ)
}
call.SetOp(ir.OCALLINTER)
call.SetLeft(x)
call.X = x
default:
// TODO(mdempsky): Turn back into Fatalf after more testing.
if base.Flag.LowerM != 0 {

View file

@ -272,7 +272,7 @@ func Main(archInit func(*Arch)) {
for _, n := range Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
if n.Func().OClosure != nil {
if n.OClosure != nil {
Curfn = n
transformclosure(n)
}

View file

@ -167,7 +167,7 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
if body == nil {
body = []ir.Node{ir.NewBlockStmt(base.Pos, nil)}
}
fn.PtrBody().Set(body)
fn.Body.Set(body)
base.Pos = p.makeXPos(block.Rbrace)
fn.Endlineno = base.Pos
@ -650,13 +650,13 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
case *syntax.CompositeLit:
n := ir.NewCompLitExpr(p.pos(expr), ir.OCOMPLIT, nil, nil)
if expr.Type != nil {
n.SetRight(p.expr(expr.Type))
n.Ntype = ir.Node(p.expr(expr.Type)).(ir.Ntype)
}
l := p.exprs(expr.ElemList)
for i, e := range l {
l[i] = p.wrapname(expr.ElemList[i], e)
}
n.PtrList().Set(l)
n.List.Set(l)
base.Pos = p.makeXPos(expr.Rbrace)
return n
case *syntax.KeyValueExpr:
@ -719,8 +719,8 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
return ir.NewBinaryExpr(pos, op, x, y)
case *syntax.CallExpr:
n := ir.NewCallExpr(p.pos(expr), ir.OCALL, p.expr(expr.Fun), nil)
n.PtrList().Set(p.exprs(expr.ArgList))
n.SetIsDDD(expr.HasDots)
n.Args.Set(p.exprs(expr.ArgList))
n.IsDDD = expr.HasDots
return n
case *syntax.ArrayType:
@ -968,10 +968,10 @@ func (p *noder) stmtsFall(stmts []syntax.Stmt, fallOK bool) []ir.Node {
for i, stmt := range stmts {
s := p.stmtFall(stmt, fallOK && i+1 == len(stmts))
if s == nil {
} else if s.Op() == ir.OBLOCK && s.(*ir.BlockStmt).List().Len() > 0 {
} else if s.Op() == ir.OBLOCK && s.(*ir.BlockStmt).List.Len() > 0 {
// Inline non-empty block.
// Empty blocks must be preserved for checkreturn.
nodes = append(nodes, s.(*ir.BlockStmt).List().Slice()...)
nodes = append(nodes, s.(*ir.BlockStmt).List.Slice()...)
} else {
nodes = append(nodes, s)
}
@ -1006,23 +1006,23 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
case *syntax.AssignStmt:
if stmt.Op != 0 && stmt.Op != syntax.Def {
n := ir.NewAssignOpStmt(p.pos(stmt), p.binOp(stmt.Op), p.expr(stmt.Lhs), p.expr(stmt.Rhs))
n.SetImplicit(stmt.Rhs == syntax.ImplicitOne)
n.IncDec = stmt.Rhs == syntax.ImplicitOne
return n
}
rhs := p.exprList(stmt.Rhs)
if list, ok := stmt.Lhs.(*syntax.ListExpr); ok && len(list.ElemList) != 1 || len(rhs) != 1 {
n := ir.NewAssignListStmt(p.pos(stmt), ir.OAS2, nil, nil)
n.SetColas(stmt.Op == syntax.Def)
n.PtrList().Set(p.assignList(stmt.Lhs, n, n.Colas()))
n.PtrRlist().Set(rhs)
n.Def = stmt.Op == syntax.Def
n.Lhs.Set(p.assignList(stmt.Lhs, n, n.Def))
n.Rhs.Set(rhs)
return n
}
n := ir.NewAssignStmt(p.pos(stmt), nil, nil)
n.SetColas(stmt.Op == syntax.Def)
n.SetLeft(p.assignList(stmt.Lhs, n, n.Colas())[0])
n.SetRight(rhs[0])
n.Def = stmt.Op == syntax.Def
n.X = p.assignList(stmt.Lhs, n, n.Def)[0]
n.Y = rhs[0]
return n
case *syntax.BranchStmt:
@ -1064,13 +1064,13 @@ func (p *noder) stmtFall(stmt syntax.Stmt, fallOK bool) ir.Node {
results = p.exprList(stmt.Results)
}
n := ir.NewReturnStmt(p.pos(stmt), nil)
n.PtrList().Set(results)
if n.List().Len() == 0 && Curfn != nil {
n.Results.Set(results)
if n.Results.Len() == 0 && Curfn != nil {
for _, ln := range Curfn.Dcl {
if ln.Class() == ir.PPARAM {
if ln.Class_ == ir.PPARAM {
continue
}
if ln.Class() != ir.PPARAMOUT {
if ln.Class_ != ir.PPARAMOUT {
break
}
if ln.Sym().Def != ln {
@ -1163,16 +1163,16 @@ func (p *noder) ifStmt(stmt *syntax.IfStmt) ir.Node {
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Cond != nil {
n.SetLeft(p.expr(stmt.Cond))
n.Cond = p.expr(stmt.Cond)
}
n.PtrBody().Set(p.blockStmt(stmt.Then))
n.Body.Set(p.blockStmt(stmt.Then))
if stmt.Else != nil {
e := p.stmt(stmt.Else)
if e.Op() == ir.OBLOCK {
e := e.(*ir.BlockStmt)
n.PtrRlist().Set(e.List().Slice())
n.Else.Set(e.List.Slice())
} else {
n.PtrRlist().Set1(e)
n.Else.Set1(e)
}
}
p.closeAnotherScope()
@ -1188,10 +1188,10 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
n := ir.NewRangeStmt(p.pos(r), nil, p.expr(r.X), nil)
if r.Lhs != nil {
n.SetColas(r.Def)
n.PtrList().Set(p.assignList(r.Lhs, n, n.Colas()))
n.Def = r.Def
n.Vars.Set(p.assignList(r.Lhs, n, n.Def))
}
n.PtrBody().Set(p.blockStmt(stmt.Body))
n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
@ -1201,12 +1201,12 @@ func (p *noder) forStmt(stmt *syntax.ForStmt) ir.Node {
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Cond != nil {
n.SetLeft(p.expr(stmt.Cond))
n.Cond = p.expr(stmt.Cond)
}
if stmt.Post != nil {
n.SetRight(p.stmt(stmt.Post))
n.Post = p.stmt(stmt.Post)
}
n.PtrBody().Set(p.blockStmt(stmt.Body))
n.Body.Set(p.blockStmt(stmt.Body))
p.closeAnotherScope()
return n
}
@ -1218,14 +1218,14 @@ func (p *noder) switchStmt(stmt *syntax.SwitchStmt) ir.Node {
n.PtrInit().Set1(p.stmt(stmt.Init))
}
if stmt.Tag != nil {
n.SetLeft(p.expr(stmt.Tag))
n.Tag = p.expr(stmt.Tag)
}
var tswitch *ir.TypeSwitchGuard
if l := n.Left(); l != nil && l.Op() == ir.OTYPESW {
if l := n.Tag; l != nil && l.Op() == ir.OTYPESW {
tswitch = l.(*ir.TypeSwitchGuard)
}
n.PtrList().Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
n.Cases.Set(p.caseClauses(stmt.Body, tswitch, stmt.Rbrace))
p.closeScope(stmt.Rbrace)
return n
@ -1242,12 +1242,12 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Cases != nil {
n.PtrList().Set(p.exprList(clause.Cases))
n.List.Set(p.exprList(clause.Cases))
}
if tswitch != nil && tswitch.Left() != nil {
nn := NewName(tswitch.Left().Sym())
if tswitch != nil && tswitch.Tag != nil {
nn := NewName(tswitch.Tag.Sym())
declare(nn, dclcontext)
n.PtrRlist().Set1(nn)
n.Vars.Set1(nn)
// keep track of the instances for reporting unused
nn.Defn = tswitch
}
@ -1263,8 +1263,8 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
body = body[:len(body)-1]
}
n.PtrBody().Set(p.stmtsFall(body, true))
if l := n.Body().Len(); l > 0 && n.Body().Index(l-1).Op() == ir.OFALL {
n.Body.Set(p.stmtsFall(body, true))
if l := n.Body.Len(); l > 0 && n.Body.Index(l-1).Op() == ir.OFALL {
if tswitch != nil {
base.Errorf("cannot fallthrough in type switch")
}
@ -1283,7 +1283,7 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
func (p *noder) selectStmt(stmt *syntax.SelectStmt) ir.Node {
n := ir.NewSelectStmt(p.pos(stmt), nil)
n.PtrList().Set(p.commClauses(stmt.Body, stmt.Rbrace))
n.Cases.Set(p.commClauses(stmt.Body, stmt.Rbrace))
return n
}
@ -1298,9 +1298,9 @@ func (p *noder) commClauses(clauses []*syntax.CommClause, rbrace syntax.Pos) []i
n := ir.NewCaseStmt(p.pos(clause), nil, nil)
if clause.Comm != nil {
n.PtrList().Set1(p.stmt(clause.Comm))
n.List.Set1(p.stmt(clause.Comm))
}
n.PtrBody().Set(p.stmts(clause.Body))
n.Body.Set(p.stmts(clause.Body))
nodes = append(nodes, n)
}
if len(clauses) > 0 {
@ -1321,16 +1321,16 @@ func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) ir.Node {
switch ls.Op() {
case ir.OFOR:
ls := ls.(*ir.ForStmt)
ls.SetSym(sym)
ls.Label = sym
case ir.ORANGE:
ls := ls.(*ir.RangeStmt)
ls.SetSym(sym)
ls.Label = sym
case ir.OSWITCH:
ls := ls.(*ir.SwitchStmt)
ls.SetSym(sym)
ls.Label = sym
case ir.OSELECT:
ls := ls.(*ir.SelectStmt)
ls.SetSym(sym)
ls.Label = sym
}
}
}
@ -1339,7 +1339,7 @@ func (p *noder) labeledStmt(label *syntax.LabeledStmt, fallOK bool) ir.Node {
if ls != nil {
if ls.Op() == ir.OBLOCK {
ls := ls.(*ir.BlockStmt)
l = append(l, ls.List().Slice()...)
l = append(l, ls.List.Slice()...)
} else {
l = append(l, ls)
}

View file

@ -214,7 +214,7 @@ func addptabs() {
if s.Pkg.Name != "main" {
continue
}
if n.Type().Kind() == types.TFUNC && n.Class() == ir.PFUNC {
if n.Type().Kind() == types.TFUNC && n.Class_ == ir.PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: s.Def.Type()})
} else {
@ -228,7 +228,7 @@ func dumpGlobal(n *ir.Name) {
if n.Type() == nil {
base.Fatalf("external %v nil type\n", n)
}
if n.Class() == ir.PFUNC {
if n.Class_ == ir.PFUNC {
return
}
if n.Sym().Pkg != types.LocalPkg {
@ -560,8 +560,8 @@ func pfuncsym(n *ir.Name, noff int64, f *ir.Name) {
if n.Sym() == nil {
base.Fatalf("pfuncsym nil n sym")
}
if f.Class() != ir.PFUNC {
base.Fatalf("pfuncsym class not PFUNC %d", f.Class())
if f.Class_ != ir.PFUNC {
base.Fatalf("pfuncsym class not PFUNC %d", f.Class_)
}
s := n.Sym().Linksym()
s.WriteAddr(base.Ctxt, noff, Widthptr, funcsym(f.Sym()).Linksym(), 0)

File diff suppressed because it is too large Load diff

View file

@ -68,11 +68,11 @@ func emitptrargsmap(fn *ir.Func) {
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *ir.Name) bool {
if (a.Class() == ir.PAUTO) != (b.Class() == ir.PAUTO) {
return b.Class() == ir.PAUTO
if (a.Class_ == ir.PAUTO) != (b.Class_ == ir.PAUTO) {
return b.Class_ == ir.PAUTO
}
if a.Class() != ir.PAUTO {
if a.Class_ != ir.PAUTO {
return a.FrameOffset() < b.FrameOffset()
}
@ -113,7 +113,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Mark the PAUTO's unused.
for _, ln := range fn.Dcl {
if ln.Class() == ir.PAUTO {
if ln.Class_ == ir.PAUTO {
ln.SetUsed(false)
}
}
@ -128,7 +128,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
for _, b := range f.Blocks {
for _, v := range b.Values {
if n, ok := v.Aux.(*ir.Name); ok {
switch n.Class() {
switch n.Class_ {
case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global.
if n != nodfp {
@ -154,7 +154,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
for i, n := range fn.Dcl {
if n.Op() != ir.ONAME || n.Class() != ir.PAUTO {
if n.Op() != ir.ONAME || n.Class_ != ir.PAUTO {
continue
}
if !n.Used() {
@ -207,7 +207,7 @@ func funccompile(fn *ir.Func) {
// assign parameter offsets
dowidth(fn.Type())
if fn.Body().Len() == 0 {
if fn.Body.Len() == 0 {
// Initialize ABI wrappers if necessary.
initLSym(fn, false)
emitptrargsmap(fn)
@ -249,7 +249,7 @@ func compile(fn *ir.Func) {
// because symbols must be allocated before the parallel
// phase of the compiler.
for _, n := range fn.Dcl {
switch n.Class() {
switch n.Class_ {
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
if livenessShouldTrack(n) && n.Addrtaken() {
dtypesym(n.Type())
@ -360,7 +360,7 @@ func compileFunctions() {
// since they're most likely to be the slowest.
// This helps avoid stragglers.
sort.Slice(compilequeue, func(i, j int) bool {
return compilequeue[i].Body().Len() > compilequeue[j].Body().Len()
return compilequeue[i].Body.Len() > compilequeue[j].Body.Len()
})
}
var wg sync.WaitGroup
@ -440,7 +440,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
if n.Op() != ir.ONAME { // might be OTYPE or OLITERAL
continue
}
switch n.Class() {
switch n.Class_ {
case ir.PAUTO:
if !n.Used() {
// Text == nil -> generating abstract function
@ -533,7 +533,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
var abbrev int
var offs int64
switch n.Class() {
switch n.Class_ {
case ir.PAUTO:
offs = n.FrameOffset()
abbrev = dwarf.DW_ABRV_AUTO
@ -549,7 +549,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
abbrev = dwarf.DW_ABRV_PARAM
offs = n.FrameOffset() + base.Ctxt.FixedFrameSize()
default:
base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class(), n)
base.Fatalf("createSimpleVar unexpected class %v for node %v", n.Class_, n)
}
typename := dwarf.InfoPrefix + typesymname(n.Type())
@ -566,7 +566,7 @@ func createSimpleVar(fnsym *obj.LSym, n *ir.Name) *dwarf.Var {
declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym().Name,
IsReturnValue: n.Class() == ir.PPARAMOUT,
IsReturnValue: n.Class_ == ir.PPARAMOUT,
IsInlFormal: n.Name().InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
@ -643,7 +643,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
if c == '.' || n.Type().IsUntyped() {
continue
}
if n.Class() == ir.PPARAM && !canSSAType(n.Type()) {
if n.Class_ == ir.PPARAM && !canSSAType(n.Type()) {
// SSA-able args get location lists, and may move in and
// out of registers, so those are handled elsewhere.
// Autos and named output params seem to get handled
@ -658,10 +658,10 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
typename := dwarf.InfoPrefix + typesymname(n.Type())
decls = append(decls, n)
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
isReturnValue := (n.Class() == ir.PPARAMOUT)
if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
isReturnValue := (n.Class_ == ir.PPARAMOUT)
if n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
} else if n.Class() == ir.PAUTOHEAP {
} else if n.Class_ == ir.PAUTOHEAP {
// If dcl in question has been promoted to heap, do a bit
// of extra work to recover original class (auto or param);
// see issue 30908. This insures that we get the proper
@ -670,9 +670,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir
// and not stack).
// TODO(thanm): generate a better location expression
stackcopy := n.Name().Stackcopy
if stackcopy != nil && (stackcopy.Class() == ir.PPARAM || stackcopy.Class() == ir.PPARAMOUT) {
if stackcopy != nil && (stackcopy.Class_ == ir.PPARAM || stackcopy.Class_ == ir.PPARAMOUT) {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
isReturnValue = (stackcopy.Class() == ir.PPARAMOUT)
isReturnValue = (stackcopy.Class_ == ir.PPARAMOUT)
}
}
inlIndex := 0
@ -731,7 +731,7 @@ func preInliningDcls(fnsym *obj.LSym) []*ir.Name {
func stackOffset(slot ssa.LocalSlot) int32 {
n := slot.N
var off int64
switch n.Class() {
switch n.Class_ {
case ir.PAUTO:
off = n.FrameOffset()
if base.Ctxt.FixedFrameSize() == 0 {
@ -753,7 +753,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
n := debug.Vars[varID]
var abbrev int
switch n.Class() {
switch n.Class_ {
case ir.PAUTO:
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
case ir.PPARAM, ir.PPARAMOUT:
@ -777,7 +777,7 @@ func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var
declpos := base.Ctxt.InnermostPos(n.Pos())
dvar := &dwarf.Var{
Name: n.Sym().Name,
IsReturnValue: n.Class() == ir.PPARAMOUT,
IsReturnValue: n.Class_ == ir.PPARAMOUT,
IsInlFormal: n.Name().InlFormal(),
Abbrev: abbrev,
Type: base.Ctxt.Lookup(typename),

View file

@ -44,7 +44,7 @@ func TestCmpstackvar(t *testing.T) {
n := NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.SetClass(cl)
n.Class_ = cl
return n
}
testdata := []struct {
@ -159,7 +159,7 @@ func TestStackvarSort(t *testing.T) {
n := NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.SetClass(cl)
n.Class_ = cl
return n
}
inp := []*ir.Name{

View file

@ -211,7 +211,7 @@ func livenessShouldTrack(nn ir.Node) bool {
return false
}
n := nn.(*ir.Name)
return (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type().HasPointers()
return (n.Class_ == ir.PAUTO || n.Class_ == ir.PPARAM || n.Class_ == ir.PPARAMOUT) && n.Type().HasPointers()
}
// getvariables returns the list of on-stack variables that we need to track
@ -238,7 +238,7 @@ func (lv *Liveness) initcache() {
lv.cache.initialized = true
for i, node := range lv.vars {
switch node.Class() {
switch node.Class_ {
case ir.PPARAM:
// A return instruction with a p.to is a tail return, which brings
// the stack pointer back up (if it ever went down) and then jumps
@ -494,7 +494,7 @@ func (lv *Liveness) pointerMap(liveout bvec, vars []*ir.Name, args, locals bvec)
break
}
node := vars[i]
switch node.Class() {
switch node.Class_ {
case ir.PAUTO:
onebitwalktype1(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
@ -795,7 +795,7 @@ func (lv *Liveness) epilogue() {
// don't need to keep the stack copy live?
if lv.fn.HasDefer() {
for i, n := range lv.vars {
if n.Class() == ir.PPARAMOUT {
if n.Class_ == ir.PPARAMOUT {
if n.Name().IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
base.Fatalf("variable %v both output param and heap output param", n)
@ -893,7 +893,7 @@ func (lv *Liveness) epilogue() {
if !liveout.Get(int32(i)) {
continue
}
if n.Class() == ir.PPARAM {
if n.Class_ == ir.PPARAM {
continue // ok
}
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Nname, n)
@ -926,7 +926,7 @@ func (lv *Liveness) epilogue() {
// the only things that can possibly be live are the
// input parameters.
for j, n := range lv.vars {
if n.Class() != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
if n.Class_ != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Nname, n)
}
}
@ -1171,7 +1171,7 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
var maxArgNode *ir.Name
for _, n := range lv.vars {
switch n.Class() {
switch n.Class_ {
case ir.PPARAM, ir.PPARAMOUT:
if maxArgNode == nil || n.FrameOffset() > maxArgNode.FrameOffset() {
maxArgNode = n

View file

@ -27,7 +27,7 @@ func typecheckrange(n *ir.RangeStmt) {
// second half of dance, the first half being typecheckrangeExpr
n.SetTypecheck(1)
ls := n.List().Slice()
ls := n.Vars.Slice()
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
@ -35,19 +35,19 @@ func typecheckrange(n *ir.RangeStmt) {
}
decldepth++
typecheckslice(n.Body().Slice(), ctxStmt)
typecheckslice(n.Body.Slice(), ctxStmt)
decldepth--
}
func typecheckrangeExpr(n *ir.RangeStmt) {
n.SetRight(typecheck(n.Right(), ctxExpr))
n.X = typecheck(n.X, ctxExpr)
t := n.Right().Type()
t := n.X.Type()
if t == nil {
return
}
// delicate little dance. see typecheckas2
ls := n.List().Slice()
ls := n.Vars.Slice()
for i1, n1 := range ls {
if !ir.DeclaredBy(n1, n) {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
@ -63,7 +63,7 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
toomany := false
switch t.Kind() {
default:
base.ErrorfAt(n.Pos(), "cannot range over %L", n.Right())
base.ErrorfAt(n.Pos(), "cannot range over %L", n.X)
return
case types.TARRAY, types.TSLICE:
@ -76,13 +76,13 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
case types.TCHAN:
if !t.ChanDir().CanRecv() {
base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.Right(), n.Right().Type())
base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.X, n.X.Type())
return
}
t1 = t.Elem()
t2 = nil
if n.List().Len() == 2 {
if n.Vars.Len() == 2 {
toomany = true
}
@ -91,16 +91,16 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
t2 = types.RuneType
}
if n.List().Len() > 2 || toomany {
if n.Vars.Len() > 2 || toomany {
base.ErrorfAt(n.Pos(), "too many variables in range")
}
var v1, v2 ir.Node
if n.List().Len() != 0 {
v1 = n.List().First()
if n.Vars.Len() != 0 {
v1 = n.Vars.First()
}
if n.List().Len() > 1 {
v2 = n.List().Second()
if n.Vars.Len() > 1 {
v2 = n.Vars.Second()
}
// this is not only an optimization but also a requirement in the spec.
@ -109,7 +109,7 @@ func typecheckrangeExpr(n *ir.RangeStmt) {
// present."
if ir.IsBlank(v2) {
if v1 != nil {
n.PtrList().Set1(v1)
n.Vars.Set1(v1)
}
v2 = nil
}
@ -159,7 +159,7 @@ func cheapComputableIndex(width int64) bool {
// the returned node.
func walkrange(nrange *ir.RangeStmt) ir.Node {
if isMapClear(nrange) {
m := nrange.Right()
m := nrange.X
lno := setlineno(m)
n := mapClear(m)
base.Pos = lno
@ -168,7 +168,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
nfor := ir.NewForStmt(nrange.Pos(), nil, nil, nil, nil)
nfor.SetInit(nrange.Init())
nfor.SetSym(nrange.Sym())
nfor.Label = nrange.Label
// variable name conventions:
// ohv1, hv1, hv2: hidden (old) val 1, 2
@ -179,17 +179,17 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
t := nrange.Type()
a := nrange.Right()
a := nrange.X
lno := setlineno(a)
var v1, v2 ir.Node
l := nrange.List().Len()
l := nrange.Vars.Len()
if l > 0 {
v1 = nrange.List().First()
v1 = nrange.Vars.First()
}
if l > 1 {
v2 = nrange.List().Second()
v2 = nrange.Vars.Second()
}
if ir.IsBlank(v2) {
@ -227,8 +227,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn))
nfor.SetRight(ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1))))
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
nfor.Post = ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1)))
// for range ha { body }
if v1 == nil {
@ -249,8 +249,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1, tmp)
a.Lhs.Set2(v1, v2)
a.Rhs.Set2(hv1, tmp)
body = []ir.Node{a}
break
}
@ -268,7 +268,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// elimination on the index variable (see #20711).
// Enhance the prove pass to understand this.
ifGuard = ir.NewIfStmt(base.Pos, nil, nil, nil)
ifGuard.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn))
ifGuard.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
nfor.SetOp(ir.OFORUNTIL)
hp := temp(types.NewPtr(nrange.Type().Elem()))
@ -279,8 +279,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1, ir.NewStarExpr(base.Pos, hp))
a.Lhs.Set2(v1, v2)
a.Rhs.Set2(hv1, ir.NewStarExpr(base.Pos, hp))
body = append(body, a)
// Advance pointer as part of the late increment.
@ -289,7 +289,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// advancing the pointer is safe and won't go past the
// end of the allocation.
as := ir.NewAssignStmt(base.Pos, hp, addptr(hp, t.Elem().Width))
nfor.PtrList().Set1(typecheck(as, ctxStmt))
nfor.Late.Set1(typecheck(as, ctxStmt))
case types.TMAP:
// order.stmt allocated the iterator for us.
@ -305,11 +305,11 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, nodAddr(hit)))
nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), nodnil()))
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), nodnil())
fn = syslook("mapiternext")
fn = substArgTypes(fn, th)
nfor.SetRight(mkcall1(fn, nil, nil, nodAddr(hit)))
nfor.Post = mkcall1(fn, nil, nil, nodAddr(hit))
key := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym))
if v1 == nil {
@ -319,8 +319,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
} else {
elem := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, elemsym))
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(key, elem)
a.Lhs.Set2(v1, v2)
a.Rhs.Set2(key, elem)
body = []ir.Node{a}
}
@ -335,12 +335,12 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
}
hb := temp(types.Types[types.TBOOL])
nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.ONE, hb, nodbool(false)))
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, nodbool(false))
a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, nil, nil)
a.SetTypecheck(1)
a.PtrList().Set2(hv1, hb)
a.PtrRlist().Set1(ir.NewUnaryExpr(base.Pos, ir.ORECV, ha))
nfor.Left().PtrInit().Set1(a)
a.Lhs.Set2(hv1, hb)
a.Rhs.Set1(ir.NewUnaryExpr(base.Pos, ir.ORECV, ha))
nfor.Cond.PtrInit().Set1(a)
if v1 == nil {
body = nil
} else {
@ -378,7 +378,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
// hv1 < len(ha)
nfor.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha))
if v1 != nil {
// hv1t = hv1
@ -392,19 +392,19 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// if hv2 < utf8.RuneSelf
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, nodintconst(utf8.RuneSelf)))
nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv2, nodintconst(utf8.RuneSelf))
// hv1++
nif.PtrBody().Set1(ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1))))
nif.Body.Set1(ir.NewAssignStmt(base.Pos, hv1, ir.NewBinaryExpr(base.Pos, ir.OADD, hv1, nodintconst(1))))
// } else {
eif := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
nif.PtrRlist().Set1(eif)
nif.Else.Set1(eif)
// hv2, hv1 = decoderune(ha, hv1)
eif.PtrList().Set2(hv2, hv1)
eif.Lhs.Set2(hv2, hv1)
fn := syslook("decoderune")
eif.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, ha, hv1))
eif.Rhs.Set1(mkcall1(fn, fn.Type().Results(), nil, ha, hv1))
body = append(body, nif)
@ -412,8 +412,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
if v2 != nil {
// v1, v2 = hv1t, hv2
a := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
a.PtrList().Set2(v1, v2)
a.PtrRlist().Set2(hv1t, hv2)
a.Lhs.Set2(v1, v2)
a.Rhs.Set2(hv1t, hv2)
body = append(body, a)
} else {
// v1 = hv1t
@ -431,18 +431,18 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
nfor.PtrInit().Append(init...)
}
typecheckslice(nfor.Left().Init().Slice(), ctxStmt)
typecheckslice(nfor.Cond.Init().Slice(), ctxStmt)
nfor.SetLeft(typecheck(nfor.Left(), ctxExpr))
nfor.SetLeft(defaultlit(nfor.Left(), nil))
nfor.SetRight(typecheck(nfor.Right(), ctxStmt))
nfor.Cond = typecheck(nfor.Cond, ctxExpr)
nfor.Cond = defaultlit(nfor.Cond, nil)
nfor.Post = typecheck(nfor.Post, ctxStmt)
typecheckslice(body, ctxStmt)
nfor.PtrBody().Append(body...)
nfor.PtrBody().Append(nrange.Body().Slice()...)
nfor.Body.Append(body...)
nfor.Body.Append(nrange.Body.Slice()...)
var n ir.Node = nfor
if ifGuard != nil {
ifGuard.PtrBody().Set1(n)
ifGuard.Body.Set1(n)
n = ifGuard
}
@ -464,11 +464,11 @@ func isMapClear(n *ir.RangeStmt) bool {
return false
}
if n.Op() != ir.ORANGE || n.Type().Kind() != types.TMAP || n.List().Len() != 1 {
if n.Op() != ir.ORANGE || n.Type().Kind() != types.TMAP || n.Vars.Len() != 1 {
return false
}
k := n.List().First()
k := n.Vars.First()
if k == nil || ir.IsBlank(k) {
return false
}
@ -478,17 +478,17 @@ func isMapClear(n *ir.RangeStmt) bool {
return false
}
if n.Body().Len() != 1 {
if n.Body.Len() != 1 {
return false
}
stmt := n.Body().First() // only stmt in body
stmt := n.Body.First() // only stmt in body
if stmt == nil || stmt.Op() != ir.ODELETE {
return false
}
m := n.Right()
if delete := stmt.(*ir.CallExpr); !samesafeexpr(delete.List().First(), m) || !samesafeexpr(delete.List().Second(), k) {
m := n.X
if delete := stmt.(*ir.CallExpr); !samesafeexpr(delete.Args.First(), m) || !samesafeexpr(delete.Args.Second(), k) {
return false
}
@ -531,26 +531,26 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
return nil
}
if loop.Body().Len() != 1 || loop.Body().First() == nil {
if loop.Body.Len() != 1 || loop.Body.First() == nil {
return nil
}
stmt1 := loop.Body().First() // only stmt in body
stmt1 := loop.Body.First() // only stmt in body
if stmt1.Op() != ir.OAS {
return nil
}
stmt := stmt1.(*ir.AssignStmt)
if stmt.Left().Op() != ir.OINDEX {
if stmt.X.Op() != ir.OINDEX {
return nil
}
lhs := stmt.Left().(*ir.IndexExpr)
lhs := stmt.X.(*ir.IndexExpr)
if !samesafeexpr(lhs.Left(), a) || !samesafeexpr(lhs.Right(), v1) {
if !samesafeexpr(lhs.X, a) || !samesafeexpr(lhs.Index, v1) {
return nil
}
elemsize := loop.Type().Elem().Width
if elemsize <= 0 || !isZero(stmt.Right()) {
if elemsize <= 0 || !isZero(stmt.Y) {
return nil
}
@ -562,8 +562,8 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
// i = len(a) - 1
// }
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
n.PtrBody().Set(nil)
n.SetLeft(ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(0)))
n.Body.Set(nil)
n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(0))
// hp = &a[0]
hp := temp(types.Types[types.TUNSAFEPTR])
@ -571,12 +571,12 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
ix := ir.NewIndexExpr(base.Pos, a, nodintconst(0))
ix.SetBounded(true)
addr := convnop(nodAddr(ix), types.Types[types.TUNSAFEPTR])
n.PtrBody().Append(ir.NewAssignStmt(base.Pos, hp, addr))
n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))
// hn = len(a) * sizeof(elem(a))
hn := temp(types.Types[types.TUINTPTR])
mul := conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(elemsize)), types.Types[types.TUINTPTR])
n.PtrBody().Append(ir.NewAssignStmt(base.Pos, hn, mul))
n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))
var fn ir.Node
if a.Type().Elem().HasPointers() {
@ -588,16 +588,16 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
fn = mkcall("memclrNoHeapPointers", nil, nil, hp, hn)
}
n.PtrBody().Append(fn)
n.Body.Append(fn)
// i = len(a) - 1
v1 = ir.NewAssignStmt(base.Pos, v1, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), nodintconst(1)))
n.PtrBody().Append(v1)
n.Body.Append(v1)
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
typecheckslice(n.Body().Slice(), ctxStmt)
n.Cond = typecheck(n.Cond, ctxExpr)
n.Cond = defaultlit(n.Cond, nil)
typecheckslice(n.Body.Slice(), ctxStmt)
return walkstmt(n)
}

View file

@ -994,7 +994,7 @@ func typename(t *types.Type) *ir.AddrExpr {
if s.Def == nil {
n := ir.NewNameAt(src.NoXPos, s)
n.SetType(types.Types[types.TUINT8])
n.SetClass(ir.PEXTERN)
n.Class_ = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
}
@ -1013,7 +1013,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
if s.Def == nil {
n := NewName(s)
n.SetType(types.Types[types.TUINT8])
n.SetClass(ir.PEXTERN)
n.Class_ = ir.PEXTERN
n.SetTypecheck(1)
s.Def = n
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
@ -1875,7 +1875,7 @@ func zeroaddr(size int64) ir.Node {
if s.Def == nil {
x := NewName(s)
x.SetType(types.Types[types.TUINT8])
x.SetClass(ir.PEXTERN)
x.Class_ = ir.PEXTERN
x.SetTypecheck(1)
s.Def = x
}

View file

@ -58,7 +58,7 @@ func visitBottomUp(list []ir.Node, analyze func(list []*ir.Func, recursive bool)
for _, n := range list {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
if !n.Func().IsHiddenClosure() {
if !n.IsHiddenClosure() {
v.visit(n)
}
}
@ -82,7 +82,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
switch n.Op() {
case ir.ONAME:
n := n.(*ir.Name)
if n.Class() == ir.PFUNC {
if n.Class_ == ir.PFUNC {
if n != nil && n.Name().Defn != nil {
if m := v.visit(n.Name().Defn.(*ir.Func)); m < min {
min = m
@ -100,7 +100,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
case ir.ODOTMETH:
n := n.(*ir.SelectorExpr)
fn := methodExprName(n)
if fn != nil && fn.Op() == ir.ONAME && fn.Class() == ir.PFUNC && fn.Defn != nil {
if fn != nil && fn.Op() == ir.ONAME && fn.Class_ == ir.PFUNC && fn.Defn != nil {
if m := v.visit(fn.Defn.(*ir.Func)); m < min {
min = m
}
@ -109,7 +109,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
n := n.(*ir.CallPartExpr)
fn := ir.AsNode(callpartMethod(n).Nname)
if fn != nil && fn.Op() == ir.ONAME {
if fn := fn.(*ir.Name); fn.Class() == ir.PFUNC && fn.Name().Defn != nil {
if fn := fn.(*ir.Name); fn.Class_ == ir.PFUNC && fn.Name().Defn != nil {
if m := v.visit(fn.Name().Defn.(*ir.Func)); m < min {
min = m
}
@ -117,7 +117,7 @@ func (v *bottomUpVisitor) visit(n *ir.Func) uint32 {
}
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
if m := v.visit(n.Func()); m < min {
if m := v.visit(n.Func); m < min {
min = m
}
}

View file

@ -30,13 +30,13 @@ func findScope(marks []ir.Mark, pos src.XPos) ir.ScopeID {
func assembleScopes(fnsym *obj.LSym, fn *ir.Func, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope {
// Initialize the DWARF scope tree based on lexical scopes.
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func().Parents))
for i, parent := range fn.Func().Parents {
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Parents))
for i, parent := range fn.Parents {
dwarfScopes[i+1].Parent = int32(parent)
}
scopeVariables(dwarfVars, varScopes, dwarfScopes)
scopePCs(fnsym, fn.Func().Marks, dwarfScopes)
scopePCs(fnsym, fn.Marks, dwarfScopes)
return compactScopes(dwarfScopes)
}

View file

@ -15,30 +15,30 @@ func typecheckselect(sel *ir.SelectStmt) {
var def ir.Node
lno := setlineno(sel)
typecheckslice(sel.Init().Slice(), ctxStmt)
for _, ncase := range sel.List().Slice() {
for _, ncase := range sel.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
if ncase.List().Len() == 0 {
if ncase.List.Len() == 0 {
// default
if def != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
} else {
def = ncase
}
} else if ncase.List().Len() > 1 {
} else if ncase.List.Len() > 1 {
base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
} else {
ncase.List().SetFirst(typecheck(ncase.List().First(), ctxStmt))
n := ncase.List().First()
ncase.SetLeft(n)
ncase.PtrList().Set(nil)
ncase.List.SetFirst(typecheck(ncase.List.First(), ctxStmt))
n := ncase.List.First()
ncase.Comm = n
ncase.List.Set(nil)
oselrecv2 := func(dst, recv ir.Node, colas bool) {
n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
n.PtrList().Set2(dst, ir.BlankNode)
n.PtrRlist().Set1(recv)
n.SetColas(colas)
n.Lhs.Set2(dst, ir.BlankNode)
n.Rhs.Set1(recv)
n.Def = colas
n.SetTypecheck(1)
ncase.SetLeft(n)
ncase.Comm = n
}
switch n.Op() {
default:
@ -57,21 +57,21 @@ func typecheckselect(sel *ir.SelectStmt) {
// remove implicit conversions; the eventual assignment
// will reintroduce them.
n := n.(*ir.AssignStmt)
if r := n.Right(); r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
if r := n.Y; r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
r := r.(*ir.ConvExpr)
if r.Implicit() {
n.SetRight(r.Left())
n.Y = r.X
}
}
if n.Right().Op() != ir.ORECV {
if n.Y.Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
oselrecv2(n.Left(), n.Right(), n.Colas())
oselrecv2(n.X, n.Y, n.Def)
case ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
if n.Rlist().First().Op() != ir.ORECV {
if n.Rhs.First().Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
@ -87,7 +87,7 @@ func typecheckselect(sel *ir.SelectStmt) {
}
}
typecheckslice(ncase.Body().Slice(), ctxStmt)
typecheckslice(ncase.Body.Slice(), ctxStmt)
}
base.Pos = lno
@ -95,18 +95,18 @@ func typecheckselect(sel *ir.SelectStmt) {
func walkselect(sel *ir.SelectStmt) {
lno := setlineno(sel)
if sel.Body().Len() != 0 {
if sel.Compiled.Len() != 0 {
base.Fatalf("double walkselect")
}
init := sel.Init().Slice()
sel.PtrInit().Set(nil)
init = append(init, walkselectcases(sel.List())...)
sel.SetList(ir.Nodes{})
init = append(init, walkselectcases(sel.Cases)...)
sel.Cases = ir.Nodes{}
sel.PtrBody().Set(init)
walkstmtlist(sel.Body().Slice())
sel.Compiled.Set(init)
walkstmtlist(sel.Compiled.Slice())
base.Pos = lno
}
@ -125,8 +125,8 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
cas := cases.First().(*ir.CaseStmt)
setlineno(cas)
l := cas.Init().Slice()
if cas.Left() != nil { // not default:
n := cas.Left()
if cas.Comm != nil { // not default:
n := cas.Comm
l = append(l, n.Init().Slice()...)
n.PtrInit().Set(nil)
switch n.Op() {
@ -138,8 +138,8 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
case ir.OSELRECV2:
r := n.(*ir.AssignListStmt)
if ir.IsBlank(r.List().First()) && ir.IsBlank(r.List().Second()) {
n = r.Rlist().First()
if ir.IsBlank(r.Lhs.First()) && ir.IsBlank(r.Lhs.Second()) {
n = r.Rhs.First()
break
}
r.SetOp(ir.OAS2RECV)
@ -148,7 +148,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
l = append(l, n)
}
l = append(l, cas.Body().Slice()...)
l = append(l, cas.Body.Slice()...)
l = append(l, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
return l
}
@ -159,7 +159,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
for _, cas := range cases.Slice() {
cas := cas.(*ir.CaseStmt)
setlineno(cas)
n := cas.Left()
n := cas.Comm
if n == nil {
dflt = cas
continue
@ -167,14 +167,14 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
switch n.Op() {
case ir.OSEND:
n := n.(*ir.SendStmt)
n.SetRight(nodAddr(n.Right()))
n.SetRight(typecheck(n.Right(), ctxExpr))
n.Value = nodAddr(n.Value)
n.Value = typecheck(n.Value, ctxExpr)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.List().First()) {
n.List().SetIndex(0, nodAddr(n.List().First()))
n.List().SetIndex(0, typecheck(n.List().First(), ctxExpr))
if !ir.IsBlank(n.Lhs.First()) {
n.Lhs.SetIndex(0, nodAddr(n.Lhs.First()))
n.Lhs.SetIndex(0, typecheck(n.Lhs.First(), ctxExpr))
}
}
}
@ -186,7 +186,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
cas = cases.Second().(*ir.CaseStmt)
}
n := cas.Left()
n := cas.Comm
setlineno(n)
r := ir.NewIfStmt(base.Pos, nil, nil, nil)
r.PtrInit().Set(cas.Init().Slice())
@ -198,31 +198,31 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
case ir.OSEND:
// if selectnbsend(c, v) { body } else { default body }
n := n.(*ir.SendStmt)
ch := n.Left()
call = mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Right())
ch := n.Chan
call = mkcall1(chanfn("selectnbsend", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), ch, n.Value)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
recv := n.Rlist().First().(*ir.UnaryExpr)
ch := recv.Left()
elem := n.List().First()
recv := n.Rhs.First().(*ir.UnaryExpr)
ch := recv.X
elem := n.Lhs.First()
if ir.IsBlank(elem) {
elem = nodnil()
}
if ir.IsBlank(n.List().Second()) {
if ir.IsBlank(n.Lhs.Second()) {
// if selectnbrecv(&v, c) { body } else { default body }
call = mkcall1(chanfn("selectnbrecv", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, ch)
} else {
// TODO(cuonglm): make this use selectnbrecv()
// if selectnbrecv2(&v, &received, c) { body } else { default body }
receivedp := typecheck(nodAddr(n.List().Second()), ctxExpr)
receivedp := typecheck(nodAddr(n.Lhs.Second()), ctxExpr)
call = mkcall1(chanfn("selectnbrecv2", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, receivedp, ch)
}
}
r.SetLeft(typecheck(call, ctxExpr))
r.PtrBody().Set(cas.Body().Slice())
r.PtrRlist().Set(append(dflt.Init().Slice(), dflt.Body().Slice()...))
r.Cond = typecheck(call, ctxExpr)
r.Body.Set(cas.Body.Slice())
r.Else.Set(append(dflt.Init().Slice(), dflt.Body.Slice()...))
return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)}
}
@ -258,7 +258,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
init = append(init, cas.Init().Slice()...)
cas.PtrInit().Set(nil)
n := cas.Left()
n := cas.Comm
if n == nil { // default:
continue
}
@ -272,15 +272,15 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
n := n.(*ir.SendStmt)
i = nsends
nsends++
c = n.Left()
elem = n.Right()
c = n.Chan
elem = n.Value
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
nrecvs++
i = ncas - nrecvs
recv := n.Rlist().First().(*ir.UnaryExpr)
c = recv.Left()
elem = n.List().First()
recv := n.Rhs.First().(*ir.UnaryExpr)
c = recv.X
elem = n.Lhs.First()
}
casorder[i] = cas
@ -313,9 +313,9 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
chosen := temp(types.Types[types.TINT])
recvOK := temp(types.Types[types.TBOOL])
r := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
r.PtrList().Set2(chosen, recvOK)
r.Lhs.Set2(chosen, recvOK)
fn := syslook("selectgo")
r.PtrRlist().Set1(mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
r.Rhs.Set1(mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
init = append(init, typecheck(r, ctxStmt))
// selv and order are no longer alive after selectgo.
@ -332,16 +332,16 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
r := ir.NewIfStmt(base.Pos, cond, nil, nil)
if n := cas.Left(); n != nil && n.Op() == ir.OSELRECV2 {
if n := cas.Comm; n != nil && n.Op() == ir.OSELRECV2 {
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.List().Second()) {
x := ir.NewAssignStmt(base.Pos, n.List().Second(), recvOK)
r.PtrBody().Append(typecheck(x, ctxStmt))
if !ir.IsBlank(n.Lhs.Second()) {
x := ir.NewAssignStmt(base.Pos, n.Lhs.Second(), recvOK)
r.Body.Append(typecheck(x, ctxStmt))
}
}
r.PtrBody().AppendNodes(cas.PtrBody())
r.PtrBody().Append(ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
r.Body.AppendNodes(&cas.Body)
r.Body.Append(ir.NewBranchStmt(base.Pos, ir.OBREAK, nil))
init = append(init, r)
}

View file

@ -61,25 +61,25 @@ func (s *InitSchedule) tryStaticInit(nn ir.Node) bool {
return false
}
n := nn.(*ir.AssignStmt)
if ir.IsBlank(n.Left()) && !anySideEffects(n.Right()) {
if ir.IsBlank(n.X) && !anySideEffects(n.Y) {
// Discard.
return true
}
lno := setlineno(n)
defer func() { base.Pos = lno }()
nam := n.Left().(*ir.Name)
return s.staticassign(nam, 0, n.Right(), nam.Type())
nam := n.X.(*ir.Name)
return s.staticassign(nam, 0, n.Y, nam.Type())
}
// like staticassign but we are copying an already
// initialized value r.
func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *types.Type) bool {
if rn.Class() == ir.PFUNC {
if rn.Class_ == ir.PFUNC {
// TODO if roff != 0 { panic }
pfuncsym(l, loff, rn)
return true
}
if rn.Class() != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
if rn.Class_ != ir.PEXTERN || rn.Sym().Pkg != types.LocalPkg {
return false
}
if rn.Defn == nil { // probably zeroed but perhaps supplied externally and of unknown value
@ -92,10 +92,10 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
return false
}
orig := rn
r := rn.Defn.(*ir.AssignStmt).Right()
r := rn.Defn.(*ir.AssignStmt).Y
for r.Op() == ir.OCONVNOP && !types.Identical(r.Type(), typ) {
r = r.(*ir.ConvExpr).Left()
r = r.(*ir.ConvExpr).X
}
switch r.Op() {
@ -128,7 +128,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
case ir.OADDR:
r := r.(*ir.AddrExpr)
if a := r.Left(); a.Op() == ir.ONAME {
if a := r.X; a.Op() == ir.ONAME {
a := a.(*ir.Name)
addrsym(l, loff, a, 0)
return true
@ -136,7 +136,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
case ir.OPTRLIT:
r := r.(*ir.AddrExpr)
switch r.Left().Op() {
switch r.X.Op() {
case ir.OARRAYLIT, ir.OSLICELIT, ir.OSTRUCTLIT, ir.OMAPLIT:
// copy pointer
addrsym(l, loff, s.inittemps[r], 0)
@ -182,7 +182,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *types.Type) bool {
for r.Op() == ir.OCONVNOP {
r = r.(*ir.ConvExpr).Left()
r = r.(*ir.ConvExpr).X
}
switch r.Op() {
@ -206,7 +206,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
case ir.OADDR:
r := r.(*ir.AddrExpr)
if name, offset, ok := stataddr(r.Left()); ok {
if name, offset, ok := stataddr(r.X); ok {
addrsym(l, loff, name, offset)
return true
}
@ -214,17 +214,17 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
case ir.OPTRLIT:
r := r.(*ir.AddrExpr)
switch r.Left().Op() {
switch r.X.Op() {
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT, ir.OSTRUCTLIT:
// Init pointer.
a := staticname(r.Left().Type())
a := staticname(r.X.Type())
s.inittemps[r] = a
addrsym(l, loff, a, 0)
// Init underlying literal.
if !s.staticassign(a, 0, r.Left(), a.Type()) {
s.append(ir.NewAssignStmt(base.Pos, a, r.Left()))
if !s.staticassign(a, 0, r.X, a.Type()) {
s.append(ir.NewAssignStmt(base.Pos, a, r.X))
}
return true
}
@ -232,8 +232,8 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
case ir.OSTR2BYTES:
r := r.(*ir.ConvExpr)
if l.Class() == ir.PEXTERN && r.Left().Op() == ir.OLITERAL {
sval := ir.StringVal(r.Left())
if l.Class_ == ir.PEXTERN && r.X.Op() == ir.OLITERAL {
sval := ir.StringVal(r.X)
slicebytes(l, loff, sval)
return true
}
@ -284,7 +284,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
// Closures with no captured variables are globals,
// so the assignment can be done at link time.
// TODO if roff != 0 { panic }
pfuncsym(l, loff, r.Func().Nname)
pfuncsym(l, loff, r.Func.Nname)
return true
}
closuredebugruntimecheck(r)
@ -297,7 +297,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
r := r.(*ir.ConvExpr)
val := ir.Node(r)
for val.Op() == ir.OCONVIFACE {
val = val.(*ir.ConvExpr).Left()
val = val.(*ir.ConvExpr).X
}
if val.Type().IsInterface() {
@ -321,7 +321,7 @@ func (s *InitSchedule) staticassign(l *ir.Name, loff int64, r ir.Node, typ *type
// Create a copy of l to modify while we emit data.
// Emit itab, advance offset.
addrsym(l, loff, itab.Left().(*ir.Name), 0)
addrsym(l, loff, itab.X.(*ir.Name), 0)
// Emit data.
if isdirectiface(val.Type()) {
@ -409,7 +409,7 @@ func isSimpleName(nn ir.Node) bool {
return false
}
n := nn.(*ir.Name)
return n.Class() != ir.PAUTOHEAP && n.Class() != ir.PEXTERN
return n.Class_ != ir.PAUTOHEAP && n.Class_ != ir.PEXTERN
}
func litas(l ir.Node, r ir.Node, init *ir.Nodes) {
@ -439,7 +439,7 @@ func getdyn(n ir.Node, top bool) initGenType {
if !top {
return initDynamic
}
if n.Len/4 > int64(n.List().Len()) {
if n.Len/4 > int64(n.List.Len()) {
// <25% of entries have explicit values.
// Very rough estimation, it takes 4 bytes of instructions
// to initialize 1 byte of result. So don't use a static
@ -454,12 +454,12 @@ func getdyn(n ir.Node, top bool) initGenType {
lit := n.(*ir.CompLitExpr)
var mode initGenType
for _, n1 := range lit.List().Slice() {
for _, n1 := range lit.List.Slice() {
switch n1.Op() {
case ir.OKEY:
n1 = n1.(*ir.KeyExpr).Right()
n1 = n1.(*ir.KeyExpr).Value
case ir.OSTRUCTKEY:
n1 = n1.(*ir.StructKeyExpr).Left()
n1 = n1.(*ir.StructKeyExpr).Value
}
mode |= getdyn(n1, false)
if mode == initDynamic|initConst {
@ -476,9 +476,9 @@ func isStaticCompositeLiteral(n ir.Node) bool {
return false
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
for _, r := range n.List().Slice() {
for _, r := range n.List.Slice() {
if r.Op() == ir.OKEY {
r = r.(*ir.KeyExpr).Right()
r = r.(*ir.KeyExpr).Value
}
if !isStaticCompositeLiteral(r) {
return false
@ -487,9 +487,9 @@ func isStaticCompositeLiteral(n ir.Node) bool {
return true
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
for _, r := range n.List().Slice() {
for _, r := range n.List.Slice() {
r := r.(*ir.StructKeyExpr)
if !isStaticCompositeLiteral(r.Left()) {
if !isStaticCompositeLiteral(r.Value) {
return false
}
}
@ -501,7 +501,7 @@ func isStaticCompositeLiteral(n ir.Node) bool {
n := n.(*ir.ConvExpr)
val := ir.Node(n)
for val.Op() == ir.OCONVIFACE {
val = val.(*ir.ConvExpr).Left()
val = val.(*ir.ConvExpr).X
}
if val.Type().IsInterface() {
return val.Op() == ir.ONIL
@ -542,11 +542,11 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
splitnode = func(r ir.Node) (ir.Node, ir.Node) {
if r.Op() == ir.OKEY {
kv := r.(*ir.KeyExpr)
k = indexconst(kv.Left())
k = indexconst(kv.Key)
if k < 0 {
base.Fatalf("fixedlit: invalid index %v", kv.Left())
base.Fatalf("fixedlit: invalid index %v", kv.Key)
}
r = kv.Right()
r = kv.Value
}
a := ir.NewIndexExpr(base.Pos, var_, nodintconst(k))
k++
@ -558,17 +558,17 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
case ir.OSTRUCTLIT:
splitnode = func(rn ir.Node) (ir.Node, ir.Node) {
r := rn.(*ir.StructKeyExpr)
if r.Sym().IsBlank() || isBlank {
return ir.BlankNode, r.Left()
if r.Field.IsBlank() || isBlank {
return ir.BlankNode, r.Value
}
setlineno(r)
return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Sym()), r.Left()
return ir.NewSelectorExpr(base.Pos, ir.ODOT, var_, r.Field), r.Value
}
default:
base.Fatalf("fixedlit bad op: %v", n.Op())
}
for _, r := range n.List().Slice() {
for _, r := range n.List.Slice() {
a, value := splitnode(r)
if a == ir.BlankNode && !anySideEffects(value) {
// Discard.
@ -635,7 +635,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// copy static to slice
var_ = typecheck(var_, ctxExpr|ctxAssign)
name, offset, ok := stataddr(var_)
if !ok || name.Class() != ir.PEXTERN {
if !ok || name.Class_ != ir.PEXTERN {
base.Fatalf("slicelit: %v", var_)
}
slicesym(name, offset, vstat, t.NumElem())
@ -703,7 +703,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
a = ir.NewAssignStmt(base.Pos, temp(t), nil)
a = typecheck(a, ctxStmt)
init.Append(a) // zero new temp
a = a.(*ir.AssignStmt).Left()
a = a.(*ir.AssignStmt).X
} else {
init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, a))
}
@ -722,14 +722,14 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// put dynamics into array (5)
var index int64
for _, value := range n.List().Slice() {
for _, value := range n.List.Slice() {
if value.Op() == ir.OKEY {
kv := value.(*ir.KeyExpr)
index = indexconst(kv.Left())
index = indexconst(kv.Key)
if index < 0 {
base.Fatalf("slicelit: invalid index %v", kv.Left())
base.Fatalf("slicelit: invalid index %v", kv.Key)
}
value = kv.Right()
value = kv.Value
}
a := ir.NewIndexExpr(base.Pos, vauto, nodintconst(index))
a.SetBounded(true)
@ -778,16 +778,16 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
// make the map var
a := ir.NewCallExpr(base.Pos, ir.OMAKE, nil, nil)
a.SetEsc(n.Esc())
a.PtrList().Set2(ir.TypeNode(n.Type()), nodintconst(int64(n.List().Len())))
a.Args.Set2(ir.TypeNode(n.Type()), nodintconst(int64(n.List.Len())))
litas(m, a, init)
entries := n.List().Slice()
entries := n.List.Slice()
// The order pass already removed any dynamic (runtime-computed) entries.
// All remaining entries are static. Double-check that.
for _, r := range entries {
r := r.(*ir.KeyExpr)
if !isStaticCompositeLiteral(r.Left()) || !isStaticCompositeLiteral(r.Right()) {
if !isStaticCompositeLiteral(r.Key) || !isStaticCompositeLiteral(r.Value) {
base.Fatalf("maplit: entry is not a literal: %v", r)
}
}
@ -813,8 +813,8 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
datae := ir.NewCompLitExpr(base.Pos, ir.OARRAYLIT, nil, nil)
for _, r := range entries {
r := r.(*ir.KeyExpr)
datak.PtrList().Append(r.Left())
datae.PtrList().Append(r.Right())
datak.List.Append(r.Key)
datae.List.Append(r.Value)
}
fixedlit(inInitFunction, initKindStatic, datak, vstatk, init)
fixedlit(inInitFunction, initKindStatic, datae, vstate, init)
@ -837,7 +837,7 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
body := ir.NewAssignStmt(base.Pos, lhs, rhs)
loop := ir.NewForStmt(base.Pos, nil, cond, incr, nil)
loop.PtrBody().Set1(body)
loop.Body.Set1(body)
loop.PtrInit().Set1(zero)
appendWalkStmt(init, loop)
@ -853,7 +853,7 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
for _, r := range entries {
r := r.(*ir.KeyExpr)
index, elem := r.Left(), r.Right()
index, elem := r.Key, r.Value
setlineno(index)
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, tmpkey, index))
@ -890,19 +890,19 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
}
var r ir.Node
if n.Right() != nil {
if n.Alloc != nil {
// n.Right is stack temporary used as backing store.
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Right(), nil)) // zero backing store, just in case (#18410)
r = nodAddr(n.Right())
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Alloc, nil)) // zero backing store, just in case (#18410)
r = nodAddr(n.Alloc)
} else {
r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.Left().Type()))
r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.X.Type()))
r.SetEsc(n.Esc())
}
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, r))
var_ = ir.NewStarExpr(base.Pos, var_)
var_ = typecheck(var_, ctxExpr|ctxAssign)
anylit(n.Left(), var_, init)
anylit(n.X, var_, init)
case ir.OSTRUCTLIT, ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
@ -910,7 +910,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
base.Fatalf("anylit: not struct/array")
}
if isSimpleName(var_) && n.List().Len() > 4 {
if isSimpleName(var_) && n.List.Len() > 4 {
// lay out static data
vstat := readonlystaticname(t)
@ -935,7 +935,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
components = int64(t.NumFields())
}
// initialization of an array or struct with unspecified components (missing fields or arrays)
if isSimpleName(var_) || int64(n.List().Len()) < components {
if isSimpleName(var_) || int64(n.List.Len()) < components {
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, nil))
}
@ -958,34 +958,34 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
// It returns true if n's effects have been added to init,
// in which case n should be dropped from the program by the caller.
func oaslit(n *ir.AssignStmt, init *ir.Nodes) bool {
if n.Left() == nil || n.Right() == nil {
if n.X == nil || n.Y == nil {
// not a special composite literal assignment
return false
}
if n.Left().Type() == nil || n.Right().Type() == nil {
if n.X.Type() == nil || n.Y.Type() == nil {
// not a special composite literal assignment
return false
}
if !isSimpleName(n.Left()) {
if !isSimpleName(n.X) {
// not a special composite literal assignment
return false
}
if !types.Identical(n.Left().Type(), n.Right().Type()) {
if !types.Identical(n.X.Type(), n.Y.Type()) {
// not a special composite literal assignment
return false
}
switch n.Right().Op() {
switch n.Y.Op() {
default:
// not a special composite literal assignment
return false
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
if refersToCommonName(n.Left(), n.Right()) {
if refersToCommonName(n.X, n.Y) {
// not a special composite literal assignment
return false
}
anylit(n.Right(), n.Left(), init)
anylit(n.Y, n.X, init)
}
return true
@ -1015,21 +1015,21 @@ func stataddr(n ir.Node) (name *ir.Name, offset int64, ok bool) {
case ir.ODOT:
n := n.(*ir.SelectorExpr)
if name, offset, ok = stataddr(n.Left()); !ok {
if name, offset, ok = stataddr(n.X); !ok {
break
}
offset += n.Offset()
offset += n.Offset
return name, offset, true
case ir.OINDEX:
n := n.(*ir.IndexExpr)
if n.Left().Type().IsSlice() {
if n.X.Type().IsSlice() {
break
}
if name, offset, ok = stataddr(n.Left()); !ok {
if name, offset, ok = stataddr(n.X); !ok {
break
}
l := getlit(n.Right())
l := getlit(n.Index)
if l < 0 {
break
}
@ -1058,14 +1058,14 @@ func (s *InitSchedule) initplan(n ir.Node) {
case ir.OARRAYLIT, ir.OSLICELIT:
n := n.(*ir.CompLitExpr)
var k int64
for _, a := range n.List().Slice() {
for _, a := range n.List.Slice() {
if a.Op() == ir.OKEY {
kv := a.(*ir.KeyExpr)
k = indexconst(kv.Left())
k = indexconst(kv.Key)
if k < 0 {
base.Fatalf("initplan arraylit: invalid index %v", kv.Left())
base.Fatalf("initplan arraylit: invalid index %v", kv.Key)
}
a = kv.Right()
a = kv.Value
}
s.addvalue(p, k*n.Type().Elem().Width, a)
k++
@ -1073,25 +1073,25 @@ func (s *InitSchedule) initplan(n ir.Node) {
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
for _, a := range n.List().Slice() {
for _, a := range n.List.Slice() {
if a.Op() != ir.OSTRUCTKEY {
base.Fatalf("initplan structlit")
}
a := a.(*ir.StructKeyExpr)
if a.Sym().IsBlank() {
if a.Field.IsBlank() {
continue
}
s.addvalue(p, a.Offset(), a.Left())
s.addvalue(p, a.Offset, a.Value)
}
case ir.OMAPLIT:
n := n.(*ir.CompLitExpr)
for _, a := range n.List().Slice() {
for _, a := range n.List.Slice() {
if a.Op() != ir.OKEY {
base.Fatalf("initplan maplit")
}
a := a.(*ir.KeyExpr)
s.addvalue(p, -1, a.Right())
s.addvalue(p, -1, a.Value)
}
}
}
@ -1135,9 +1135,9 @@ func isZero(n ir.Node) bool {
case ir.OARRAYLIT:
n := n.(*ir.CompLitExpr)
for _, n1 := range n.List().Slice() {
for _, n1 := range n.List.Slice() {
if n1.Op() == ir.OKEY {
n1 = n1.(*ir.KeyExpr).Right()
n1 = n1.(*ir.KeyExpr).Value
}
if !isZero(n1) {
return false
@ -1147,9 +1147,9 @@ func isZero(n ir.Node) bool {
case ir.OSTRUCTLIT:
n := n.(*ir.CompLitExpr)
for _, n1 := range n.List().Slice() {
for _, n1 := range n.List.Slice() {
n1 := n1.(*ir.StructKeyExpr)
if !isZero(n1.Left()) {
if !isZero(n1.Value) {
return false
}
}
@ -1164,16 +1164,16 @@ func isvaluelit(n ir.Node) bool {
}
func genAsStatic(as *ir.AssignStmt) {
if as.Left().Type() == nil {
if as.X.Type() == nil {
base.Fatalf("genAsStatic as.Left not typechecked")
}
name, offset, ok := stataddr(as.Left())
if !ok || (name.Class() != ir.PEXTERN && as.Left() != ir.BlankNode) {
base.Fatalf("genAsStatic: lhs %v", as.Left())
name, offset, ok := stataddr(as.X)
if !ok || (name.Class_ != ir.PEXTERN && as.X != ir.BlankNode) {
base.Fatalf("genAsStatic: lhs %v", as.X)
}
switch r := as.Right(); r.Op() {
switch r := as.Y; r.Op() {
case ir.OLITERAL:
litsym(name, offset, r, int(r.Type().Width))
return
@ -1183,13 +1183,13 @@ func genAsStatic(as *ir.AssignStmt) {
return
case ir.ONAME:
r := r.(*ir.Name)
if r.Offset() != 0 {
if r.Offset_ != 0 {
base.Fatalf("genAsStatic %+v", as)
}
if r.Class() == ir.PFUNC {
if r.Class_ == ir.PFUNC {
pfuncsym(name, offset, r)
return
}
}
base.Fatalf("genAsStatic: rhs %v", as.Right())
base.Fatalf("genAsStatic: rhs %v", as.Y)
}

File diff suppressed because it is too large Load diff

View file

@ -616,7 +616,7 @@ func calcHasCall(n ir.Node) bool {
if instrumenting {
return true
}
return n.Left().HasCall() || n.Right().HasCall()
return n.X.HasCall() || n.Y.HasCall()
case ir.OINDEX, ir.OSLICE, ir.OSLICEARR, ir.OSLICE3, ir.OSLICE3ARR, ir.OSLICESTR,
ir.ODEREF, ir.ODOTPTR, ir.ODOTTYPE, ir.ODIV, ir.OMOD:
// These ops might panic, make sure they are done
@ -630,49 +630,49 @@ func calcHasCall(n ir.Node) bool {
if thearch.SoftFloat && (isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) {
return true
}
return n.Left().HasCall() || n.Right().HasCall()
return n.X.HasCall() || n.Y.HasCall()
case ir.ONEG:
n := n.(*ir.UnaryExpr)
if thearch.SoftFloat && (isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) {
return true
}
return n.Left().HasCall()
return n.X.HasCall()
case ir.OLT, ir.OEQ, ir.ONE, ir.OLE, ir.OGE, ir.OGT:
n := n.(*ir.BinaryExpr)
if thearch.SoftFloat && (isFloat[n.Left().Type().Kind()] || isComplex[n.Left().Type().Kind()]) {
if thearch.SoftFloat && (isFloat[n.X.Type().Kind()] || isComplex[n.X.Type().Kind()]) {
return true
}
return n.Left().HasCall() || n.Right().HasCall()
return n.X.HasCall() || n.Y.HasCall()
case ir.OCONV:
n := n.(*ir.ConvExpr)
if thearch.SoftFloat && ((isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) || (isFloat[n.Left().Type().Kind()] || isComplex[n.Left().Type().Kind()])) {
if thearch.SoftFloat && ((isFloat[n.Type().Kind()] || isComplex[n.Type().Kind()]) || (isFloat[n.X.Type().Kind()] || isComplex[n.X.Type().Kind()])) {
return true
}
return n.Left().HasCall()
return n.X.HasCall()
case ir.OAND, ir.OANDNOT, ir.OLSH, ir.OOR, ir.ORSH, ir.OXOR, ir.OCOPY, ir.OCOMPLEX, ir.OEFACE:
n := n.(*ir.BinaryExpr)
return n.Left().HasCall() || n.Right().HasCall()
return n.X.HasCall() || n.Y.HasCall()
case ir.OAS:
n := n.(*ir.AssignStmt)
return n.Left().HasCall() || n.Right() != nil && n.Right().HasCall()
return n.X.HasCall() || n.Y != nil && n.Y.HasCall()
case ir.OADDR:
n := n.(*ir.AddrExpr)
return n.Left().HasCall()
return n.X.HasCall()
case ir.OPAREN:
n := n.(*ir.ParenExpr)
return n.Left().HasCall()
return n.X.HasCall()
case ir.OBITNOT, ir.ONOT, ir.OPLUS, ir.ORECV,
ir.OALIGNOF, ir.OCAP, ir.OCLOSE, ir.OIMAG, ir.OLEN, ir.ONEW,
ir.OOFFSETOF, ir.OPANIC, ir.OREAL, ir.OSIZEOF,
ir.OCHECKNIL, ir.OCFUNC, ir.OIDATA, ir.OITAB, ir.ONEWOBJ, ir.OSPTR, ir.OVARDEF, ir.OVARKILL, ir.OVARLIVE:
n := n.(*ir.UnaryExpr)
return n.Left().HasCall()
return n.X.HasCall()
case ir.ODOT, ir.ODOTMETH, ir.ODOTINTER:
n := n.(*ir.SelectorExpr)
return n.Left().HasCall()
return n.X.HasCall()
case ir.OGETG, ir.OCLOSUREREAD, ir.OMETHEXPR:
return false
@ -687,15 +687,15 @@ func calcHasCall(n ir.Node) bool {
case ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.OBYTES2STRTMP, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2BYTESTMP, ir.OSTR2RUNES, ir.ORUNESTR:
// TODO(rsc): Some conversions are themselves calls, no?
n := n.(*ir.ConvExpr)
return n.Left().HasCall()
return n.X.HasCall()
case ir.ODOTTYPE2:
// TODO(rsc): Shouldn't this be up with ODOTTYPE above?
n := n.(*ir.TypeAssertExpr)
return n.Left().HasCall()
return n.X.HasCall()
case ir.OSLICEHEADER:
// TODO(rsc): What about len and cap?
n := n.(*ir.SliceHeaderExpr)
return n.Left().HasCall()
return n.Ptr.HasCall()
case ir.OAS2DOTTYPE, ir.OAS2FUNC:
// TODO(rsc): Surely we need to check List and Rlist.
return false
@ -783,44 +783,44 @@ func safeexpr(n ir.Node, init *ir.Nodes) ir.Node {
case ir.OLEN, ir.OCAP:
n := n.(*ir.UnaryExpr)
l := safeexpr(n.Left(), init)
if l == n.Left() {
l := safeexpr(n.X, init)
if l == n.X {
return n
}
a := ir.Copy(n).(*ir.UnaryExpr)
a.SetLeft(l)
a.X = l
return walkexpr(typecheck(a, ctxExpr), init)
case ir.ODOT, ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
l := safeexpr(n.Left(), init)
if l == n.Left() {
l := safeexpr(n.X, init)
if l == n.X {
return n
}
a := ir.Copy(n).(*ir.SelectorExpr)
a.SetLeft(l)
a.X = l
return walkexpr(typecheck(a, ctxExpr), init)
case ir.ODEREF:
n := n.(*ir.StarExpr)
l := safeexpr(n.Left(), init)
if l == n.Left() {
l := safeexpr(n.X, init)
if l == n.X {
return n
}
a := ir.Copy(n).(*ir.StarExpr)
a.SetLeft(l)
a.X = l
return walkexpr(typecheck(a, ctxExpr), init)
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
l := safeexpr(n.Left(), init)
r := safeexpr(n.Right(), init)
if l == n.Left() && r == n.Right() {
l := safeexpr(n.X, init)
r := safeexpr(n.Index, init)
if l == n.X && r == n.Index {
return n
}
a := ir.Copy(n).(*ir.IndexExpr)
a.SetLeft(l)
a.SetRight(r)
a.X = l
a.Index = r
return walkexpr(typecheck(a, ctxExpr), init)
case ir.OSTRUCTLIT, ir.OARRAYLIT, ir.OSLICELIT:
@ -992,20 +992,20 @@ func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (
// will give shortest unique addressing.
// modify the tree with missing type names.
func adddot(n *ir.SelectorExpr) *ir.SelectorExpr {
n.SetLeft(typecheck(n.Left(), ctxType|ctxExpr))
if n.Left().Diag() {
n.X = typecheck(n.X, ctxType|ctxExpr)
if n.X.Diag() {
n.SetDiag(true)
}
t := n.Left().Type()
t := n.X.Type()
if t == nil {
return n
}
if n.Left().Op() == ir.OTYPE {
if n.X.Op() == ir.OTYPE {
return n
}
s := n.Sym()
s := n.Sel
if s == nil {
return n
}
@ -1014,14 +1014,14 @@ func adddot(n *ir.SelectorExpr) *ir.SelectorExpr {
case path != nil:
// rebuild elided dots
for c := len(path) - 1; c >= 0; c-- {
dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.Left(), path[c].field.Sym)
dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.X, path[c].field.Sym)
dot.SetImplicit(true)
dot.SetType(path[c].field.Type)
n.SetLeft(dot)
n.X = dot
}
case ambig:
base.Errorf("ambiguous selector %v", n)
n.SetLeft(nil)
n.X = nil
}
return n
@ -1228,10 +1228,10 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
if rcvr.IsPtr() && rcvr.Elem() == methodrcvr {
// generating wrapper from *T to T.
n := ir.NewIfStmt(base.Pos, nil, nil, nil)
n.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, nodnil()))
n.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, nthis, nodnil())
call := ir.NewCallExpr(base.Pos, ir.OCALL, syslook("panicwrap"), nil)
n.PtrBody().Set1(call)
fn.PtrBody().Append(n)
n.Body.Set1(call)
fn.Body.Append(n)
}
dot := adddot(ir.NewSelectorExpr(base.Pos, ir.OXDOT, nthis, method.Sym))
@ -1245,29 +1245,29 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
// value for that function.
if !instrumenting && rcvr.IsPtr() && methodrcvr.IsPtr() && method.Embedded != 0 && !isifacemethod(method.Type) && !(thearch.LinkArch.Name == "ppc64le" && base.Ctxt.Flag_dynlink) {
// generate tail call: adjust pointer receiver and jump to embedded method.
left := dot.Left() // skip final .M
left := dot.X // skip final .M
if !left.Type().IsPtr() {
left = nodAddr(left)
}
as := ir.NewAssignStmt(base.Pos, nthis, convnop(left, rcvr))
fn.PtrBody().Append(as)
fn.PtrBody().Append(ir.NewBranchStmt(base.Pos, ir.ORETJMP, methodSym(methodrcvr, method.Sym)))
fn.Body.Append(as)
fn.Body.Append(ir.NewBranchStmt(base.Pos, ir.ORETJMP, methodSym(methodrcvr, method.Sym)))
} else {
fn.SetWrapper(true) // ignore frame for panic+recover matching
call := ir.NewCallExpr(base.Pos, ir.OCALL, dot, nil)
call.PtrList().Set(paramNnames(tfn.Type()))
call.SetIsDDD(tfn.Type().IsVariadic())
call.Args.Set(paramNnames(tfn.Type()))
call.IsDDD = tfn.Type().IsVariadic()
if method.Type.NumResults() > 0 {
ret := ir.NewReturnStmt(base.Pos, nil)
ret.PtrList().Set1(call)
fn.PtrBody().Append(ret)
ret.Results.Set1(call)
fn.Body.Append(ret)
} else {
fn.PtrBody().Append(call)
fn.Body.Append(call)
}
}
if false && base.Flag.LowerR != 0 {
ir.DumpList("genwrapper body", fn.Body())
ir.DumpList("genwrapper body", fn.Body)
}
funcbody()
@ -1277,7 +1277,7 @@ func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym) {
typecheckFunc(fn)
Curfn = fn
typecheckslice(fn.Body().Slice(), ctxStmt)
typecheckslice(fn.Body.Slice(), ctxStmt)
// Inline calls within (*T).M wrappers. This is safe because we only
// generate those wrappers within the same compilation unit as (T).M.
@ -1422,7 +1422,7 @@ func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool
func liststmt(l []ir.Node) ir.Node {
n := ir.NewBlockStmt(base.Pos, nil)
n.PtrList().Set(l)
n.List.Set(l)
if len(l) != 0 {
n.SetPos(l[0].Pos())
}
@ -1542,8 +1542,8 @@ func itabType(itab ir.Node) ir.Node {
typ := ir.NewSelectorExpr(base.Pos, ir.ODOTPTR, itab, nil)
typ.SetType(types.NewPtr(types.Types[types.TUINT8]))
typ.SetTypecheck(1)
typ.SetOffset(int64(Widthptr)) // offset of _type in runtime.itab
typ.SetBounded(true) // guaranteed not to fault
typ.Offset = int64(Widthptr) // offset of _type in runtime.itab
typ.SetBounded(true) // guaranteed not to fault
return typ
}

View file

@ -17,7 +17,7 @@ import (
// typecheckswitch typechecks a switch statement.
func typecheckswitch(n *ir.SwitchStmt) {
typecheckslice(n.Init().Slice(), ctxStmt)
if n.Left() != nil && n.Left().Op() == ir.OTYPESW {
if n.Tag != nil && n.Tag.Op() == ir.OTYPESW {
typecheckTypeSwitch(n)
} else {
typecheckExprSwitch(n)
@ -25,26 +25,26 @@ func typecheckswitch(n *ir.SwitchStmt) {
}
func typecheckTypeSwitch(n *ir.SwitchStmt) {
guard := n.Left().(*ir.TypeSwitchGuard)
guard.SetRight(typecheck(guard.Right(), ctxExpr))
t := guard.Right().Type()
guard := n.Tag.(*ir.TypeSwitchGuard)
guard.X = typecheck(guard.X, ctxExpr)
t := guard.X.Type()
if t != nil && !t.IsInterface() {
base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.Right())
base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.X)
t = nil
}
// We don't actually declare the type switch's guarded
// declaration itself. So if there are no cases, we won't
// notice that it went unused.
if v := guard.Left(); v != nil && !ir.IsBlank(v) && n.List().Len() == 0 {
if v := guard.Tag; v != nil && !ir.IsBlank(v) && n.Cases.Len() == 0 {
base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
}
var defCase, nilCase ir.Node
var ts typeSet
for _, ncase := range n.List().Slice() {
for _, ncase := range n.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
ls := ncase.List().Slice()
ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
@ -77,13 +77,13 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
if !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke() {
if have != nil && !have.Broke() {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.Right(), n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.X, n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else if ptr != 0 {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (%v method has pointer receiver)", guard.Right(), n1.Type(), missing.Sym)
" (%v method has pointer receiver)", guard.X, n1.Type(), missing.Sym)
} else {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (missing %v method)", guard.Right(), n1.Type(), missing.Sym)
" (missing %v method)", guard.X, n1.Type(), missing.Sym)
}
continue
}
@ -91,7 +91,7 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
ts.add(ncase.Pos(), n1.Type())
}
if ncase.Rlist().Len() != 0 {
if ncase.Vars.Len() != 0 {
// Assign the clause variable's type.
vt := t
if len(ls) == 1 {
@ -104,7 +104,7 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
}
}
nvar := ncase.Rlist().First()
nvar := ncase.Vars.First()
nvar.SetType(vt)
if vt != nil {
nvar = typecheck(nvar, ctxExpr|ctxAssign)
@ -113,10 +113,10 @@ func typecheckTypeSwitch(n *ir.SwitchStmt) {
nvar.SetTypecheck(1)
nvar.SetWalkdef(1)
}
ncase.Rlist().SetFirst(nvar)
ncase.Vars.SetFirst(nvar)
}
typecheckslice(ncase.Body().Slice(), ctxStmt)
typecheckslice(ncase.Body.Slice(), ctxStmt)
}
}
@ -150,10 +150,10 @@ func (s *typeSet) add(pos src.XPos, typ *types.Type) {
func typecheckExprSwitch(n *ir.SwitchStmt) {
t := types.Types[types.TBOOL]
if n.Left() != nil {
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
t = n.Left().Type()
if n.Tag != nil {
n.Tag = typecheck(n.Tag, ctxExpr)
n.Tag = defaultlit(n.Tag, nil)
t = n.Tag.Type()
}
var nilonly string
@ -168,9 +168,9 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
case !IsComparable(t):
if t.IsStruct() {
base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Left(), IncomparableField(t).Type)
base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Tag, IncomparableField(t).Type)
} else {
base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Left())
base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Tag)
}
t = nil
}
@ -178,9 +178,9 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
var defCase ir.Node
var cs constSet
for _, ncase := range n.List().Slice() {
for _, ncase := range n.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
ls := ncase.List().Slice()
ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
@ -199,15 +199,15 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
}
if nilonly != "" && !ir.IsNil(n1) {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left())
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Tag)
} else if t.IsInterface() && !n1.Type().IsInterface() && !IsComparable(n1.Type()) {
base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
} else {
op1, _ := assignop(n1.Type(), t)
op2, _ := assignop(t, n1.Type())
if op1 == ir.OXXX && op2 == ir.OXXX {
if n.Left() != nil {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left(), n1.Type(), t)
if n.Tag != nil {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Tag, n1.Type(), t)
} else {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
}
@ -225,18 +225,18 @@ func typecheckExprSwitch(n *ir.SwitchStmt) {
}
}
typecheckslice(ncase.Body().Slice(), ctxStmt)
typecheckslice(ncase.Body.Slice(), ctxStmt)
}
}
// walkswitch walks a switch statement.
func walkswitch(sw *ir.SwitchStmt) {
// Guard against double walk, see #25776.
if sw.List().Len() == 0 && sw.Body().Len() > 0 {
if sw.Cases.Len() == 0 && sw.Compiled.Len() > 0 {
return // Was fatal, but eliminating every possible source of double-walking is hard
}
if sw.Left() != nil && sw.Left().Op() == ir.OTYPESW {
if sw.Tag != nil && sw.Tag.Op() == ir.OTYPESW {
walkTypeSwitch(sw)
} else {
walkExprSwitch(sw)
@ -248,8 +248,8 @@ func walkswitch(sw *ir.SwitchStmt) {
func walkExprSwitch(sw *ir.SwitchStmt) {
lno := setlineno(sw)
cond := sw.Left()
sw.SetLeft(nil)
cond := sw.Tag
sw.Tag = nil
// convert switch {...} to switch true {...}
if cond == nil {
@ -272,7 +272,7 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
cond = walkexpr(cond, sw.PtrInit())
if cond.Op() != ir.OLITERAL && cond.Op() != ir.ONIL {
cond = copyexpr(cond, cond.Type(), sw.PtrBody())
cond = copyexpr(cond, cond.Type(), &sw.Compiled)
}
base.Pos = lno
@ -283,33 +283,33 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
var defaultGoto ir.Node
var body ir.Nodes
for _, ncase := range sw.List().Slice() {
for _, ncase := range sw.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
label := autolabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
// Process case dispatch.
if ncase.List().Len() == 0 {
if ncase.List.Len() == 0 {
if defaultGoto != nil {
base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
for _, n1 := range ncase.List().Slice() {
for _, n1 := range ncase.List.Slice() {
s.Add(ncase.Pos(), n1, jmp)
}
// Process body.
body.Append(ir.NewLabelStmt(ncase.Pos(), label))
body.Append(ncase.Body().Slice()...)
if fall, pos := endsInFallthrough(ncase.Body().Slice()); !fall {
body.Append(ncase.Body.Slice()...)
if fall, pos := endsInFallthrough(ncase.Body.Slice()); !fall {
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
br.SetPos(pos)
body.Append(br)
}
}
sw.PtrList().Set(nil)
sw.Cases.Set(nil)
if defaultGoto == nil {
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
@ -317,10 +317,10 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
defaultGoto = br
}
s.Emit(sw.PtrBody())
sw.PtrBody().Append(defaultGoto)
sw.PtrBody().AppendNodes(&body)
walkstmtlist(sw.Body().Slice())
s.Emit(&sw.Compiled)
sw.Compiled.Append(defaultGoto)
sw.Compiled.AppendNodes(&body)
walkstmtlist(sw.Compiled.Slice())
}
// An exprSwitch walks an expression switch.
@ -402,8 +402,8 @@ func (s *exprSwitch) flush() {
},
func(i int, nif *ir.IfStmt) {
run := runs[i]
nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, ir.NewUnaryExpr(base.Pos, ir.OLEN, s.exprname), nodintconst(runLen(run))))
s.search(run, nif.PtrBody())
nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, ir.NewUnaryExpr(base.Pos, ir.OLEN, s.exprname), nodintconst(runLen(run)))
s.search(run, &nif.Body)
},
)
return
@ -437,8 +437,8 @@ func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
},
func(i int, nif *ir.IfStmt) {
c := &cc[i]
nif.SetLeft(c.test(s.exprname))
nif.PtrBody().Set1(c.jmp)
nif.Cond = c.test(s.exprname)
nif.Body.Set1(c.jmp)
},
)
}
@ -471,9 +471,9 @@ func allCaseExprsAreSideEffectFree(sw *ir.SwitchStmt) bool {
// Restricting to constants is simple and probably powerful
// enough.
for _, ncase := range sw.List().Slice() {
for _, ncase := range sw.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
for _, v := range ncase.List().Slice() {
for _, v := range ncase.List.Slice() {
if v.Op() != ir.OLITERAL {
return false
}
@ -504,11 +504,11 @@ func endsInFallthrough(stmts []ir.Node) (bool, src.XPos) {
// type switch.
func walkTypeSwitch(sw *ir.SwitchStmt) {
var s typeSwitch
s.facename = sw.Left().(*ir.TypeSwitchGuard).Right()
sw.SetLeft(nil)
s.facename = sw.Tag.(*ir.TypeSwitchGuard).X
sw.Tag = nil
s.facename = walkexpr(s.facename, sw.PtrInit())
s.facename = copyexpr(s.facename, s.facename.Type(), sw.PtrBody())
s.facename = copyexpr(s.facename, s.facename.Type(), &sw.Compiled)
s.okname = temp(types.Types[types.TBOOL])
// Get interface descriptor word.
@ -523,55 +523,55 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
// h := e._type.hash
// Use a similar strategy for non-empty interfaces.
ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
ifNil.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, nodnil()))
ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, nodnil())
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
ifNil.SetLeft(typecheck(ifNil.Left(), ctxExpr))
ifNil.SetLeft(defaultlit(ifNil.Left(), nil))
ifNil.Cond = typecheck(ifNil.Cond, ctxExpr)
ifNil.Cond = defaultlit(ifNil.Cond, nil)
// ifNil.Nbody assigned at end.
sw.PtrBody().Append(ifNil)
sw.Compiled.Append(ifNil)
// Load hash from type or itab.
dotHash := ir.NewSelectorExpr(base.Pos, ir.ODOTPTR, itab, nil)
dotHash.SetType(types.Types[types.TUINT32])
dotHash.SetTypecheck(1)
if s.facename.Type().IsEmptyInterface() {
dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime._type
dotHash.Offset = int64(2 * Widthptr) // offset of hash in runtime._type
} else {
dotHash.SetOffset(int64(2 * Widthptr)) // offset of hash in runtime.itab
dotHash.Offset = int64(2 * Widthptr) // offset of hash in runtime.itab
}
dotHash.SetBounded(true) // guaranteed not to fault
s.hashname = copyexpr(dotHash, dotHash.Type(), sw.PtrBody())
s.hashname = copyexpr(dotHash, dotHash.Type(), &sw.Compiled)
br := ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)
var defaultGoto, nilGoto ir.Node
var body ir.Nodes
for _, ncase := range sw.List().Slice() {
for _, ncase := range sw.Cases.Slice() {
ncase := ncase.(*ir.CaseStmt)
var caseVar ir.Node
if ncase.Rlist().Len() != 0 {
caseVar = ncase.Rlist().First()
if ncase.Vars.Len() != 0 {
caseVar = ncase.Vars.First()
}
// For single-type cases with an interface type,
// we initialize the case variable as part of the type assertion.
// In other cases, we initialize it in the body.
var singleType *types.Type
if ncase.List().Len() == 1 && ncase.List().First().Op() == ir.OTYPE {
singleType = ncase.List().First().Type()
if ncase.List.Len() == 1 && ncase.List.First().Op() == ir.OTYPE {
singleType = ncase.List.First().Type()
}
caseVarInitialized := false
label := autolabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
if ncase.List().Len() == 0 { // default:
if ncase.List.Len() == 0 { // default:
if defaultGoto != nil {
base.Fatalf("duplicate default case not detected during typechecking")
}
defaultGoto = jmp
}
for _, n1 := range ncase.List().Slice() {
for _, n1 := range ncase.List.Slice() {
if ir.IsNil(n1) { // case nil:
if nilGoto != nil {
base.Fatalf("duplicate nil case not detected during typechecking")
@ -605,10 +605,10 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
typecheckslice(l, ctxStmt)
body.Append(l...)
}
body.Append(ncase.Body().Slice()...)
body.Append(ncase.Body.Slice()...)
body.Append(br)
}
sw.PtrList().Set(nil)
sw.Cases.Set(nil)
if defaultGoto == nil {
defaultGoto = br
@ -616,13 +616,13 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
if nilGoto == nil {
nilGoto = defaultGoto
}
ifNil.PtrBody().Set1(nilGoto)
ifNil.Body.Set1(nilGoto)
s.Emit(sw.PtrBody())
sw.PtrBody().Append(defaultGoto)
sw.PtrBody().AppendNodes(&body)
s.Emit(&sw.Compiled)
sw.Compiled.Append(defaultGoto)
sw.Compiled.AppendNodes(&body)
walkstmtlist(sw.Body().Slice())
walkstmtlist(sw.Compiled.Slice())
}
// A typeSwitch walks a type switch.
@ -656,16 +656,16 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
// cv, ok = iface.(type)
as := ir.NewAssignListStmt(pos, ir.OAS2, nil, nil)
as.PtrList().Set2(caseVar, s.okname) // cv, ok =
as.Lhs.Set2(caseVar, s.okname) // cv, ok =
dot := ir.NewTypeAssertExpr(pos, s.facename, nil)
dot.SetType(typ) // iface.(type)
as.PtrRlist().Set1(dot)
as.Rhs.Set1(dot)
appendWalkStmt(&body, as)
// if ok { goto label }
nif := ir.NewIfStmt(pos, nil, nil, nil)
nif.SetLeft(s.okname)
nif.PtrBody().Set1(jmp)
nif.Cond = s.okname
nif.Body.Set1(jmp)
body.Append(nif)
if !typ.IsInterface() {
@ -714,8 +714,8 @@ func (s *typeSwitch) flush() {
// TODO(mdempsky): Omit hash equality check if
// there's only one type.
c := cc[i]
nif.SetLeft(ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, nodintconst(int64(c.hash))))
nif.PtrBody().AppendNodes(&c.body)
nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, s.hashname, nodintconst(int64(c.hash)))
nif.Body.AppendNodes(&c.body)
},
)
}
@ -740,22 +740,22 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i in
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
leaf(i, nif)
base.Pos = base.Pos.WithNotStmt()
nif.SetLeft(typecheck(nif.Left(), ctxExpr))
nif.SetLeft(defaultlit(nif.Left(), nil))
nif.Cond = typecheck(nif.Cond, ctxExpr)
nif.Cond = defaultlit(nif.Cond, nil)
out.Append(nif)
out = nif.PtrRlist()
out = &nif.Else
}
return
}
half := lo + n/2
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
nif.SetLeft(less(half))
nif.Cond = less(half)
base.Pos = base.Pos.WithNotStmt()
nif.SetLeft(typecheck(nif.Left(), ctxExpr))
nif.SetLeft(defaultlit(nif.Left(), nil))
do(lo, half, nif.PtrBody())
do(half, hi, nif.PtrRlist())
nif.Cond = typecheck(nif.Cond, ctxExpr)
nif.Cond = defaultlit(nif.Cond, nil)
do(lo, half, &nif.Body)
do(half, hi, &nif.Else)
out.Append(nif)
}

File diff suppressed because it is too large Load diff

View file

@ -152,14 +152,14 @@ func initUniverse() {
for _, s := range &builtinFuncs {
s2 := types.BuiltinPkg.Lookup(s.name)
def := NewName(s2)
def.SetSubOp(s.op)
def.BuiltinOp = s.op
s2.Def = def
}
for _, s := range &unsafeFuncs {
s2 := unsafepkg.Lookup(s.name)
def := NewName(s2)
def.SetSubOp(s.op)
def.BuiltinOp = s.op
s2.Def = def
}
@ -342,6 +342,6 @@ func finishUniverse() {
nodfp = NewName(lookup(".fp"))
nodfp.SetType(types.Types[types.TINT32])
nodfp.SetClass(ir.PPARAM)
nodfp.Class_ = ir.PPARAM
nodfp.SetUsed(true)
}

View file

@ -14,9 +14,9 @@ func evalunsafe(n ir.Node) int64 {
switch n.Op() {
case ir.OALIGNOF, ir.OSIZEOF:
n := n.(*ir.UnaryExpr)
n.SetLeft(typecheck(n.Left(), ctxExpr))
n.SetLeft(defaultlit(n.Left(), nil))
tr := n.Left().Type()
n.X = typecheck(n.X, ctxExpr)
n.X = defaultlit(n.X, nil)
tr := n.X.Type()
if tr == nil {
return 0
}
@ -29,20 +29,20 @@ func evalunsafe(n ir.Node) int64 {
case ir.OOFFSETOF:
// must be a selector.
n := n.(*ir.UnaryExpr)
if n.Left().Op() != ir.OXDOT {
if n.X.Op() != ir.OXDOT {
base.Errorf("invalid expression %v", n)
return 0
}
sel := n.Left().(*ir.SelectorExpr)
sel := n.X.(*ir.SelectorExpr)
// Remember base of selector to find it back after dot insertion.
// Since r->left may be mutated by typechecking, check it explicitly
// first to track it correctly.
sel.SetLeft(typecheck(sel.Left(), ctxExpr))
sbase := sel.Left()
sel.X = typecheck(sel.X, ctxExpr)
sbase := sel.X
tsel := typecheck(sel, ctxExpr)
n.SetLeft(tsel)
n.X = tsel
if tsel.Type() == nil {
return 0
}
@ -67,15 +67,15 @@ func evalunsafe(n ir.Node) int64 {
// but accessing f must not otherwise involve
// indirection via embedded pointer types.
r := r.(*ir.SelectorExpr)
if r.Left() != sbase {
base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.Left())
if r.X != sbase {
base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.X)
return 0
}
fallthrough
case ir.ODOT:
r := r.(*ir.SelectorExpr)
v += r.Offset()
next = r.Left()
v += r.Offset
next = r.X
default:
ir.Dump("unsafenmagic", tsel)
base.Fatalf("impossible %v node after dot insertion", r.Op())

File diff suppressed because it is too large Load diff

View file

@ -89,7 +89,7 @@ func toNtype(x Node) Ntype {
// An AddStringExpr is a string concatenation Expr[0] + Exprs[1] + ... + Expr[len(Expr)-1].
type AddStringExpr struct {
miniExpr
List_ Nodes
List Nodes
Prealloc *Name
}
@ -97,14 +97,10 @@ func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr {
n := &AddStringExpr{}
n.pos = pos
n.op = OADDSTR
n.List_.Set(list)
n.List.Set(list)
return n
}
func (n *AddStringExpr) List() Nodes { return n.List_ }
func (n *AddStringExpr) PtrList() *Nodes { return &n.List_ }
func (n *AddStringExpr) SetList(x Nodes) { n.List_ = x }
// An AddrExpr is an address-of expression &X.
// It may end up being a normal address-of or an allocation of a composite literal.
type AddrExpr struct {
@ -120,10 +116,6 @@ func NewAddrExpr(pos src.XPos, x Node) *AddrExpr {
return n
}
func (n *AddrExpr) Left() Node { return n.X }
func (n *AddrExpr) SetLeft(x Node) { n.X = x }
func (n *AddrExpr) Right() Node { return n.Alloc }
func (n *AddrExpr) SetRight(x Node) { n.Alloc = x }
func (n *AddrExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *AddrExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
@ -170,11 +162,6 @@ func NewBinaryExpr(pos src.XPos, op Op, x, y Node) *BinaryExpr {
return n
}
func (n *BinaryExpr) Left() Node { return n.X }
func (n *BinaryExpr) SetLeft(x Node) { n.X = x }
func (n *BinaryExpr) Right() Node { return n.Y }
func (n *BinaryExpr) SetRight(y Node) { n.Y = y }
func (n *BinaryExpr) SetOp(op Op) {
switch op {
default:
@ -201,14 +188,14 @@ const (
// A CallExpr is a function call X(Args).
type CallExpr struct {
miniExpr
orig Node
X Node
Args Nodes
Rargs Nodes // TODO(rsc): Delete.
Body_ Nodes // TODO(rsc): Delete.
DDD bool
Use CallUse
NoInline_ bool
orig Node
X Node
Args Nodes
Rargs Nodes // TODO(rsc): Delete.
Body Nodes // TODO(rsc): Delete.
IsDDD bool
Use CallUse
NoInline bool
}
func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr {
@ -222,23 +209,8 @@ func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr {
func (*CallExpr) isStmt() {}
func (n *CallExpr) Orig() Node { return n.orig }
func (n *CallExpr) SetOrig(x Node) { n.orig = x }
func (n *CallExpr) Left() Node { return n.X }
func (n *CallExpr) SetLeft(x Node) { n.X = x }
func (n *CallExpr) List() Nodes { return n.Args }
func (n *CallExpr) PtrList() *Nodes { return &n.Args }
func (n *CallExpr) SetList(x Nodes) { n.Args = x }
func (n *CallExpr) Rlist() Nodes { return n.Rargs }
func (n *CallExpr) PtrRlist() *Nodes { return &n.Rargs }
func (n *CallExpr) SetRlist(x Nodes) { n.Rargs = x }
func (n *CallExpr) IsDDD() bool { return n.DDD }
func (n *CallExpr) SetIsDDD(x bool) { n.DDD = x }
func (n *CallExpr) NoInline() bool { return n.NoInline_ }
func (n *CallExpr) SetNoInline(x bool) { n.NoInline_ = x }
func (n *CallExpr) Body() Nodes { return n.Body_ }
func (n *CallExpr) PtrBody() *Nodes { return &n.Body_ }
func (n *CallExpr) SetBody(x Nodes) { n.Body_ = x }
func (n *CallExpr) Orig() Node { return n.orig }
func (n *CallExpr) SetOrig(x Node) { n.orig = x }
func (n *CallExpr) SetOp(op Op) {
switch op {
@ -253,65 +225,57 @@ func (n *CallExpr) SetOp(op Op) {
// A CallPartExpr is a method expression X.Method (uncalled).
type CallPartExpr struct {
miniExpr
Func_ *Func
Func *Func
X Node
Method *types.Field
Prealloc *Name
}
func NewCallPartExpr(pos src.XPos, x Node, method *types.Field, fn *Func) *CallPartExpr {
n := &CallPartExpr{Func_: fn, X: x, Method: method}
n := &CallPartExpr{Func: fn, X: x, Method: method}
n.op = OCALLPART
n.pos = pos
n.typ = fn.Type()
n.Func_ = fn
n.Func = fn
return n
}
func (n *CallPartExpr) Func() *Func { return n.Func_ }
func (n *CallPartExpr) Left() Node { return n.X }
func (n *CallPartExpr) Sym() *types.Sym { return n.Method.Sym }
func (n *CallPartExpr) SetLeft(x Node) { n.X = x }
// A ClosureExpr is a function literal expression.
type ClosureExpr struct {
miniExpr
Func_ *Func
Func *Func
Prealloc *Name
}
func NewClosureExpr(pos src.XPos, fn *Func) *ClosureExpr {
n := &ClosureExpr{Func_: fn}
n := &ClosureExpr{Func: fn}
n.op = OCLOSURE
n.pos = pos
return n
}
func (n *ClosureExpr) Func() *Func { return n.Func_ }
// A ClosureRead denotes reading a variable stored within a closure struct.
type ClosureReadExpr struct {
miniExpr
Offset_ int64
Offset int64
}
func NewClosureRead(typ *types.Type, offset int64) *ClosureReadExpr {
n := &ClosureReadExpr{Offset_: offset}
n := &ClosureReadExpr{Offset: offset}
n.typ = typ
n.op = OCLOSUREREAD
return n
}
func (n *ClosureReadExpr) Type() *types.Type { return n.typ }
func (n *ClosureReadExpr) Offset() int64 { return n.Offset_ }
// A CompLitExpr is a composite literal Type{Vals}.
// Before type-checking, the type is Ntype.
type CompLitExpr struct {
miniExpr
orig Node
Ntype Ntype
List_ Nodes // initialized values
List Nodes // initialized values
Prealloc *Name
Len int64 // backing array length for OSLICELIT
}
@ -320,18 +284,13 @@ func NewCompLitExpr(pos src.XPos, op Op, typ Ntype, list []Node) *CompLitExpr {
n := &CompLitExpr{Ntype: typ}
n.pos = pos
n.SetOp(op)
n.List_.Set(list)
n.List.Set(list)
n.orig = n
return n
}
func (n *CompLitExpr) Orig() Node { return n.orig }
func (n *CompLitExpr) SetOrig(x Node) { n.orig = x }
func (n *CompLitExpr) Right() Node { return n.Ntype }
func (n *CompLitExpr) SetRight(x Node) { n.Ntype = toNtype(x) }
func (n *CompLitExpr) List() Nodes { return n.List_ }
func (n *CompLitExpr) PtrList() *Nodes { return &n.List_ }
func (n *CompLitExpr) SetList(x Nodes) { n.List_ = x }
func (n *CompLitExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *CompLitExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
@ -380,8 +339,6 @@ func NewConvExpr(pos src.XPos, op Op, typ *types.Type, x Node) *ConvExpr {
return n
}
func (n *ConvExpr) Left() Node { return n.X }
func (n *ConvExpr) SetLeft(x Node) { n.X = x }
func (n *ConvExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *ConvExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
@ -409,13 +366,6 @@ func NewIndexExpr(pos src.XPos, x, index Node) *IndexExpr {
return n
}
func (n *IndexExpr) Left() Node { return n.X }
func (n *IndexExpr) SetLeft(x Node) { n.X = x }
func (n *IndexExpr) Right() Node { return n.Index }
func (n *IndexExpr) SetRight(y Node) { n.Index = y }
func (n *IndexExpr) IndexMapLValue() bool { return n.Assigned }
func (n *IndexExpr) SetIndexMapLValue(x bool) { n.Assigned = x }
func (n *IndexExpr) SetOp(op Op) {
switch op {
default:
@ -439,38 +389,28 @@ func NewKeyExpr(pos src.XPos, key, value Node) *KeyExpr {
return n
}
func (n *KeyExpr) Left() Node { return n.Key }
func (n *KeyExpr) SetLeft(x Node) { n.Key = x }
func (n *KeyExpr) Right() Node { return n.Value }
func (n *KeyExpr) SetRight(y Node) { n.Value = y }
// A StructKeyExpr is an Field: Value composite literal key.
type StructKeyExpr struct {
miniExpr
Field *types.Sym
Value Node
Offset_ int64
Field *types.Sym
Value Node
Offset int64
}
func NewStructKeyExpr(pos src.XPos, field *types.Sym, value Node) *StructKeyExpr {
n := &StructKeyExpr{Field: field, Value: value}
n.pos = pos
n.op = OSTRUCTKEY
n.Offset_ = types.BADWIDTH
n.Offset = types.BADWIDTH
return n
}
func (n *StructKeyExpr) Sym() *types.Sym { return n.Field }
func (n *StructKeyExpr) SetSym(x *types.Sym) { n.Field = x }
func (n *StructKeyExpr) Left() Node { return n.Value }
func (n *StructKeyExpr) SetLeft(x Node) { n.Value = x }
func (n *StructKeyExpr) Offset() int64 { return n.Offset_ }
func (n *StructKeyExpr) SetOffset(x int64) { n.Offset_ = x }
func (n *StructKeyExpr) Sym() *types.Sym { return n.Field }
// An InlinedCallExpr is an inlined function call.
type InlinedCallExpr struct {
miniExpr
Body_ Nodes
Body Nodes
ReturnVars Nodes
}
@ -478,18 +418,11 @@ func NewInlinedCallExpr(pos src.XPos, body, retvars []Node) *InlinedCallExpr {
n := &InlinedCallExpr{}
n.pos = pos
n.op = OINLCALL
n.Body_.Set(body)
n.Body.Set(body)
n.ReturnVars.Set(retvars)
return n
}
func (n *InlinedCallExpr) Body() Nodes { return n.Body_ }
func (n *InlinedCallExpr) PtrBody() *Nodes { return &n.Body_ }
func (n *InlinedCallExpr) SetBody(x Nodes) { n.Body_ = x }
func (n *InlinedCallExpr) Rlist() Nodes { return n.ReturnVars }
func (n *InlinedCallExpr) PtrRlist() *Nodes { return &n.ReturnVars }
func (n *InlinedCallExpr) SetRlist(x Nodes) { n.ReturnVars = x }
// A LogicalExpr is a expression X Op Y where Op is && or ||.
// It is separate from BinaryExpr to make room for statements
// that must be executed before Y but after X.
@ -506,11 +439,6 @@ func NewLogicalExpr(pos src.XPos, op Op, x, y Node) *LogicalExpr {
return n
}
func (n *LogicalExpr) Left() Node { return n.X }
func (n *LogicalExpr) SetLeft(x Node) { n.X = x }
func (n *LogicalExpr) Right() Node { return n.Y }
func (n *LogicalExpr) SetRight(y Node) { n.Y = y }
func (n *LogicalExpr) SetOp(op Op) {
switch op {
default:
@ -536,11 +464,6 @@ func NewMakeExpr(pos src.XPos, op Op, len, cap Node) *MakeExpr {
return n
}
func (n *MakeExpr) Left() Node { return n.Len }
func (n *MakeExpr) SetLeft(x Node) { n.Len = x }
func (n *MakeExpr) Right() Node { return n.Cap }
func (n *MakeExpr) SetRight(x Node) { n.Cap = x }
func (n *MakeExpr) SetOp(op Op) {
switch op {
default:
@ -565,16 +488,8 @@ func NewMethodExpr(pos src.XPos, t *types.Type, method *types.Field) *MethodExpr
return n
}
func (n *MethodExpr) FuncName() *Name { return n.FuncName_ }
func (n *MethodExpr) Left() Node { panic("MethodExpr.Left") }
func (n *MethodExpr) SetLeft(x Node) { panic("MethodExpr.SetLeft") }
func (n *MethodExpr) Right() Node { panic("MethodExpr.Right") }
func (n *MethodExpr) SetRight(x Node) { panic("MethodExpr.SetRight") }
func (n *MethodExpr) Sym() *types.Sym { panic("MethodExpr.Sym") }
func (n *MethodExpr) Offset() int64 { panic("MethodExpr.Offset") }
func (n *MethodExpr) SetOffset(x int64) { panic("MethodExpr.SetOffset") }
func (n *MethodExpr) Class() Class { panic("MethodExpr.Class") }
func (n *MethodExpr) SetClass(x Class) { panic("MethodExpr.SetClass") }
func (n *MethodExpr) FuncName() *Name { return n.FuncName_ }
func (n *MethodExpr) Sym() *types.Sym { panic("MethodExpr.Sym") }
// A NilExpr represents the predefined untyped constant nil.
// (It may be copied and assigned a type, though.)
@ -607,8 +522,6 @@ func NewParenExpr(pos src.XPos, x Node) *ParenExpr {
return n
}
func (n *ParenExpr) Left() Node { return n.X }
func (n *ParenExpr) SetLeft(x Node) { n.X = x }
func (n *ParenExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *ParenExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
@ -625,20 +538,17 @@ func (n *ParenExpr) SetOTYPE(t *types.Type) {
// A ResultExpr represents a direct access to a result slot on the stack frame.
type ResultExpr struct {
miniExpr
Offset_ int64
Offset int64
}
func NewResultExpr(pos src.XPos, typ *types.Type, offset int64) *ResultExpr {
n := &ResultExpr{Offset_: offset}
n := &ResultExpr{Offset: offset}
n.pos = pos
n.op = ORESULT
n.typ = typ
return n
}
func (n *ResultExpr) Offset() int64 { return n.Offset_ }
func (n *ResultExpr) SetOffset(x int64) { n.Offset_ = x }
// A NameOffsetExpr refers to an offset within a variable.
// It is like a SelectorExpr but without the field name.
type NameOffsetExpr struct {
@ -659,14 +569,14 @@ type SelectorExpr struct {
miniExpr
X Node
Sel *types.Sym
Offset_ int64
Offset int64
Selection *types.Field
}
func NewSelectorExpr(pos src.XPos, op Op, x Node, sel *types.Sym) *SelectorExpr {
n := &SelectorExpr{X: x, Sel: sel}
n.pos = pos
n.Offset_ = types.BADWIDTH
n.Offset = types.BADWIDTH
n.SetOp(op)
return n
}
@ -680,14 +590,9 @@ func (n *SelectorExpr) SetOp(op Op) {
}
}
func (n *SelectorExpr) Left() Node { return n.X }
func (n *SelectorExpr) SetLeft(x Node) { n.X = x }
func (n *SelectorExpr) Sym() *types.Sym { return n.Sel }
func (n *SelectorExpr) SetSym(x *types.Sym) { n.Sel = x }
func (n *SelectorExpr) Offset() int64 { return n.Offset_ }
func (n *SelectorExpr) SetOffset(x int64) { n.Offset_ = x }
func (n *SelectorExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *SelectorExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *SelectorExpr) Sym() *types.Sym { return n.Sel }
func (n *SelectorExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *SelectorExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
// Before type-checking, bytes.Buffer is a SelectorExpr.
// After type-checking it becomes a Name.
@ -696,8 +601,8 @@ func (*SelectorExpr) CanBeNtype() {}
// A SliceExpr is a slice expression X[Low:High] or X[Low:High:Max].
type SliceExpr struct {
miniExpr
X Node
List_ Nodes // TODO(rsc): Use separate Nodes
X Node
List Nodes // TODO(rsc): Use separate Nodes
}
func NewSliceExpr(pos src.XPos, op Op, x Node) *SliceExpr {
@ -707,12 +612,6 @@ func NewSliceExpr(pos src.XPos, op Op, x Node) *SliceExpr {
return n
}
func (n *SliceExpr) Left() Node { return n.X }
func (n *SliceExpr) SetLeft(x Node) { n.X = x }
func (n *SliceExpr) List() Nodes { return n.List_ }
func (n *SliceExpr) PtrList() *Nodes { return &n.List_ }
func (n *SliceExpr) SetList(x Nodes) { n.List_ = x }
func (n *SliceExpr) SetOp(op Op) {
switch op {
default:
@ -725,16 +624,16 @@ func (n *SliceExpr) SetOp(op Op) {
// SliceBounds returns n's slice bounds: low, high, and max in expr[low:high:max].
// n must be a slice expression. max is nil if n is a simple slice expression.
func (n *SliceExpr) SliceBounds() (low, high, max Node) {
if n.List_.Len() == 0 {
if n.List.Len() == 0 {
return nil, nil, nil
}
switch n.Op() {
case OSLICE, OSLICEARR, OSLICESTR:
s := n.List_.Slice()
s := n.List.Slice()
return s[0], s[1], nil
case OSLICE3, OSLICE3ARR:
s := n.List_.Slice()
s := n.List.Slice()
return s[0], s[1], s[2]
}
base.Fatalf("SliceBounds op %v: %v", n.Op(), n)
@ -749,24 +648,24 @@ func (n *SliceExpr) SetSliceBounds(low, high, max Node) {
if max != nil {
base.Fatalf("SetSliceBounds %v given three bounds", n.Op())
}
s := n.List_.Slice()
s := n.List.Slice()
if s == nil {
if low == nil && high == nil {
return
}
n.List_.Set2(low, high)
n.List.Set2(low, high)
return
}
s[0] = low
s[1] = high
return
case OSLICE3, OSLICE3ARR:
s := n.List_.Slice()
s := n.List.Slice()
if s == nil {
if low == nil && high == nil && max == nil {
return
}
n.List_.Set3(low, high, max)
n.List.Set3(low, high, max)
return
}
s[0] = low
@ -793,8 +692,8 @@ func (o Op) IsSlice3() bool {
// A SliceHeader expression constructs a slice header from its parts.
type SliceHeaderExpr struct {
miniExpr
Ptr Node
LenCap_ Nodes // TODO(rsc): Split into two Node fields
Ptr Node
LenCap Nodes // TODO(rsc): Split into two Node fields
}
func NewSliceHeaderExpr(pos src.XPos, typ *types.Type, ptr, len, cap Node) *SliceHeaderExpr {
@ -802,16 +701,10 @@ func NewSliceHeaderExpr(pos src.XPos, typ *types.Type, ptr, len, cap Node) *Slic
n.pos = pos
n.op = OSLICEHEADER
n.typ = typ
n.LenCap_.Set2(len, cap)
n.LenCap.Set2(len, cap)
return n
}
func (n *SliceHeaderExpr) Left() Node { return n.Ptr }
func (n *SliceHeaderExpr) SetLeft(x Node) { n.Ptr = x }
func (n *SliceHeaderExpr) List() Nodes { return n.LenCap_ }
func (n *SliceHeaderExpr) PtrList() *Nodes { return &n.LenCap_ }
func (n *SliceHeaderExpr) SetList(x Nodes) { n.LenCap_ = x }
// A StarExpr is a dereference expression *X.
// It may end up being a value or a type.
type StarExpr struct {
@ -826,8 +719,6 @@ func NewStarExpr(pos src.XPos, x Node) *StarExpr {
return n
}
func (n *StarExpr) Left() Node { return n.X }
func (n *StarExpr) SetLeft(x Node) { n.X = x }
func (n *StarExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *StarExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
@ -858,14 +749,6 @@ func NewTypeAssertExpr(pos src.XPos, x Node, typ Ntype) *TypeAssertExpr {
return n
}
func (n *TypeAssertExpr) Left() Node { return n.X }
func (n *TypeAssertExpr) SetLeft(x Node) { n.X = x }
func (n *TypeAssertExpr) Right() Node { return n.Ntype }
func (n *TypeAssertExpr) SetRight(x Node) { n.Ntype = x } // TODO: toNtype(x)
func (n *TypeAssertExpr) List() Nodes { return n.Itab }
func (n *TypeAssertExpr) PtrList() *Nodes { return &n.Itab }
func (n *TypeAssertExpr) SetList(x Nodes) { n.Itab = x }
func (n *TypeAssertExpr) SetOp(op Op) {
switch op {
default:
@ -889,9 +772,6 @@ func NewUnaryExpr(pos src.XPos, op Op, x Node) *UnaryExpr {
return n
}
func (n *UnaryExpr) Left() Node { return n.X }
func (n *UnaryExpr) SetLeft(x Node) { n.X = x }
func (n *UnaryExpr) SetOp(op Op) {
switch op {
default:

View file

@ -332,75 +332,75 @@ func stmtFmt(n Node, s fmt.State) {
switch n.Op() {
case ODCL:
n := n.(*Decl)
fmt.Fprintf(s, "var %v %v", n.Left().Sym(), n.Left().Type())
fmt.Fprintf(s, "var %v %v", n.X.Sym(), n.X.Type())
// Don't export "v = <N>" initializing statements, hope they're always
// preceded by the DCL which will be re-parsed and typechecked to reproduce
// the "v = <N>" again.
case OAS:
n := n.(*AssignStmt)
if n.Colas() && !complexinit {
fmt.Fprintf(s, "%v := %v", n.Left(), n.Right())
if n.Def && !complexinit {
fmt.Fprintf(s, "%v := %v", n.X, n.Y)
} else {
fmt.Fprintf(s, "%v = %v", n.Left(), n.Right())
fmt.Fprintf(s, "%v = %v", n.X, n.Y)
}
case OASOP:
n := n.(*AssignOpStmt)
if n.Implicit() {
if n.SubOp() == OADD {
fmt.Fprintf(s, "%v++", n.Left())
if n.IncDec {
if n.AsOp == OADD {
fmt.Fprintf(s, "%v++", n.X)
} else {
fmt.Fprintf(s, "%v--", n.Left())
fmt.Fprintf(s, "%v--", n.X)
}
break
}
fmt.Fprintf(s, "%v %v= %v", n.Left(), n.SubOp(), n.Right())
fmt.Fprintf(s, "%v %v= %v", n.X, n.AsOp, n.Y)
case OAS2, OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
n := n.(*AssignListStmt)
if n.Colas() && !complexinit {
fmt.Fprintf(s, "%.v := %.v", n.List(), n.Rlist())
if n.Def && !complexinit {
fmt.Fprintf(s, "%.v := %.v", n.Lhs, n.Rhs)
} else {
fmt.Fprintf(s, "%.v = %.v", n.List(), n.Rlist())
fmt.Fprintf(s, "%.v = %.v", n.Lhs, n.Rhs)
}
case OBLOCK:
n := n.(*BlockStmt)
if n.List().Len() != 0 {
fmt.Fprintf(s, "%v", n.List())
if n.List.Len() != 0 {
fmt.Fprintf(s, "%v", n.List)
}
case ORETURN:
n := n.(*ReturnStmt)
fmt.Fprintf(s, "return %.v", n.List())
fmt.Fprintf(s, "return %.v", n.Results)
case ORETJMP:
n := n.(*BranchStmt)
fmt.Fprintf(s, "retjmp %v", n.Sym())
fmt.Fprintf(s, "retjmp %v", n.Label)
case OINLMARK:
n := n.(*InlineMarkStmt)
fmt.Fprintf(s, "inlmark %d", n.Offset())
fmt.Fprintf(s, "inlmark %d", n.Index)
case OGO:
n := n.(*GoDeferStmt)
fmt.Fprintf(s, "go %v", n.Left())
fmt.Fprintf(s, "go %v", n.Call)
case ODEFER:
n := n.(*GoDeferStmt)
fmt.Fprintf(s, "defer %v", n.Left())
fmt.Fprintf(s, "defer %v", n.Call)
case OIF:
n := n.(*IfStmt)
if simpleinit {
fmt.Fprintf(s, "if %v; %v { %v }", n.Init().First(), n.Left(), n.Body())
fmt.Fprintf(s, "if %v; %v { %v }", n.Init().First(), n.Cond, n.Body)
} else {
fmt.Fprintf(s, "if %v { %v }", n.Left(), n.Body())
fmt.Fprintf(s, "if %v { %v }", n.Cond, n.Body)
}
if n.Rlist().Len() != 0 {
fmt.Fprintf(s, " else { %v }", n.Rlist())
if n.Else.Len() != 0 {
fmt.Fprintf(s, " else { %v }", n.Else)
}
case OFOR, OFORUNTIL:
@ -417,25 +417,25 @@ func stmtFmt(n Node, s fmt.State) {
fmt.Fprint(s, opname)
if simpleinit {
fmt.Fprintf(s, " %v;", n.Init().First())
} else if n.Right() != nil {
} else if n.Post != nil {
fmt.Fprint(s, " ;")
}
if n.Left() != nil {
fmt.Fprintf(s, " %v", n.Left())
if n.Cond != nil {
fmt.Fprintf(s, " %v", n.Cond)
}
if n.Right() != nil {
fmt.Fprintf(s, "; %v", n.Right())
if n.Post != nil {
fmt.Fprintf(s, "; %v", n.Post)
} else if simpleinit {
fmt.Fprint(s, ";")
}
if n.Op() == OFORUNTIL && n.List().Len() != 0 {
fmt.Fprintf(s, "; %v", n.List())
if n.Op() == OFORUNTIL && n.Late.Len() != 0 {
fmt.Fprintf(s, "; %v", n.Late)
}
fmt.Fprintf(s, " { %v }", n.Body())
fmt.Fprintf(s, " { %v }", n.Body)
case ORANGE:
n := n.(*RangeStmt)
@ -444,12 +444,12 @@ func stmtFmt(n Node, s fmt.State) {
break
}
if n.List().Len() == 0 {
fmt.Fprintf(s, "for range %v { %v }", n.Right(), n.Body())
if n.Vars.Len() == 0 {
fmt.Fprintf(s, "for range %v { %v }", n.X, n.Body)
break
}
fmt.Fprintf(s, "for %.v = range %v { %v }", n.List(), n.Right(), n.Body())
fmt.Fprintf(s, "for %.v = range %v { %v }", n.Vars, n.X, n.Body)
case OSELECT:
n := n.(*SelectStmt)
@ -457,7 +457,7 @@ func stmtFmt(n Node, s fmt.State) {
fmt.Fprintf(s, "%v statement", n.Op())
break
}
fmt.Fprintf(s, "select { %v }", n.List())
fmt.Fprintf(s, "select { %v }", n.Cases)
case OSWITCH:
n := n.(*SwitchStmt)
@ -469,31 +469,31 @@ func stmtFmt(n Node, s fmt.State) {
if simpleinit {
fmt.Fprintf(s, " %v;", n.Init().First())
}
if n.Left() != nil {
fmt.Fprintf(s, " %v ", n.Left())
if n.Tag != nil {
fmt.Fprintf(s, " %v ", n.Tag)
}
fmt.Fprintf(s, " { %v }", n.List())
fmt.Fprintf(s, " { %v }", n.Cases)
case OCASE:
n := n.(*CaseStmt)
if n.List().Len() != 0 {
fmt.Fprintf(s, "case %.v", n.List())
if n.List.Len() != 0 {
fmt.Fprintf(s, "case %.v", n.List)
} else {
fmt.Fprint(s, "default")
}
fmt.Fprintf(s, ": %v", n.Body())
fmt.Fprintf(s, ": %v", n.Body)
case OBREAK, OCONTINUE, OGOTO, OFALL:
n := n.(*BranchStmt)
if n.Sym() != nil {
fmt.Fprintf(s, "%v %v", n.Op(), n.Sym())
if n.Label != nil {
fmt.Fprintf(s, "%v %v", n.Op(), n.Label)
} else {
fmt.Fprintf(s, "%v", n.Op())
}
case OLABEL:
n := n.(*LabelStmt)
fmt.Fprintf(s, "%v: ", n.Sym())
fmt.Fprintf(s, "%v: ", n.Label)
}
if extrablock {
@ -527,19 +527,19 @@ func exprFmt(n Node, s fmt.State, prec int) {
case OADDR:
nn := nn.(*AddrExpr)
if nn.Implicit() {
n = nn.Left()
n = nn.X
continue
}
case ODEREF:
nn := nn.(*StarExpr)
if nn.Implicit() {
n = nn.Left()
n = nn.X
continue
}
case OCONV, OCONVNOP, OCONVIFACE:
nn := nn.(*ConvExpr)
if nn.Implicit() {
n = nn.Left()
n = nn.X
continue
}
}
@ -560,7 +560,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
switch n.Op() {
case OPAREN:
n := n.(*ParenExpr)
fmt.Fprintf(s, "(%v)", n.Left())
fmt.Fprintf(s, "(%v)", n.X)
case ONIL:
fmt.Fprint(s, "nil")
@ -694,7 +694,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
fmt.Fprint(s, "func literal")
return
}
fmt.Fprintf(s, "%v { %v }", n.Type(), n.Func().Body())
fmt.Fprintf(s, "%v { %v }", n.Type(), n.Func.Body)
case OCOMPLIT:
n := n.(*CompLitExpr)
@ -703,84 +703,84 @@ func exprFmt(n Node, s fmt.State, prec int) {
fmt.Fprintf(s, "... argument")
return
}
if n.Right() != nil {
fmt.Fprintf(s, "%v{%s}", n.Right(), ellipsisIf(n.List().Len() != 0))
if n.Ntype != nil {
fmt.Fprintf(s, "%v{%s}", n.Ntype, ellipsisIf(n.List.Len() != 0))
return
}
fmt.Fprint(s, "composite literal")
return
}
fmt.Fprintf(s, "(%v{ %.v })", n.Right(), n.List())
fmt.Fprintf(s, "(%v{ %.v })", n.Ntype, n.List)
case OPTRLIT:
n := n.(*AddrExpr)
fmt.Fprintf(s, "&%v", n.Left())
fmt.Fprintf(s, "&%v", n.X)
case OSTRUCTLIT, OARRAYLIT, OSLICELIT, OMAPLIT:
n := n.(*CompLitExpr)
if !exportFormat {
fmt.Fprintf(s, "%v{%s}", n.Type(), ellipsisIf(n.List().Len() != 0))
fmt.Fprintf(s, "%v{%s}", n.Type(), ellipsisIf(n.List.Len() != 0))
return
}
fmt.Fprintf(s, "(%v{ %.v })", n.Type(), n.List())
fmt.Fprintf(s, "(%v{ %.v })", n.Type(), n.List)
case OKEY:
n := n.(*KeyExpr)
if n.Left() != nil && n.Right() != nil {
fmt.Fprintf(s, "%v:%v", n.Left(), n.Right())
if n.Key != nil && n.Value != nil {
fmt.Fprintf(s, "%v:%v", n.Key, n.Value)
return
}
if n.Left() == nil && n.Right() != nil {
fmt.Fprintf(s, ":%v", n.Right())
if n.Key == nil && n.Value != nil {
fmt.Fprintf(s, ":%v", n.Value)
return
}
if n.Left() != nil && n.Right() == nil {
fmt.Fprintf(s, "%v:", n.Left())
if n.Key != nil && n.Value == nil {
fmt.Fprintf(s, "%v:", n.Key)
return
}
fmt.Fprint(s, ":")
case OSTRUCTKEY:
n := n.(*StructKeyExpr)
fmt.Fprintf(s, "%v:%v", n.Sym(), n.Left())
fmt.Fprintf(s, "%v:%v", n.Field, n.Value)
case OCALLPART:
n := n.(*CallPartExpr)
exprFmt(n.Left(), s, nprec)
if n.Sym() == nil {
exprFmt(n.X, s, nprec)
if n.Method.Sym == nil {
fmt.Fprint(s, ".<nil>")
return
}
fmt.Fprintf(s, ".%s", types.SymMethodName(n.Sym()))
fmt.Fprintf(s, ".%s", types.SymMethodName(n.Method.Sym))
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
n := n.(*SelectorExpr)
exprFmt(n.Left(), s, nprec)
if n.Sym() == nil {
exprFmt(n.X, s, nprec)
if n.Sel == nil {
fmt.Fprint(s, ".<nil>")
return
}
fmt.Fprintf(s, ".%s", types.SymMethodName(n.Sym()))
fmt.Fprintf(s, ".%s", types.SymMethodName(n.Sel))
case ODOTTYPE, ODOTTYPE2:
n := n.(*TypeAssertExpr)
exprFmt(n.Left(), s, nprec)
if n.Right() != nil {
fmt.Fprintf(s, ".(%v)", n.Right())
exprFmt(n.X, s, nprec)
if n.Ntype != nil {
fmt.Fprintf(s, ".(%v)", n.Ntype)
return
}
fmt.Fprintf(s, ".(%v)", n.Type())
case OINDEX, OINDEXMAP:
n := n.(*IndexExpr)
exprFmt(n.Left(), s, nprec)
fmt.Fprintf(s, "[%v]", n.Right())
exprFmt(n.X, s, nprec)
fmt.Fprintf(s, "[%v]", n.Index)
case OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
n := n.(*SliceExpr)
exprFmt(n.Left(), s, nprec)
exprFmt(n.X, s, nprec)
fmt.Fprint(s, "[")
low, high, max := n.SliceBounds()
if low != nil {
@ -800,14 +800,14 @@ func exprFmt(n Node, s fmt.State, prec int) {
case OSLICEHEADER:
n := n.(*SliceHeaderExpr)
if n.List().Len() != 2 {
base.Fatalf("bad OSLICEHEADER list length %d", n.List().Len())
if n.LenCap.Len() != 2 {
base.Fatalf("bad OSLICEHEADER list length %d", n.LenCap.Len())
}
fmt.Fprintf(s, "sliceheader{%v,%v,%v}", n.Left(), n.List().First(), n.List().Second())
fmt.Fprintf(s, "sliceheader{%v,%v,%v}", n.Ptr, n.LenCap.First(), n.LenCap.Second())
case OCOMPLEX, OCOPY:
n := n.(*BinaryExpr)
fmt.Fprintf(s, "%v(%v, %v)", n.Op(), n.Left(), n.Right())
fmt.Fprintf(s, "%v(%v, %v)", n.Op(), n.X, n.Y)
case OCONV,
OCONVIFACE,
@ -823,7 +823,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
} else {
fmt.Fprintf(s, "%v", n.Type())
}
fmt.Fprintf(s, "(%v)", n.Left())
fmt.Fprintf(s, "(%v)", n.X)
case OREAL,
OIMAG,
@ -836,7 +836,7 @@ func exprFmt(n Node, s fmt.State, prec int) {
OOFFSETOF,
OSIZEOF:
n := n.(*UnaryExpr)
fmt.Fprintf(s, "%v(%v)", n.Op(), n.Left())
fmt.Fprintf(s, "%v(%v)", n.Op(), n.X)
case OAPPEND,
ODELETE,
@ -845,58 +845,58 @@ func exprFmt(n Node, s fmt.State, prec int) {
OPRINT,
OPRINTN:
n := n.(*CallExpr)
if n.IsDDD() {
fmt.Fprintf(s, "%v(%.v...)", n.Op(), n.List())
if n.IsDDD {
fmt.Fprintf(s, "%v(%.v...)", n.Op(), n.Args)
return
}
fmt.Fprintf(s, "%v(%.v)", n.Op(), n.List())
fmt.Fprintf(s, "%v(%.v)", n.Op(), n.Args)
case OCALL, OCALLFUNC, OCALLINTER, OCALLMETH, OGETG:
n := n.(*CallExpr)
exprFmt(n.Left(), s, nprec)
if n.IsDDD() {
fmt.Fprintf(s, "(%.v...)", n.List())
exprFmt(n.X, s, nprec)
if n.IsDDD {
fmt.Fprintf(s, "(%.v...)", n.Args)
return
}
fmt.Fprintf(s, "(%.v)", n.List())
fmt.Fprintf(s, "(%.v)", n.Args)
case OMAKEMAP, OMAKECHAN, OMAKESLICE:
n := n.(*MakeExpr)
if n.Right() != nil {
fmt.Fprintf(s, "make(%v, %v, %v)", n.Type(), n.Left(), n.Right())
if n.Cap != nil {
fmt.Fprintf(s, "make(%v, %v, %v)", n.Type(), n.Len, n.Cap)
return
}
if n.Left() != nil && (n.Op() == OMAKESLICE || !n.Left().Type().IsUntyped()) {
fmt.Fprintf(s, "make(%v, %v)", n.Type(), n.Left())
if n.Len != nil && (n.Op() == OMAKESLICE || !n.Len.Type().IsUntyped()) {
fmt.Fprintf(s, "make(%v, %v)", n.Type(), n.Len)
return
}
fmt.Fprintf(s, "make(%v)", n.Type())
case OMAKESLICECOPY:
n := n.(*MakeExpr)
fmt.Fprintf(s, "makeslicecopy(%v, %v, %v)", n.Type(), n.Left(), n.Right())
fmt.Fprintf(s, "makeslicecopy(%v, %v, %v)", n.Type(), n.Len, n.Cap)
case OPLUS, ONEG, OBITNOT, ONOT, ORECV:
// Unary
n := n.(*UnaryExpr)
fmt.Fprintf(s, "%v", n.Op())
if n.Left() != nil && n.Left().Op() == n.Op() {
if n.X != nil && n.X.Op() == n.Op() {
fmt.Fprint(s, " ")
}
exprFmt(n.Left(), s, nprec+1)
exprFmt(n.X, s, nprec+1)
case OADDR:
n := n.(*AddrExpr)
fmt.Fprintf(s, "%v", n.Op())
if n.Left() != nil && n.Left().Op() == n.Op() {
if n.X != nil && n.X.Op() == n.Op() {
fmt.Fprint(s, " ")
}
exprFmt(n.Left(), s, nprec+1)
exprFmt(n.X, s, nprec+1)
case ODEREF:
n := n.(*StarExpr)
fmt.Fprintf(s, "%v", n.Op())
exprFmt(n.Left(), s, nprec+1)
exprFmt(n.X, s, nprec+1)
// Binary
case OADD,
@ -917,26 +917,26 @@ func exprFmt(n Node, s fmt.State, prec int) {
OSUB,
OXOR:
n := n.(*BinaryExpr)
exprFmt(n.Left(), s, nprec)
exprFmt(n.X, s, nprec)
fmt.Fprintf(s, " %v ", n.Op())
exprFmt(n.Right(), s, nprec+1)
exprFmt(n.Y, s, nprec+1)
case OANDAND,
OOROR:
n := n.(*LogicalExpr)
exprFmt(n.Left(), s, nprec)
exprFmt(n.X, s, nprec)
fmt.Fprintf(s, " %v ", n.Op())
exprFmt(n.Right(), s, nprec+1)
exprFmt(n.Y, s, nprec+1)
case OSEND:
n := n.(*SendStmt)
exprFmt(n.Left(), s, nprec)
exprFmt(n.Chan, s, nprec)
fmt.Fprintf(s, " <- ")
exprFmt(n.Right(), s, nprec+1)
exprFmt(n.Value, s, nprec+1)
case OADDSTR:
n := n.(*AddStringExpr)
for i, n1 := range n.List().Slice() {
for i, n1 := range n.List.Slice() {
if i != 0 {
fmt.Fprint(s, " + ")
}
@ -1098,7 +1098,7 @@ func dumpNodeHeader(w io.Writer, n Node) {
if n.Op() == OCLOSURE {
n := n.(*ClosureExpr)
if fn := n.Func(); fn != nil && fn.Nname.Sym() != nil {
if fn := n.Func; fn != nil && fn.Nname.Sym() != nil {
fmt.Fprintf(w, " fnName(%+v)", fn.Nname.Sym())
}
}
@ -1169,7 +1169,7 @@ func dumpNode(w io.Writer, n Node, depth int) {
case OASOP:
n := n.(*AssignOpStmt)
fmt.Fprintf(w, "%+v-%+v", n.Op(), n.SubOp())
fmt.Fprintf(w, "%+v-%+v", n.Op(), n.AsOp)
dumpNodeHeader(w, n)
case OTYPE:
@ -1192,18 +1192,18 @@ func dumpNode(w io.Writer, n Node, depth int) {
n := n.(*Func)
fmt.Fprintf(w, "%+v", n.Op())
dumpNodeHeader(w, n)
fn := n.Func()
fn := n
if len(fn.Dcl) > 0 {
indent(w, depth)
fmt.Fprintf(w, "%+v-Dcl", n.Op())
for _, dcl := range n.Func().Dcl {
for _, dcl := range n.Dcl {
dumpNode(w, dcl, depth+1)
}
}
if fn.Body().Len() > 0 {
if fn.Body.Len() > 0 {
indent(w, depth)
fmt.Fprintf(w, "%+v-body", n.Op())
dumpNodes(w, fn.Body(), depth+1)
dumpNodes(w, fn.Body, depth+1)
}
return
}

View file

@ -49,9 +49,9 @@ import (
// pointer from the Func back to the OCALLPART.
type Func struct {
miniNode
typ *types.Type
Body_ Nodes
iota int64
typ *types.Type
Body Nodes
Iota int64
Nname *Name // ONAME node
OClosure *ClosureExpr // OCLOSURE node
@ -110,20 +110,14 @@ func NewFunc(pos src.XPos) *Func {
f := new(Func)
f.pos = pos
f.op = ODCLFUNC
f.iota = -1
f.Iota = -1
return f
}
func (f *Func) isStmt() {}
func (f *Func) Func() *Func { return f }
func (f *Func) Body() Nodes { return f.Body_ }
func (f *Func) PtrBody() *Nodes { return &f.Body_ }
func (f *Func) SetBody(x Nodes) { f.Body_ = x }
func (f *Func) Type() *types.Type { return f.typ }
func (f *Func) SetType(x *types.Type) { f.typ = x }
func (f *Func) Iota() int64 { return f.iota }
func (f *Func) SetIota(x int64) { f.iota = x }
func (f *Func) Sym() *types.Sym {
if f.Nname != nil {
@ -218,11 +212,11 @@ func FuncName(n Node) string {
case *Func:
f = n
case *Name:
f = n.Func()
f = n.Func
case *CallPartExpr:
f = n.Func()
f = n.Func
case *ClosureExpr:
f = n.Func()
f = n.Func
}
if f == nil || f.Nname == nil {
return "<nil>"
@ -245,9 +239,9 @@ func PkgFuncName(n Node) string {
var f *Func
switch n := n.(type) {
case *CallPartExpr:
f = n.Func()
f = n.Func
case *ClosureExpr:
f = n.Func()
f = n.Func
case *Func:
f = n
}

View file

@ -39,7 +39,7 @@ type Name struct {
flags bitset16
pragma PragmaFlag // int16
sym *types.Sym
fn *Func
Func *Func
Offset_ int64
val constant.Value
orig Node
@ -225,8 +225,7 @@ func (n *Name) SubOp() Op { return n.BuiltinOp }
func (n *Name) SetSubOp(x Op) { n.BuiltinOp = x }
func (n *Name) Class() Class { return n.Class_ }
func (n *Name) SetClass(x Class) { n.Class_ = x }
func (n *Name) Func() *Func { return n.fn }
func (n *Name) SetFunc(x *Func) { n.fn = x }
func (n *Name) SetFunc(x *Func) { n.Func = x }
func (n *Name) Offset() int64 { panic("Name.Offset") }
func (n *Name) SetOffset(x int64) {
if x != 0 {

View file

@ -8,18 +8,18 @@ func (n *AddStringExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *AddStringExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.List_ = c.List_.Copy()
c.List = c.List.Copy()
return &c
}
func (n *AddStringExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.List_, err, do)
err = maybeDoList(n.List, err, do)
return err
}
func (n *AddStringExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.List_, edit)
editList(n.List, edit)
}
func (n *AddrExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -154,18 +154,18 @@ func (n *BlockStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *BlockStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.List_ = c.List_.Copy()
c.List = c.List.Copy()
return &c
}
func (n *BlockStmt) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.List_, err, do)
err = maybeDoList(n.List, err, do)
return err
}
func (n *BlockStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.List_, edit)
editList(n.List, edit)
}
func (n *BranchStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -189,7 +189,7 @@ func (n *CallExpr) copy() Node {
c.init = c.init.Copy()
c.Args = c.Args.Copy()
c.Rargs = c.Rargs.Copy()
c.Body_ = c.Body_.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *CallExpr) doChildren(do func(Node) error) error {
@ -198,7 +198,7 @@ func (n *CallExpr) doChildren(do func(Node) error) error {
err = maybeDo(n.X, err, do)
err = maybeDoList(n.Args, err, do)
err = maybeDoList(n.Rargs, err, do)
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *CallExpr) editChildren(edit func(Node) Node) {
@ -206,7 +206,7 @@ func (n *CallExpr) editChildren(edit func(Node) Node) {
n.X = maybeEdit(n.X, edit)
editList(n.Args, edit)
editList(n.Rargs, edit)
editList(n.Body_, edit)
editList(n.Body, edit)
}
func (n *CallPartExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -231,25 +231,25 @@ func (n *CaseStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Vars = c.Vars.Copy()
c.List_ = c.List_.Copy()
c.Body_ = c.Body_.Copy()
c.List = c.List.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *CaseStmt) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.Vars, err, do)
err = maybeDoList(n.List_, err, do)
err = maybeDoList(n.List, err, do)
err = maybeDo(n.Comm, err, do)
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *CaseStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.Vars, edit)
editList(n.List_, edit)
editList(n.List, edit)
n.Comm = maybeEdit(n.Comm, edit)
editList(n.Body_, edit)
editList(n.Body, edit)
}
func (n *ChanType) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -300,20 +300,20 @@ func (n *CompLitExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *CompLitExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.List_ = c.List_.Copy()
c.List = c.List.Copy()
return &c
}
func (n *CompLitExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.Ntype, err, do)
err = maybeDoList(n.List_, err, do)
err = maybeDoList(n.List, err, do)
return err
}
func (n *CompLitExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.Ntype = toNtype(maybeEdit(n.Ntype, edit))
editList(n.List_, edit)
editList(n.List, edit)
}
func (n *ConstExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -367,7 +367,7 @@ func (n *ForStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Late = c.Late.Copy()
c.Body_ = c.Body_.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *ForStmt) doChildren(do func(Node) error) error {
@ -376,7 +376,7 @@ func (n *ForStmt) doChildren(do func(Node) error) error {
err = maybeDo(n.Cond, err, do)
err = maybeDoList(n.Late, err, do)
err = maybeDo(n.Post, err, do)
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *ForStmt) editChildren(edit func(Node) Node) {
@ -384,22 +384,22 @@ func (n *ForStmt) editChildren(edit func(Node) Node) {
n.Cond = maybeEdit(n.Cond, edit)
editList(n.Late, edit)
n.Post = maybeEdit(n.Post, edit)
editList(n.Body_, edit)
editList(n.Body, edit)
}
func (n *Func) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *Func) copy() Node {
c := *n
c.Body_ = c.Body_.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *Func) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *Func) editChildren(edit func(Node) Node) {
editList(n.Body_, edit)
editList(n.Body, edit)
}
func (n *FuncType) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -461,7 +461,7 @@ func (n *IfStmt) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *IfStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Body_ = c.Body_.Copy()
c.Body = c.Body.Copy()
c.Else = c.Else.Copy()
return &c
}
@ -469,14 +469,14 @@ func (n *IfStmt) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.Cond, err, do)
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
err = maybeDoList(n.Else, err, do)
return err
}
func (n *IfStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.Cond = maybeEdit(n.Cond, edit)
editList(n.Body_, edit)
editList(n.Body, edit)
editList(n.Else, edit)
}
@ -518,20 +518,20 @@ func (n *InlinedCallExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *InlinedCallExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.Body_ = c.Body_.Copy()
c.Body = c.Body.Copy()
c.ReturnVars = c.ReturnVars.Copy()
return &c
}
func (n *InlinedCallExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
err = maybeDoList(n.ReturnVars, err, do)
return err
}
func (n *InlinedCallExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.Body_, edit)
editList(n.Body, edit)
editList(n.ReturnVars, edit)
}
@ -726,7 +726,7 @@ func (n *RangeStmt) copy() Node {
c := *n
c.init = c.init.Copy()
c.Vars = c.Vars.Copy()
c.Body_ = c.Body_.Copy()
c.Body = c.Body.Copy()
return &c
}
func (n *RangeStmt) doChildren(do func(Node) error) error {
@ -734,14 +734,14 @@ func (n *RangeStmt) doChildren(do func(Node) error) error {
err = maybeDoList(n.init, err, do)
err = maybeDoList(n.Vars, err, do)
err = maybeDo(n.X, err, do)
err = maybeDoList(n.Body_, err, do)
err = maybeDoList(n.Body, err, do)
return err
}
func (n *RangeStmt) editChildren(edit func(Node) Node) {
editList(n.init, edit)
editList(n.Vars, edit)
n.X = maybeEdit(n.X, edit)
editList(n.Body_, edit)
editList(n.Body, edit)
}
func (n *ResultExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
@ -838,40 +838,40 @@ func (n *SliceExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *SliceExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.List_ = c.List_.Copy()
c.List = c.List.Copy()
return &c
}
func (n *SliceExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.X, err, do)
err = maybeDoList(n.List_, err, do)
err = maybeDoList(n.List, err, do)
return err
}
func (n *SliceExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.X = maybeEdit(n.X, edit)
editList(n.List_, edit)
editList(n.List, edit)
}
func (n *SliceHeaderExpr) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }
func (n *SliceHeaderExpr) copy() Node {
c := *n
c.init = c.init.Copy()
c.LenCap_ = c.LenCap_.Copy()
c.LenCap = c.LenCap.Copy()
return &c
}
func (n *SliceHeaderExpr) doChildren(do func(Node) error) error {
var err error
err = maybeDoList(n.init, err, do)
err = maybeDo(n.Ptr, err, do)
err = maybeDoList(n.LenCap_, err, do)
err = maybeDoList(n.LenCap, err, do)
return err
}
func (n *SliceHeaderExpr) editChildren(edit func(Node) Node) {
editList(n.init, edit)
n.Ptr = maybeEdit(n.Ptr, edit)
editList(n.LenCap_, edit)
editList(n.LenCap, edit)
}
func (n *SliceType) Format(s fmt.State, verb rune) { FmtNode(n, s, verb) }

View file

@ -30,9 +30,6 @@ func NewDecl(pos src.XPos, op Op, x Node) *Decl {
func (*Decl) isStmt() {}
func (n *Decl) Left() Node { return n.X }
func (n *Decl) SetLeft(x Node) { n.X = x }
// A Stmt is a Node that can appear as a statement.
// This includes statement-like expressions such as f().
//
@ -78,15 +75,6 @@ func NewAssignListStmt(pos src.XPos, op Op, lhs, rhs []Node) *AssignListStmt {
return n
}
func (n *AssignListStmt) List() Nodes { return n.Lhs }
func (n *AssignListStmt) PtrList() *Nodes { return &n.Lhs }
func (n *AssignListStmt) SetList(x Nodes) { n.Lhs = x }
func (n *AssignListStmt) Rlist() Nodes { return n.Rhs }
func (n *AssignListStmt) PtrRlist() *Nodes { return &n.Rhs }
func (n *AssignListStmt) SetRlist(x Nodes) { n.Rhs = x }
func (n *AssignListStmt) Colas() bool { return n.Def }
func (n *AssignListStmt) SetColas(x bool) { n.Def = x }
func (n *AssignListStmt) SetOp(op Op) {
switch op {
default:
@ -112,13 +100,6 @@ func NewAssignStmt(pos src.XPos, x, y Node) *AssignStmt {
return n
}
func (n *AssignStmt) Left() Node { return n.X }
func (n *AssignStmt) SetLeft(x Node) { n.X = x }
func (n *AssignStmt) Right() Node { return n.Y }
func (n *AssignStmt) SetRight(y Node) { n.Y = y }
func (n *AssignStmt) Colas() bool { return n.Def }
func (n *AssignStmt) SetColas(x bool) { n.Def = x }
func (n *AssignStmt) SetOp(op Op) {
switch op {
default:
@ -145,21 +126,13 @@ func NewAssignOpStmt(pos src.XPos, asOp Op, x, y Node) *AssignOpStmt {
return n
}
func (n *AssignOpStmt) Left() Node { return n.X }
func (n *AssignOpStmt) SetLeft(x Node) { n.X = x }
func (n *AssignOpStmt) Right() Node { return n.Y }
func (n *AssignOpStmt) SetRight(y Node) { n.Y = y }
func (n *AssignOpStmt) SubOp() Op { return n.AsOp }
func (n *AssignOpStmt) SetSubOp(x Op) { n.AsOp = x }
func (n *AssignOpStmt) Implicit() bool { return n.IncDec }
func (n *AssignOpStmt) SetImplicit(b bool) { n.IncDec = b }
func (n *AssignOpStmt) Type() *types.Type { return n.typ }
func (n *AssignOpStmt) SetType(x *types.Type) { n.typ = x }
// A BlockStmt is a block: { List }.
type BlockStmt struct {
miniStmt
List_ Nodes
List Nodes
}
func NewBlockStmt(pos src.XPos, list []Node) *BlockStmt {
@ -172,14 +145,10 @@ func NewBlockStmt(pos src.XPos, list []Node) *BlockStmt {
}
}
n.op = OBLOCK
n.List_.Set(list)
n.List.Set(list)
return n
}
func (n *BlockStmt) List() Nodes { return n.List_ }
func (n *BlockStmt) PtrList() *Nodes { return &n.List_ }
func (n *BlockStmt) SetList(x Nodes) { n.List_ = x }
// A BranchStmt is a break, continue, fallthrough, or goto statement.
//
// For back-end code generation, Op may also be RETJMP (return+jump),
@ -202,49 +171,36 @@ func NewBranchStmt(pos src.XPos, op Op, label *types.Sym) *BranchStmt {
return n
}
func (n *BranchStmt) Sym() *types.Sym { return n.Label }
func (n *BranchStmt) SetSym(sym *types.Sym) { n.Label = sym }
func (n *BranchStmt) Sym() *types.Sym { return n.Label }
// A CaseStmt is a case statement in a switch or select: case List: Body.
type CaseStmt struct {
miniStmt
Vars Nodes // declared variable for this case in type switch
List_ Nodes // list of expressions for switch, early select
Comm Node // communication case (Exprs[0]) after select is type-checked
Body_ Nodes
Vars Nodes // declared variable for this case in type switch
List Nodes // list of expressions for switch, early select
Comm Node // communication case (Exprs[0]) after select is type-checked
Body Nodes
}
func NewCaseStmt(pos src.XPos, list, body []Node) *CaseStmt {
n := &CaseStmt{}
n.pos = pos
n.op = OCASE
n.List_.Set(list)
n.Body_.Set(body)
n.List.Set(list)
n.Body.Set(body)
return n
}
func (n *CaseStmt) List() Nodes { return n.List_ }
func (n *CaseStmt) PtrList() *Nodes { return &n.List_ }
func (n *CaseStmt) SetList(x Nodes) { n.List_ = x }
func (n *CaseStmt) Body() Nodes { return n.Body_ }
func (n *CaseStmt) PtrBody() *Nodes { return &n.Body_ }
func (n *CaseStmt) SetBody(x Nodes) { n.Body_ = x }
func (n *CaseStmt) Rlist() Nodes { return n.Vars }
func (n *CaseStmt) PtrRlist() *Nodes { return &n.Vars }
func (n *CaseStmt) SetRlist(x Nodes) { n.Vars = x }
func (n *CaseStmt) Left() Node { return n.Comm }
func (n *CaseStmt) SetLeft(x Node) { n.Comm = x }
// A ForStmt is a non-range for loop: for Init; Cond; Post { Body }
// Op can be OFOR or OFORUNTIL (!Cond).
type ForStmt struct {
miniStmt
Label *types.Sym
Cond Node
Late Nodes
Post Node
Body_ Nodes
HasBreak_ bool
Label *types.Sym
Cond Node
Late Nodes
Post Node
Body Nodes
HasBreak bool
}
func NewForStmt(pos src.XPos, init []Node, cond, post Node, body []Node) *ForStmt {
@ -252,25 +208,10 @@ func NewForStmt(pos src.XPos, init []Node, cond, post Node, body []Node) *ForStm
n.pos = pos
n.op = OFOR
n.init.Set(init)
n.Body_.Set(body)
n.Body.Set(body)
return n
}
func (n *ForStmt) Sym() *types.Sym { return n.Label }
func (n *ForStmt) SetSym(x *types.Sym) { n.Label = x }
func (n *ForStmt) Left() Node { return n.Cond }
func (n *ForStmt) SetLeft(x Node) { n.Cond = x }
func (n *ForStmt) Right() Node { return n.Post }
func (n *ForStmt) SetRight(x Node) { n.Post = x }
func (n *ForStmt) Body() Nodes { return n.Body_ }
func (n *ForStmt) PtrBody() *Nodes { return &n.Body_ }
func (n *ForStmt) SetBody(x Nodes) { n.Body_ = x }
func (n *ForStmt) List() Nodes { return n.Late }
func (n *ForStmt) PtrList() *Nodes { return &n.Late }
func (n *ForStmt) SetList(x Nodes) { n.Late = x }
func (n *ForStmt) HasBreak() bool { return n.HasBreak_ }
func (n *ForStmt) SetHasBreak(b bool) { n.HasBreak_ = b }
func (n *ForStmt) SetOp(op Op) {
if op != OFOR && op != OFORUNTIL {
panic(n.no("SetOp " + op.String()))
@ -300,38 +241,24 @@ func NewGoDeferStmt(pos src.XPos, op Op, call Node) *GoDeferStmt {
return n
}
func (n *GoDeferStmt) Left() Node { return n.Call }
func (n *GoDeferStmt) SetLeft(x Node) { n.Call = x }
// A IfStmt is a return statement: if Init; Cond { Then } else { Else }.
type IfStmt struct {
miniStmt
Cond Node
Body_ Nodes
Else Nodes
Likely_ bool // code layout hint
Cond Node
Body Nodes
Else Nodes
Likely bool // code layout hint
}
func NewIfStmt(pos src.XPos, cond Node, body, els []Node) *IfStmt {
n := &IfStmt{Cond: cond}
n.pos = pos
n.op = OIF
n.Body_.Set(body)
n.Body.Set(body)
n.Else.Set(els)
return n
}
func (n *IfStmt) Left() Node { return n.Cond }
func (n *IfStmt) SetLeft(x Node) { n.Cond = x }
func (n *IfStmt) Body() Nodes { return n.Body_ }
func (n *IfStmt) PtrBody() *Nodes { return &n.Body_ }
func (n *IfStmt) SetBody(x Nodes) { n.Body_ = x }
func (n *IfStmt) Rlist() Nodes { return n.Else }
func (n *IfStmt) PtrRlist() *Nodes { return &n.Else }
func (n *IfStmt) SetRlist(x Nodes) { n.Else = x }
func (n *IfStmt) Likely() bool { return n.Likely_ }
func (n *IfStmt) SetLikely(x bool) { n.Likely_ = x }
// An InlineMarkStmt is a marker placed just before an inlined body.
type InlineMarkStmt struct {
miniStmt
@ -361,21 +288,20 @@ func NewLabelStmt(pos src.XPos, label *types.Sym) *LabelStmt {
return n
}
func (n *LabelStmt) Sym() *types.Sym { return n.Label }
func (n *LabelStmt) SetSym(x *types.Sym) { n.Label = x }
func (n *LabelStmt) Sym() *types.Sym { return n.Label }
// A RangeStmt is a range loop: for Vars = range X { Stmts }
// Op can be OFOR or OFORUNTIL (!Cond).
type RangeStmt struct {
miniStmt
Label *types.Sym
Vars Nodes // TODO(rsc): Replace with Key, Value Node
Def bool
X Node
Body_ Nodes
HasBreak_ bool
typ *types.Type // TODO(rsc): Remove - use X.Type() instead
Prealloc *Name
Label *types.Sym
Vars Nodes // TODO(rsc): Replace with Key, Value Node
Def bool
X Node
Body Nodes
HasBreak bool
typ *types.Type // TODO(rsc): Remove - use X.Type() instead
Prealloc *Name
}
func NewRangeStmt(pos src.XPos, vars []Node, x Node, body []Node) *RangeStmt {
@ -383,24 +309,10 @@ func NewRangeStmt(pos src.XPos, vars []Node, x Node, body []Node) *RangeStmt {
n.pos = pos
n.op = ORANGE
n.Vars.Set(vars)
n.Body_.Set(body)
n.Body.Set(body)
return n
}
func (n *RangeStmt) Sym() *types.Sym { return n.Label }
func (n *RangeStmt) SetSym(x *types.Sym) { n.Label = x }
func (n *RangeStmt) Right() Node { return n.X }
func (n *RangeStmt) SetRight(x Node) { n.X = x }
func (n *RangeStmt) Body() Nodes { return n.Body_ }
func (n *RangeStmt) PtrBody() *Nodes { return &n.Body_ }
func (n *RangeStmt) SetBody(x Nodes) { n.Body_ = x }
func (n *RangeStmt) List() Nodes { return n.Vars }
func (n *RangeStmt) PtrList() *Nodes { return &n.Vars }
func (n *RangeStmt) SetList(x Nodes) { n.Vars = x }
func (n *RangeStmt) HasBreak() bool { return n.HasBreak_ }
func (n *RangeStmt) SetHasBreak(b bool) { n.HasBreak_ = b }
func (n *RangeStmt) Colas() bool { return n.Def }
func (n *RangeStmt) SetColas(b bool) { n.Def = b }
func (n *RangeStmt) Type() *types.Type { return n.typ }
func (n *RangeStmt) SetType(x *types.Type) { n.typ = x }
@ -420,19 +332,15 @@ func NewReturnStmt(pos src.XPos, results []Node) *ReturnStmt {
return n
}
func (n *ReturnStmt) Orig() Node { return n.orig }
func (n *ReturnStmt) SetOrig(x Node) { n.orig = x }
func (n *ReturnStmt) List() Nodes { return n.Results }
func (n *ReturnStmt) PtrList() *Nodes { return &n.Results }
func (n *ReturnStmt) SetList(x Nodes) { n.Results = x }
func (n *ReturnStmt) IsDDD() bool { return false } // typecheckargs asks
func (n *ReturnStmt) Orig() Node { return n.orig }
func (n *ReturnStmt) SetOrig(x Node) { n.orig = x }
// A SelectStmt is a block: { Cases }.
type SelectStmt struct {
miniStmt
Label *types.Sym
Cases Nodes
HasBreak_ bool
Label *types.Sym
Cases Nodes
HasBreak bool
// TODO(rsc): Instead of recording here, replace with a block?
Compiled Nodes // compiled form, after walkswitch
@ -446,17 +354,6 @@ func NewSelectStmt(pos src.XPos, cases []Node) *SelectStmt {
return n
}
func (n *SelectStmt) List() Nodes { return n.Cases }
func (n *SelectStmt) PtrList() *Nodes { return &n.Cases }
func (n *SelectStmt) SetList(x Nodes) { n.Cases = x }
func (n *SelectStmt) Sym() *types.Sym { return n.Label }
func (n *SelectStmt) SetSym(x *types.Sym) { n.Label = x }
func (n *SelectStmt) HasBreak() bool { return n.HasBreak_ }
func (n *SelectStmt) SetHasBreak(x bool) { n.HasBreak_ = x }
func (n *SelectStmt) Body() Nodes { return n.Compiled }
func (n *SelectStmt) PtrBody() *Nodes { return &n.Compiled }
func (n *SelectStmt) SetBody(x Nodes) { n.Compiled = x }
// A SendStmt is a send statement: X <- Y.
type SendStmt struct {
miniStmt
@ -471,18 +368,13 @@ func NewSendStmt(pos src.XPos, ch, value Node) *SendStmt {
return n
}
func (n *SendStmt) Left() Node { return n.Chan }
func (n *SendStmt) SetLeft(x Node) { n.Chan = x }
func (n *SendStmt) Right() Node { return n.Value }
func (n *SendStmt) SetRight(y Node) { n.Value = y }
// A SwitchStmt is a switch statement: switch Init; Expr { Cases }.
type SwitchStmt struct {
miniStmt
Tag Node
Cases Nodes // list of *CaseStmt
Label *types.Sym
HasBreak_ bool
Tag Node
Cases Nodes // list of *CaseStmt
Label *types.Sym
HasBreak bool
// TODO(rsc): Instead of recording here, replace with a block?
Compiled Nodes // compiled form, after walkswitch
@ -496,19 +388,6 @@ func NewSwitchStmt(pos src.XPos, tag Node, cases []Node) *SwitchStmt {
return n
}
func (n *SwitchStmt) Left() Node { return n.Tag }
func (n *SwitchStmt) SetLeft(x Node) { n.Tag = x }
func (n *SwitchStmt) List() Nodes { return n.Cases }
func (n *SwitchStmt) PtrList() *Nodes { return &n.Cases }
func (n *SwitchStmt) SetList(x Nodes) { n.Cases = x }
func (n *SwitchStmt) Body() Nodes { return n.Compiled }
func (n *SwitchStmt) PtrBody() *Nodes { return &n.Compiled }
func (n *SwitchStmt) SetBody(x Nodes) { n.Compiled = x }
func (n *SwitchStmt) Sym() *types.Sym { return n.Label }
func (n *SwitchStmt) SetSym(x *types.Sym) { n.Label = x }
func (n *SwitchStmt) HasBreak() bool { return n.HasBreak_ }
func (n *SwitchStmt) SetHasBreak(x bool) { n.HasBreak_ = x }
// A TypeSwitchGuard is the [Name :=] X.(type) in a type switch.
type TypeSwitchGuard struct {
miniNode
@ -523,19 +402,3 @@ func NewTypeSwitchGuard(pos src.XPos, tag *Ident, x Node) *TypeSwitchGuard {
n.op = OTYPESW
return n
}
func (n *TypeSwitchGuard) Left() Node {
if n.Tag == nil {
return nil
}
return n.Tag
}
func (n *TypeSwitchGuard) SetLeft(x Node) {
if x == nil {
n.Tag = nil
return
}
n.Tag = x.(*Ident)
}
func (n *TypeSwitchGuard) Right() Node { return n.X }
func (n *TypeSwitchGuard) SetRight(x Node) { n.X = x }