[dev.regabi] cmd/compile: introduce cmd/compile/internal/ir [generated]

If we want to break up package gc at all, we will need to move
the compiler IR it defines into a separate package that can be
imported by packages that gc itself imports. This CL does that.
It also removes the TINT8 etc. aliases so that all code is explicit
about which package each name comes from.
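
For example (excerpted from the alg.go hunks below), the alias
removal turns

	case TINTER:
	namedfield("h", types.Types[TUINTPTR])

into

	case types.TINTER:
	namedfield("h", types.Types[types.TUINTPTR])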

This CL is automatically generated by the script below.
See the comments in the script for details about the changes.

[git-generate]
cd src/cmd/compile/internal/gc

rf '
        # These names were never fully qualified
        # when the types package was added.
        # Do it now, to avoid confusion about where they live.
        inline -rm \
                Txxx \
                TINT8 \
                TUINT8 \
                TINT16 \
                TUINT16 \
                TINT32 \
                TUINT32 \
                TINT64 \
                TUINT64 \
                TINT \
                TUINT \
                TUINTPTR \
                TCOMPLEX64 \
                TCOMPLEX128 \
                TFLOAT32 \
                TFLOAT64 \
                TBOOL \
                TPTR \
                TFUNC \
                TSLICE \
                TARRAY \
                TSTRUCT \
                TCHAN \
                TMAP \
                TINTER \
                TFORW \
                TANY \
                TSTRING \
                TUNSAFEPTR \
                TIDEAL \
                TNIL \
                TBLANK \
                TFUNCARGS \
                TCHANARGS \
                NTYPE \
                BADWIDTH

        # esc.go and escape.go do not need to be split.
        # Append esc.go onto the end of escape.go.
        mv esc.go escape.go

        # Pull out the type format installation from func Main,
        # so it can be carried into package ir.
        mv Main:/Sconv.=/-0,/TypeLinkSym/-1 InstallTypeFormats
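        # (Roughly: the ed-style address /Sconv.=/-0,/TypeLinkSym/-1
        # selects the statements from the line matching Sconv.= through
        # the line just before the TypeLinkSym match; rf moves that
        # range into a new function named InstallTypeFormats.)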

        # Names that need to be exported for use by code left in gc.
        mv Isconst IsConst
        mv asNode AsNode
        mv asNodes AsNodes
        mv asTypesNode AsTypesNode
        mv basicnames BasicTypeNames
        mv builtinpkg BuiltinPkg
        mv consttype ConstType
        mv dumplist DumpList
        mv fdumplist FDumpList
        mv fmtMode FmtMode
        mv goopnames OpNames
        mv inspect Inspect
        mv inspectList InspectList
        mv localpkg LocalPkg
        mv nblank BlankNode
        mv numImport NumImport
        mv opprec OpPrec
        mv origSym OrigSym
        mv stmtwithinit StmtWithInit
        mv dump DumpAny
        mv fdump FDumpAny
        mv nod Nod
        mv nodl NodAt
        mv newname NewName
        mv newnamel NewNameAt
        mv assertRepresents AssertValidTypeForConst
        mv represents ValidTypeForConst
        mv nodlit NewLiteral

        # Types and fields that need to be exported for use by gc.
        mv nowritebarrierrecCallSym SymAndPos
        mv SymAndPos.lineno SymAndPos.Pos
        mv SymAndPos.target SymAndPos.Sym
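        # (Roughly, the resulting type is
        #       type SymAndPos struct {
        #               Sym *obj.LSym
        #               Pos src.XPos
        #       }
        # assuming the original target/lineno field types.)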

        mv Func.lsym Func.LSym
        mv Func.setWBPos Func.SetWBPos
        mv Func.numReturns Func.NumReturns
        mv Func.numDefers Func.NumDefers
        mv Func.nwbrCalls Func.NWBRCalls

        # initLSym is an algorithm left behind in gc,
        # not an operation on Func itself.
        mv Func.initLSym initLSym

        mv nodeQueue NodeQueue
        mv NodeQueue.empty NodeQueue.Empty
        mv NodeQueue.popLeft NodeQueue.PopLeft
        mv NodeQueue.pushRight NodeQueue.PushRight

        # Many methods on Node are actually algorithms that
        # would apply to any node implementation.
        # Those become plain functions; a before/after sketch of the
        # call sites appears just after this script.
        mv Node.funcname FuncName
        mv Node.isBlank IsBlank
        mv Node.isGoConst isGoConst
        mv Node.isNil IsNil
        mv Node.isParamHeapCopy isParamHeapCopy
        mv Node.isParamStackCopy isParamStackCopy
        mv Node.isSimpleName isSimpleName
        mv Node.mayBeShared MayBeShared
        mv Node.pkgFuncName PkgFuncName
        mv Node.backingArrayPtrLen backingArrayPtrLen
        mv Node.isterminating isTermNode
        mv Node.labeledControl labeledControl
        mv Nodes.isterminating isTermNodes
        mv Nodes.sigerr fmtSignature
        mv Node.MethodName methodExprName
        mv Node.MethodFunc methodExprFunc
        mv Node.IsMethod IsMethod

        # Every node will need to implement RawCopy;
        # Copy and SepCopy algorithms will use it.
        mv Node.rawcopy Node.RawCopy
        mv Node.copy Copy
        mv Node.sepcopy SepCopy

        # Extract Node.Format method body into func FmtNode,
        # but leave method wrapper behind.
        mv Node.Format:0,$ FmtNode

        # Formatting helpers that will apply to all node implementations.
        mv Node.Line Line
        mv Node.exprfmt exprFmt
        mv Node.jconv jconvFmt
        mv Node.modeString modeString
        mv Node.nconv nconvFmt
        mv Node.nodedump nodeDumpFmt
        mv Node.nodefmt nodeFmt
        mv Node.stmtfmt stmtFmt

        # Constant support needed for code moving to ir.
        mv okforconst OKForConst
        mv vconv FmtConst
        mv int64Val Int64Val
        mv float64Val Float64Val
        mv Node.ValueInterface ConstValue

        # Organize code into files.
        mv LocalPkg BuiltinPkg ir.go
        mv NumImport InstallTypeFormats Line fmt.go
        mv syntax.go Nod NodAt NewNameAt Class Pxxx PragmaFlag Nointerface SymAndPos \
                AsNode AsTypesNode BlankNode OrigSym \
                Node.SliceBounds Node.SetSliceBounds Op.IsSlice3 \
                IsConst Node.Int64Val Node.CanInt64 Node.Uint64Val Node.BoolVal Node.StringVal \
                Node.RawCopy SepCopy Copy \
                IsNil IsBlank IsMethod \
                Node.Typ Node.StorageClass node.go
        mv ConstType ConstValue Int64Val Float64Val AssertValidTypeForConst ValidTypeForConst NewLiteral idealType OKForConst val.go

        # Move files to new ir package.
        mv bitset.go class_string.go dump.go fmt.go \
                ir.go node.go op_string.go val.go \
                sizeof_test.go cmd/compile/internal/ir
'
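
# A before/after sketch of the method-to-function moves above,
# taken from the closure.go and alg.go hunks below:
#
#       outer = outerfunc.funcname()                // before
#       outer = ir.FuncName(outerfunc)              // after
#
#       if !outerfunc.Func.Nname.isBlank() {        // before
#       if !ir.IsBlank(outerfunc.Func.Nname) {      // after
#
#       call.List.Append(sptr, tptr, slen.copy())   // before
#       call.List.Append(sptr, tptr, ir.Copy(slen)) // after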

: # fix mkbuiltin.go to generate the changes made to builtin.go during rf
sed -i '' '
        s/\[T/[types.T/g
        s/\*Node/*ir.Node/g
        /internal\/types/c \
                fmt.Fprintln(&b, `import (`) \
                fmt.Fprintln(&b, `      "cmd/compile/internal/ir"`) \
                fmt.Fprintln(&b, `      "cmd/compile/internal/types"`) \
                fmt.Fprintln(&b, `)`)
' mkbuiltin.go
gofmt -w mkbuiltin.go
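
# After this edit, the generated builtin.go header imports both
# packages, matching the builtin.go hunk below:
#
#       import (
#               "cmd/compile/internal/ir"
#               "cmd/compile/internal/types"
#       )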

: # update cmd/dist to add internal/ir
cd ../../../dist
sed -i '' '/compile.internal.gc/a\
        "cmd/compile/internal/ir",
' buildtool.go
gofmt -w buildtool.go

: # update cmd/compile TestFormats
cd ../..
go install std cmd
cd cmd/compile
go test -u || go test  # first one updates but fails; second passes

Change-Id: I5f7caf6b20629b51970279e81231a3574d5b51db
Reviewed-on: https://go-review.googlesource.com/c/go/+/273008
Trust: Russ Cox <rsc@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Author: Russ Cox <rsc@golang.org>
Date:   2020-11-19 21:09:22 -05:00
Commit: 84e2bd611f (parent 331b8b4797)

65 changed files with 6626 additions and 6602 deletions

@@ -22,14 +22,14 @@ package main_test
var knownFormats = map[string]string{
"*bytes.Buffer %s": "",
"*cmd/compile/internal/gc.EscLocation %v": "",
"*cmd/compile/internal/gc.Node %#v": "",
"*cmd/compile/internal/gc.Node %+S": "",
"*cmd/compile/internal/gc.Node %+v": "",
"*cmd/compile/internal/gc.Node %L": "",
"*cmd/compile/internal/gc.Node %S": "",
"*cmd/compile/internal/gc.Node %j": "",
"*cmd/compile/internal/gc.Node %p": "",
"*cmd/compile/internal/gc.Node %v": "",
"*cmd/compile/internal/ir.Node %#v": "",
"*cmd/compile/internal/ir.Node %+S": "",
"*cmd/compile/internal/ir.Node %+v": "",
"*cmd/compile/internal/ir.Node %L": "",
"*cmd/compile/internal/ir.Node %S": "",
"*cmd/compile/internal/ir.Node %j": "",
"*cmd/compile/internal/ir.Node %p": "",
"*cmd/compile/internal/ir.Node %v": "",
"*cmd/compile/internal/ssa.Block %s": "",
"*cmd/compile/internal/ssa.Block %v": "",
"*cmd/compile/internal/ssa.Func %s": "",
@@ -78,18 +78,18 @@ var knownFormats = map[string]string{
"byte %q": "",
"byte %v": "",
"cmd/compile/internal/arm.shift %d": "",
"cmd/compile/internal/gc.Class %d": "",
"cmd/compile/internal/gc.Class %s": "",
"cmd/compile/internal/gc.Class %v": "",
"cmd/compile/internal/gc.Nodes %#v": "",
"cmd/compile/internal/gc.Nodes %+v": "",
"cmd/compile/internal/gc.Nodes %.v": "",
"cmd/compile/internal/gc.Nodes %v": "",
"cmd/compile/internal/gc.Op %#v": "",
"cmd/compile/internal/gc.Op %v": "",
"cmd/compile/internal/gc.fmtMode %d": "",
"cmd/compile/internal/gc.initKind %d": "",
"cmd/compile/internal/gc.itag %v": "",
"cmd/compile/internal/ir.Class %d": "",
"cmd/compile/internal/ir.Class %s": "",
"cmd/compile/internal/ir.Class %v": "",
"cmd/compile/internal/ir.FmtMode %d": "",
"cmd/compile/internal/ir.Nodes %#v": "",
"cmd/compile/internal/ir.Nodes %+v": "",
"cmd/compile/internal/ir.Nodes %.v": "",
"cmd/compile/internal/ir.Nodes %v": "",
"cmd/compile/internal/ir.Op %#v": "",
"cmd/compile/internal/ir.Op %v": "",
"cmd/compile/internal/ssa.BranchPrediction %d": "",
"cmd/compile/internal/ssa.Edge %v": "",
"cmd/compile/internal/ssa.GCNode %v": "",
@@ -162,8 +162,8 @@ var knownFormats = map[string]string{
"interface{} %q": "",
"interface{} %s": "",
"interface{} %v": "",
"map[*cmd/compile/internal/gc.Node]*cmd/compile/internal/ssa.Value %v": "",
"map[*cmd/compile/internal/gc.Node][]*cmd/compile/internal/gc.Node %v": "",
"map[*cmd/compile/internal/ir.Node]*cmd/compile/internal/ssa.Value %v": "",
"map[*cmd/compile/internal/ir.Node][]*cmd/compile/internal/ir.Node %v": "",
"map[cmd/compile/internal/ssa.ID]uint32 %v": "",
"map[int64]uint32 %v": "",
"math/big.Accuracy %s": "",

@@ -11,6 +11,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@@ -545,7 +546,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym:
wantreg = "SB"
gc.AddAux(&p.From, v)
case *gc.Node:
case *ir.Node:
wantreg = "SP"
gc.AddAux(&p.From, v)
case nil:

@@ -9,6 +9,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@@ -395,7 +396,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym:
wantreg = "SB"
gc.AddAux(&p.From, v)
case *gc.Node:
case *ir.Node:
wantreg = "SP"
gc.AddAux(&p.From, v)
case nil:

@@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
@@ -70,11 +71,11 @@ func EqCanPanic(t *types.Type) bool {
switch t.Etype {
default:
return false
case TINTER:
case types.TINTER:
return true
case TARRAY:
case types.TARRAY:
return EqCanPanic(t.Elem())
case TSTRUCT:
case types.TSTRUCT:
for _, f := range t.FieldSlice() {
if !f.Sym.IsBlank() && EqCanPanic(f.Type) {
return true
@@ -120,45 +121,45 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) {
}
switch t.Etype {
case TANY, TFORW:
case types.TANY, types.TFORW:
// will be defined later.
return ANOEQ, t
case TINT8, TUINT8, TINT16, TUINT16,
TINT32, TUINT32, TINT64, TUINT64,
TINT, TUINT, TUINTPTR,
TBOOL, TPTR,
TCHAN, TUNSAFEPTR:
case types.TINT8, types.TUINT8, types.TINT16, types.TUINT16,
types.TINT32, types.TUINT32, types.TINT64, types.TUINT64,
types.TINT, types.TUINT, types.TUINTPTR,
types.TBOOL, types.TPTR,
types.TCHAN, types.TUNSAFEPTR:
return AMEM, nil
case TFUNC, TMAP:
case types.TFUNC, types.TMAP:
return ANOEQ, t
case TFLOAT32:
case types.TFLOAT32:
return AFLOAT32, nil
case TFLOAT64:
case types.TFLOAT64:
return AFLOAT64, nil
case TCOMPLEX64:
case types.TCOMPLEX64:
return ACPLX64, nil
case TCOMPLEX128:
case types.TCOMPLEX128:
return ACPLX128, nil
case TSTRING:
case types.TSTRING:
return ASTRING, nil
case TINTER:
case types.TINTER:
if t.IsEmptyInterface() {
return ANILINTER, nil
}
return AINTER, nil
case TSLICE:
case types.TSLICE:
return ANOEQ, t
case TARRAY:
case types.TARRAY:
a, bad := algtype1(t.Elem())
switch a {
case AMEM:
@@ -178,7 +179,7 @@ func algtype1(t *types.Type) (AlgKind, *types.Type) {
return ASPECIAL, nil
case TSTRUCT:
case types.TSTRUCT:
fields := t.FieldSlice()
// One-field struct is same as that one field alone.
@@ -288,19 +289,19 @@ func genhash(t *types.Type) *obj.LSym {
}
base.Pos = autogeneratedPos // less confusing than end of input
dclcontext = PEXTERN
dclcontext = ir.PEXTERN
// func sym(p *T, h uintptr) uintptr
tfn := nod(OTFUNC, nil, nil)
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.List.Set2(
namedfield("p", types.NewPtr(t)),
namedfield("h", types.Types[TUINTPTR]),
namedfield("h", types.Types[types.TUINTPTR]),
)
tfn.Rlist.Set1(anonfield(types.Types[TUINTPTR]))
tfn.Rlist.Set1(anonfield(types.Types[types.TUINTPTR]))
fn := dclfunc(sym, tfn)
np := asNode(tfn.Type.Params().Field(0).Nname)
nh := asNode(tfn.Type.Params().Field(1).Nname)
np := ir.AsNode(tfn.Type.Params().Field(0).Nname)
nh := ir.AsNode(tfn.Type.Params().Field(1).Nname)
switch t.Etype {
case types.TARRAY:
@@ -309,23 +310,23 @@ func genhash(t *types.Type) *obj.LSym {
// pure memory.
hashel := hashfor(t.Elem())
n := nod(ORANGE, nil, nod(ODEREF, np, nil))
ni := newname(lookup("i"))
ni.Type = types.Types[TINT]
n := ir.Nod(ir.ORANGE, nil, ir.Nod(ir.ODEREF, np, nil))
ni := NewName(lookup("i"))
ni.Type = types.Types[types.TINT]
n.List.Set1(ni)
n.SetColas(true)
colasdefn(n.List.Slice(), n)
ni = n.List.First()
// h = hashel(&p[i], h)
call := nod(OCALL, hashel, nil)
call := ir.Nod(ir.OCALL, hashel, nil)
nx := nod(OINDEX, np, ni)
nx := ir.Nod(ir.OINDEX, np, ni)
nx.SetBounded(true)
na := nod(OADDR, nx, nil)
na := ir.Nod(ir.OADDR, nx, nil)
call.List.Append(na)
call.List.Append(nh)
n.Nbody.Append(nod(OAS, nh, call))
n.Nbody.Append(ir.Nod(ir.OAS, nh, call))
fn.Nbody.Append(n)
@@ -344,12 +345,12 @@ func genhash(t *types.Type) *obj.LSym {
// Hash non-memory fields with appropriate hash function.
if !IsRegularMemory(f.Type) {
hashel := hashfor(f.Type)
call := nod(OCALL, hashel, nil)
nx := nodSym(OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nod(OADDR, nx, nil)
call := ir.Nod(ir.OCALL, hashel, nil)
nx := nodSym(ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := ir.Nod(ir.OADDR, nx, nil)
call.List.Append(na)
call.List.Append(nh)
fn.Nbody.Append(nod(OAS, nh, call))
fn.Nbody.Append(ir.Nod(ir.OAS, nh, call))
i++
continue
}
@@ -359,24 +360,24 @@ func genhash(t *types.Type) *obj.LSym {
// h = hashel(&p.first, size, h)
hashel := hashmem(f.Type)
call := nod(OCALL, hashel, nil)
nx := nodSym(OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nod(OADDR, nx, nil)
call := ir.Nod(ir.OCALL, hashel, nil)
nx := nodSym(ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := ir.Nod(ir.OADDR, nx, nil)
call.List.Append(na)
call.List.Append(nh)
call.List.Append(nodintconst(size))
fn.Nbody.Append(nod(OAS, nh, call))
fn.Nbody.Append(ir.Nod(ir.OAS, nh, call))
i = next
}
}
r := nod(ORETURN, nil, nil)
r := ir.Nod(ir.ORETURN, nil, nil)
r.List.Append(nh)
fn.Nbody.Append(r)
if base.Flag.LowerR != 0 {
dumplist("genhash body", fn.Nbody)
ir.DumpList("genhash body", fn.Nbody)
}
funcbody()
@@ -403,7 +404,7 @@ func genhash(t *types.Type) *obj.LSym {
return closure
}
func hashfor(t *types.Type) *Node {
func hashfor(t *types.Type) *ir.Node {
var sym *types.Sym
switch a, _ := algtype1(t); a {
@@ -429,13 +430,13 @@ func hashfor(t *types.Type) *Node {
sym = typesymprefix(".hash", t)
}
n := newname(sym)
n := NewName(sym)
setNodeNameFunc(n)
n.Type = functype(nil, []*Node{
n.Type = functype(nil, []*ir.Node{
anonfield(types.NewPtr(t)),
anonfield(types.Types[TUINTPTR]),
}, []*Node{
anonfield(types.Types[TUINTPTR]),
anonfield(types.Types[types.TUINTPTR]),
}, []*ir.Node{
anonfield(types.Types[types.TUINTPTR]),
})
return n
}
@@ -517,20 +518,20 @@ func geneq(t *types.Type) *obj.LSym {
// Autogenerate code for equality of structs and arrays.
base.Pos = autogeneratedPos // less confusing than end of input
dclcontext = PEXTERN
dclcontext = ir.PEXTERN
// func sym(p, q *T) bool
tfn := nod(OTFUNC, nil, nil)
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.List.Set2(
namedfield("p", types.NewPtr(t)),
namedfield("q", types.NewPtr(t)),
)
tfn.Rlist.Set1(namedfield("r", types.Types[TBOOL]))
tfn.Rlist.Set1(namedfield("r", types.Types[types.TBOOL]))
fn := dclfunc(sym, tfn)
np := asNode(tfn.Type.Params().Field(0).Nname)
nq := asNode(tfn.Type.Params().Field(1).Nname)
nr := asNode(tfn.Type.Results().Field(0).Nname)
np := ir.AsNode(tfn.Type.Params().Field(0).Nname)
nq := ir.AsNode(tfn.Type.Params().Field(1).Nname)
nr := ir.AsNode(tfn.Type.Results().Field(0).Nname)
// Label to jump to if an equality test fails.
neq := autolabel(".neq")
@@ -542,7 +543,7 @@ func geneq(t *types.Type) *obj.LSym {
default:
base.Fatalf("geneq %v", t)
case TARRAY:
case types.TARRAY:
nelem := t.NumElem()
// checkAll generates code to check the equality of all array elements.
@@ -566,15 +567,15 @@ func geneq(t *types.Type) *obj.LSym {
//
// TODO(josharian): consider doing some loop unrolling
// for larger nelem as well, processing a few elements at a time in a loop.
checkAll := func(unroll int64, last bool, eq func(pi, qi *Node) *Node) {
checkAll := func(unroll int64, last bool, eq func(pi, qi *ir.Node) *ir.Node) {
// checkIdx generates a node to check for equality at index i.
checkIdx := func(i *Node) *Node {
checkIdx := func(i *ir.Node) *ir.Node {
// pi := p[i]
pi := nod(OINDEX, np, i)
pi := ir.Nod(ir.OINDEX, np, i)
pi.SetBounded(true)
pi.Type = t.Elem()
// qi := q[i]
qi := nod(OINDEX, nq, i)
qi := ir.Nod(ir.OINDEX, nq, i)
qi.SetBounded(true)
qi.Type = t.Elem()
return eq(pi, qi)
@@ -588,68 +589,68 @@ func geneq(t *types.Type) *obj.LSym {
// Generate a series of checks.
for i := int64(0); i < nelem; i++ {
// if check {} else { goto neq }
nif := nod(OIF, checkIdx(nodintconst(i)), nil)
nif.Rlist.Append(nodSym(OGOTO, nil, neq))
nif := ir.Nod(ir.OIF, checkIdx(nodintconst(i)), nil)
nif.Rlist.Append(nodSym(ir.OGOTO, nil, neq))
fn.Nbody.Append(nif)
}
if last {
fn.Nbody.Append(nod(OAS, nr, checkIdx(nodintconst(nelem))))
fn.Nbody.Append(ir.Nod(ir.OAS, nr, checkIdx(nodintconst(nelem))))
}
} else {
// Generate a for loop.
// for i := 0; i < nelem; i++
i := temp(types.Types[TINT])
init := nod(OAS, i, nodintconst(0))
cond := nod(OLT, i, nodintconst(nelem))
post := nod(OAS, i, nod(OADD, i, nodintconst(1)))
loop := nod(OFOR, cond, post)
i := temp(types.Types[types.TINT])
init := ir.Nod(ir.OAS, i, nodintconst(0))
cond := ir.Nod(ir.OLT, i, nodintconst(nelem))
post := ir.Nod(ir.OAS, i, ir.Nod(ir.OADD, i, nodintconst(1)))
loop := ir.Nod(ir.OFOR, cond, post)
loop.Ninit.Append(init)
// if eq(pi, qi) {} else { goto neq }
nif := nod(OIF, checkIdx(i), nil)
nif.Rlist.Append(nodSym(OGOTO, nil, neq))
nif := ir.Nod(ir.OIF, checkIdx(i), nil)
nif.Rlist.Append(nodSym(ir.OGOTO, nil, neq))
loop.Nbody.Append(nif)
fn.Nbody.Append(loop)
if last {
fn.Nbody.Append(nod(OAS, nr, nodbool(true)))
fn.Nbody.Append(ir.Nod(ir.OAS, nr, nodbool(true)))
}
}
}
switch t.Elem().Etype {
case TSTRING:
case types.TSTRING:
// Do two loops. First, check that all the lengths match (cheap).
// Second, check that all the contents match (expensive).
// TODO: when the array size is small, unroll the length match checks.
checkAll(3, false, func(pi, qi *Node) *Node {
checkAll(3, false, func(pi, qi *ir.Node) *ir.Node {
// Compare lengths.
eqlen, _ := eqstring(pi, qi)
return eqlen
})
checkAll(1, true, func(pi, qi *Node) *Node {
checkAll(1, true, func(pi, qi *ir.Node) *ir.Node {
// Compare contents.
_, eqmem := eqstring(pi, qi)
return eqmem
})
case TFLOAT32, TFLOAT64:
checkAll(2, true, func(pi, qi *Node) *Node {
case types.TFLOAT32, types.TFLOAT64:
checkAll(2, true, func(pi, qi *ir.Node) *ir.Node {
// p[i] == q[i]
return nod(OEQ, pi, qi)
return ir.Nod(ir.OEQ, pi, qi)
})
// TODO: pick apart structs, do them piecemeal too
default:
checkAll(1, true, func(pi, qi *Node) *Node {
checkAll(1, true, func(pi, qi *ir.Node) *ir.Node {
// p[i] == q[i]
return nod(OEQ, pi, qi)
return ir.Nod(ir.OEQ, pi, qi)
})
}
case TSTRUCT:
case types.TSTRUCT:
// Build a list of conditions to satisfy.
// The conditions are a list-of-lists. Conditions are reorderable
// within each inner list. The outer lists must be evaluated in order.
var conds [][]*Node
conds = append(conds, []*Node{})
and := func(n *Node) {
var conds [][]*ir.Node
conds = append(conds, []*ir.Node{})
and := func(n *ir.Node) {
i := len(conds) - 1
conds[i] = append(conds[i], n)
}
@@ -669,21 +670,21 @@ func geneq(t *types.Type) *obj.LSym {
if !IsRegularMemory(f.Type) {
if EqCanPanic(f.Type) {
// Enforce ordering by starting a new set of reorderable conditions.
conds = append(conds, []*Node{})
conds = append(conds, []*ir.Node{})
}
p := nodSym(OXDOT, np, f.Sym)
q := nodSym(OXDOT, nq, f.Sym)
p := nodSym(ir.OXDOT, np, f.Sym)
q := nodSym(ir.OXDOT, nq, f.Sym)
switch {
case f.Type.IsString():
eqlen, eqmem := eqstring(p, q)
and(eqlen)
and(eqmem)
default:
and(nod(OEQ, p, q))
and(ir.Nod(ir.OEQ, p, q))
}
if EqCanPanic(f.Type) {
// Also enforce ordering after something that can panic.
conds = append(conds, []*Node{})
conds = append(conds, []*ir.Node{})
}
i++
continue
@@ -708,10 +709,10 @@ func geneq(t *types.Type) *obj.LSym {
// Sort conditions to put runtime calls last.
// Preserve the rest of the ordering.
var flatConds []*Node
var flatConds []*ir.Node
for _, c := range conds {
isCall := func(n *Node) bool {
return n.Op == OCALL || n.Op == OCALLFUNC
isCall := func(n *ir.Node) bool {
return n.Op == ir.OCALL || n.Op == ir.OCALLFUNC
}
sort.SliceStable(c, func(i, j int) bool {
return !isCall(c[i]) && isCall(c[j])
@@ -720,42 +721,42 @@ func geneq(t *types.Type) *obj.LSym {
}
if len(flatConds) == 0 {
fn.Nbody.Append(nod(OAS, nr, nodbool(true)))
fn.Nbody.Append(ir.Nod(ir.OAS, nr, nodbool(true)))
} else {
for _, c := range flatConds[:len(flatConds)-1] {
// if cond {} else { goto neq }
n := nod(OIF, c, nil)
n.Rlist.Append(nodSym(OGOTO, nil, neq))
n := ir.Nod(ir.OIF, c, nil)
n.Rlist.Append(nodSym(ir.OGOTO, nil, neq))
fn.Nbody.Append(n)
}
fn.Nbody.Append(nod(OAS, nr, flatConds[len(flatConds)-1]))
fn.Nbody.Append(ir.Nod(ir.OAS, nr, flatConds[len(flatConds)-1]))
}
}
// ret:
// return
ret := autolabel(".ret")
fn.Nbody.Append(nodSym(OLABEL, nil, ret))
fn.Nbody.Append(nod(ORETURN, nil, nil))
fn.Nbody.Append(nodSym(ir.OLABEL, nil, ret))
fn.Nbody.Append(ir.Nod(ir.ORETURN, nil, nil))
// neq:
// r = false
// return (or goto ret)
fn.Nbody.Append(nodSym(OLABEL, nil, neq))
fn.Nbody.Append(nod(OAS, nr, nodbool(false)))
fn.Nbody.Append(nodSym(ir.OLABEL, nil, neq))
fn.Nbody.Append(ir.Nod(ir.OAS, nr, nodbool(false)))
if EqCanPanic(t) || hasCall(fn) {
// Epilogue is large, so share it with the equal case.
fn.Nbody.Append(nodSym(OGOTO, nil, ret))
fn.Nbody.Append(nodSym(ir.OGOTO, nil, ret))
} else {
// Epilogue is small, so don't bother sharing.
fn.Nbody.Append(nod(ORETURN, nil, nil))
fn.Nbody.Append(ir.Nod(ir.ORETURN, nil, nil))
}
// TODO(khr): the epilogue size detection condition above isn't perfect.
// We should really do a generic CL that shares epilogues across
// the board. See #24936.
if base.Flag.LowerR != 0 {
dumplist("geneq body", fn.Nbody)
ir.DumpList("geneq body", fn.Nbody)
}
funcbody()
@@ -784,8 +785,8 @@ func geneq(t *types.Type) *obj.LSym {
return closure
}
func hasCall(n *Node) bool {
if n.Op == OCALL || n.Op == OCALLFUNC {
func hasCall(n *ir.Node) bool {
if n.Op == ir.OCALL || n.Op == ir.OCALLFUNC {
return true
}
if n.Left != nil && hasCall(n.Left) {
@@ -819,10 +820,10 @@ func hasCall(n *Node) bool {
// eqfield returns the node
// p.field == q.field
func eqfield(p *Node, q *Node, field *types.Sym) *Node {
nx := nodSym(OXDOT, p, field)
ny := nodSym(OXDOT, q, field)
ne := nod(OEQ, nx, ny)
func eqfield(p *ir.Node, q *ir.Node, field *types.Sym) *ir.Node {
nx := nodSym(ir.OXDOT, p, field)
ny := nodSym(ir.OXDOT, q, field)
ne := ir.Nod(ir.OEQ, nx, ny)
return ne
}
@@ -832,23 +833,23 @@ func eqfield(p *Node, q *Node, field *types.Sym) *Node {
// memequal(s.ptr, t.ptr, len(s))
// which can be used to construct string equality comparison.
// eqlen must be evaluated before eqmem, and shortcircuiting is required.
func eqstring(s, t *Node) (eqlen, eqmem *Node) {
s = conv(s, types.Types[TSTRING])
t = conv(t, types.Types[TSTRING])
sptr := nod(OSPTR, s, nil)
tptr := nod(OSPTR, t, nil)
slen := conv(nod(OLEN, s, nil), types.Types[TUINTPTR])
tlen := conv(nod(OLEN, t, nil), types.Types[TUINTPTR])
func eqstring(s, t *ir.Node) (eqlen, eqmem *ir.Node) {
s = conv(s, types.Types[types.TSTRING])
t = conv(t, types.Types[types.TSTRING])
sptr := ir.Nod(ir.OSPTR, s, nil)
tptr := ir.Nod(ir.OSPTR, t, nil)
slen := conv(ir.Nod(ir.OLEN, s, nil), types.Types[types.TUINTPTR])
tlen := conv(ir.Nod(ir.OLEN, t, nil), types.Types[types.TUINTPTR])
fn := syslook("memequal")
fn = substArgTypes(fn, types.Types[TUINT8], types.Types[TUINT8])
call := nod(OCALL, fn, nil)
call.List.Append(sptr, tptr, slen.copy())
fn = substArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
call := ir.Nod(ir.OCALL, fn, nil)
call.List.Append(sptr, tptr, ir.Copy(slen))
call = typecheck(call, ctxExpr|ctxMultiOK)
cmp := nod(OEQ, slen, tlen)
cmp := ir.Nod(ir.OEQ, slen, tlen)
cmp = typecheck(cmp, ctxExpr)
cmp.Type = types.Types[TBOOL]
cmp.Type = types.Types[types.TBOOL]
return cmp, call
}
@@ -858,48 +859,48 @@ func eqstring(s, t *Node) (eqlen, eqmem *Node) {
// ifaceeq(s.tab, s.data, t.data) (or efaceeq(s.typ, s.data, t.data), as appropriate)
// which can be used to construct interface equality comparison.
// eqtab must be evaluated before eqdata, and shortcircuiting is required.
func eqinterface(s, t *Node) (eqtab, eqdata *Node) {
func eqinterface(s, t *ir.Node) (eqtab, eqdata *ir.Node) {
if !types.Identical(s.Type, t.Type) {
base.Fatalf("eqinterface %v %v", s.Type, t.Type)
}
// func ifaceeq(tab *uintptr, x, y unsafe.Pointer) (ret bool)
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
var fn *Node
var fn *ir.Node
if s.Type.IsEmptyInterface() {
fn = syslook("efaceeq")
} else {
fn = syslook("ifaceeq")
}
stab := nod(OITAB, s, nil)
ttab := nod(OITAB, t, nil)
sdata := nod(OIDATA, s, nil)
tdata := nod(OIDATA, t, nil)
sdata.Type = types.Types[TUNSAFEPTR]
tdata.Type = types.Types[TUNSAFEPTR]
stab := ir.Nod(ir.OITAB, s, nil)
ttab := ir.Nod(ir.OITAB, t, nil)
sdata := ir.Nod(ir.OIDATA, s, nil)
tdata := ir.Nod(ir.OIDATA, t, nil)
sdata.Type = types.Types[types.TUNSAFEPTR]
tdata.Type = types.Types[types.TUNSAFEPTR]
sdata.SetTypecheck(1)
tdata.SetTypecheck(1)
call := nod(OCALL, fn, nil)
call := ir.Nod(ir.OCALL, fn, nil)
call.List.Append(stab, sdata, tdata)
call = typecheck(call, ctxExpr|ctxMultiOK)
cmp := nod(OEQ, stab, ttab)
cmp := ir.Nod(ir.OEQ, stab, ttab)
cmp = typecheck(cmp, ctxExpr)
cmp.Type = types.Types[TBOOL]
cmp.Type = types.Types[types.TBOOL]
return cmp, call
}
// eqmem returns the node
// memequal(&p.field, &q.field [, size])
func eqmem(p *Node, q *Node, field *types.Sym, size int64) *Node {
nx := nod(OADDR, nodSym(OXDOT, p, field), nil)
ny := nod(OADDR, nodSym(OXDOT, q, field), nil)
func eqmem(p *ir.Node, q *ir.Node, field *types.Sym, size int64) *ir.Node {
nx := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, p, field), nil)
ny := ir.Nod(ir.OADDR, nodSym(ir.OXDOT, q, field), nil)
nx = typecheck(nx, ctxExpr)
ny = typecheck(ny, ctxExpr)
fn, needsize := eqmemfunc(size, nx.Type.Elem())
call := nod(OCALL, fn, nil)
call := ir.Nod(ir.OCALL, fn, nil)
call.List.Append(nx)
call.List.Append(ny)
if needsize {
@@ -909,7 +910,7 @@ func eqmem(p *Node, q *Node, field *types.Sym, size int64) *Node {
return call
}
func eqmemfunc(size int64, t *types.Type) (fn *Node, needsize bool) {
func eqmemfunc(size int64, t *types.Type) (fn *ir.Node, needsize bool) {
switch size {
default:
fn = syslook("memequal")

@@ -7,6 +7,7 @@ package gc
import (
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"fmt"
"sort"
@@ -117,7 +118,7 @@ func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
o = Rnd(o, int64(f.Type.Align))
}
f.Offset = o
if n := asNode(f.Nname); n != nil {
if n := ir.AsNode(f.Nname); n != nil {
// addrescapes has similar code to update these offsets.
// Usually addrescapes runs after widstruct,
// in which case we could drop this,
@@ -197,7 +198,7 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
}
*path = append(*path, t)
if p := asNode(t.Nod).Name.Param; p != nil && findTypeLoop(p.Ntype.Type, path) {
if p := ir.AsNode(t.Nod).Name.Param; p != nil && findTypeLoop(p.Ntype.Type, path) {
return true
}
*path = (*path)[:len(*path)-1]
@@ -205,17 +206,17 @@ func findTypeLoop(t *types.Type, path *[]*types.Type) bool {
// Anonymous type. Recurse on contained types.
switch t.Etype {
case TARRAY:
case types.TARRAY:
if findTypeLoop(t.Elem(), path) {
return true
}
case TSTRUCT:
case types.TSTRUCT:
for _, f := range t.Fields().Slice() {
if findTypeLoop(f.Type, path) {
return true
}
}
case TINTER:
case types.TINTER:
for _, m := range t.Methods().Slice() {
if m.Type.IsInterface() { // embedded interface
if findTypeLoop(m.Type, path) {
@@ -306,8 +307,8 @@ func dowidth(t *types.Type) {
defercheckwidth()
lno := base.Pos
if asNode(t.Nod) != nil {
base.Pos = asNode(t.Nod).Pos
if ir.AsNode(t.Nod) != nil {
base.Pos = ir.AsNode(t.Nod).Pos
}
t.Width = -2
@@ -315,7 +316,7 @@ func dowidth(t *types.Type) {
et := t.Etype
switch et {
case TFUNC, TCHAN, TMAP, TSTRING:
case types.TFUNC, types.TCHAN, types.TMAP, types.TSTRING:
break
// simtype == 0 during bootstrap
@@ -331,41 +332,41 @@ func dowidth(t *types.Type) {
base.Fatalf("dowidth: unknown type: %v", t)
// compiler-specific stuff
case TINT8, TUINT8, TBOOL:
case types.TINT8, types.TUINT8, types.TBOOL:
// bool is int8
w = 1
case TINT16, TUINT16:
case types.TINT16, types.TUINT16:
w = 2
case TINT32, TUINT32, TFLOAT32:
case types.TINT32, types.TUINT32, types.TFLOAT32:
w = 4
case TINT64, TUINT64, TFLOAT64:
case types.TINT64, types.TUINT64, types.TFLOAT64:
w = 8
t.Align = uint8(Widthreg)
case TCOMPLEX64:
case types.TCOMPLEX64:
w = 8
t.Align = 4
case TCOMPLEX128:
case types.TCOMPLEX128:
w = 16
t.Align = uint8(Widthreg)
case TPTR:
case types.TPTR:
w = int64(Widthptr)
checkwidth(t.Elem())
case TUNSAFEPTR:
case types.TUNSAFEPTR:
w = int64(Widthptr)
case TINTER: // implemented as 2 pointers
case types.TINTER: // implemented as 2 pointers
w = 2 * int64(Widthptr)
t.Align = uint8(Widthptr)
expandiface(t)
case TCHAN: // implemented as pointer
case types.TCHAN: // implemented as pointer
w = int64(Widthptr)
checkwidth(t.Elem())
@@ -375,7 +376,7 @@ func dowidth(t *types.Type) {
t1 := types.NewChanArgs(t)
checkwidth(t1)
case TCHANARGS:
case types.TCHANARGS:
t1 := t.ChanArgs()
dowidth(t1) // just in case
if t1.Elem().Width >= 1<<16 {
@@ -383,27 +384,27 @@ func dowidth(t *types.Type) {
}
w = 1 // anything will do
case TMAP: // implemented as pointer
case types.TMAP: // implemented as pointer
w = int64(Widthptr)
checkwidth(t.Elem())
checkwidth(t.Key())
case TFORW: // should have been filled in
case types.TFORW: // should have been filled in
reportTypeLoop(t)
w = 1 // anything will do
case TANY:
case types.TANY:
// not a real type; should be replaced before use.
base.Fatalf("dowidth any")
case TSTRING:
case types.TSTRING:
if sizeofString == 0 {
base.Fatalf("early dowidth string")
}
w = sizeofString
t.Align = uint8(Widthptr)
case TARRAY:
case types.TARRAY:
if t.Elem() == nil {
break
}
@@ -418,7 +419,7 @@ func dowidth(t *types.Type) {
w = t.NumElem() * t.Elem().Width
t.Align = t.Elem().Align
case TSLICE:
case types.TSLICE:
if t.Elem() == nil {
break
}
@@ -426,7 +427,7 @@ func dowidth(t *types.Type) {
checkwidth(t.Elem())
t.Align = uint8(Widthptr)
case TSTRUCT:
case types.TSTRUCT:
if t.IsFuncArgStruct() {
base.Fatalf("dowidth fn struct %v", t)
}
@@ -434,14 +435,14 @@ func dowidth(t *types.Type) {
// make fake type to check later to
// trigger function argument computation.
case TFUNC:
case types.TFUNC:
t1 := types.NewFuncArgs(t)
checkwidth(t1)
w = int64(Widthptr) // width of func type is pointer
// function is 3 cated structures;
// compute their widths as side-effect.
case TFUNCARGS:
case types.TFUNCARGS:
t1 := t.FuncArgs()
w = widstruct(t1, t1.Recvs(), 0, 0)
w = widstruct(t1, t1.Params(), w, Widthreg)

@@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
@@ -13,8 +14,8 @@ type exporter struct {
}
// markObject visits a reachable object.
func (p *exporter) markObject(n *Node) {
if n.Op == ONAME && n.Class() == PFUNC {
func (p *exporter) markObject(n *ir.Node) {
if n.Op == ir.ONAME && n.Class() == ir.PFUNC {
inlFlood(n)
}
@@ -34,10 +35,10 @@ func (p *exporter) markType(t *types.Type) {
// only their unexpanded method set (i.e., exclusive of
// interface embeddings), and the switch statement below
// handles their full method set.
if t.Sym != nil && t.Etype != TINTER {
if t.Sym != nil && t.Etype != types.TINTER {
for _, m := range t.Methods().Slice() {
if types.IsExported(m.Sym.Name) {
p.markObject(asNode(m.Nname))
p.markObject(ir.AsNode(m.Nname))
}
}
}
@@ -52,31 +53,31 @@ func (p *exporter) markType(t *types.Type) {
// the user already needs some way to construct values of
// those types.
switch t.Etype {
case TPTR, TARRAY, TSLICE:
case types.TPTR, types.TARRAY, types.TSLICE:
p.markType(t.Elem())
case TCHAN:
case types.TCHAN:
if t.ChanDir().CanRecv() {
p.markType(t.Elem())
}
case TMAP:
case types.TMAP:
p.markType(t.Key())
p.markType(t.Elem())
case TSTRUCT:
case types.TSTRUCT:
for _, f := range t.FieldSlice() {
if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
p.markType(f.Type)
}
}
case TFUNC:
case types.TFUNC:
for _, f := range t.Results().FieldSlice() {
p.markType(f.Type)
}
case TINTER:
case types.TINTER:
for _, f := range t.FieldSlice() {
if types.IsExported(f.Sym.Name) {
p.markType(f.Type)
@@ -133,23 +134,23 @@ func predeclared() []*types.Type {
// elements have been initialized before
predecl = []*types.Type{
// basic types
types.Types[TBOOL],
types.Types[TINT],
types.Types[TINT8],
types.Types[TINT16],
types.Types[TINT32],
types.Types[TINT64],
types.Types[TUINT],
types.Types[TUINT8],
types.Types[TUINT16],
types.Types[TUINT32],
types.Types[TUINT64],
types.Types[TUINTPTR],
types.Types[TFLOAT32],
types.Types[TFLOAT64],
types.Types[TCOMPLEX64],
types.Types[TCOMPLEX128],
types.Types[TSTRING],
types.Types[types.TBOOL],
types.Types[types.TINT],
types.Types[types.TINT8],
types.Types[types.TINT16],
types.Types[types.TINT32],
types.Types[types.TINT64],
types.Types[types.TUINT],
types.Types[types.TUINT8],
types.Types[types.TUINT16],
types.Types[types.TUINT32],
types.Types[types.TUINT64],
types.Types[types.TUINTPTR],
types.Types[types.TFLOAT32],
types.Types[types.TFLOAT64],
types.Types[types.TCOMPLEX64],
types.Types[types.TCOMPLEX128],
types.Types[types.TSTRING],
// basic type aliases
types.Bytetype,
@@ -165,16 +166,16 @@ func predeclared() []*types.Type {
types.UntypedFloat,
types.UntypedComplex,
types.UntypedString,
types.Types[TNIL],
types.Types[types.TNIL],
// package unsafe
types.Types[TUNSAFEPTR],
types.Types[types.TUNSAFEPTR],
// invalid type (package contains errors)
types.Types[Txxx],
types.Types[types.Txxx],
// any type, for builtin export data
types.Types[TANY],
types.Types[types.TANY],
}
}
return predecl

@@ -5,20 +5,15 @@
package gc
import (
"cmd/compile/internal/ir"
"cmd/internal/src"
)
// numImport tracks how often a package with a given name is imported.
// It is used to provide a better error message (by using the package
// path to disambiguate) if a package that appears multiple times with
// the same name appears in an error message.
var numImport = make(map[string]int)
func npos(pos src.XPos, n *Node) *Node {
func npos(pos src.XPos, n *ir.Node) *ir.Node {
n.Pos = pos
return n
}
func builtinCall(op Op) *Node {
return nod(OCALL, mkname(builtinpkg.Lookup(goopnames[op])), nil)
func builtinCall(op ir.Op) *ir.Node {
return ir.Nod(ir.OCALL, mkname(ir.BuiltinPkg.Lookup(ir.OpNames[op])), nil)
}

@@ -2,7 +2,10 @@
package gc
import "cmd/compile/internal/types"
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
var runtimeDecls = [...]struct {
name string
@@ -205,134 +208,134 @@ func runtimeTypes() []*types.Type {
var typs [131]*types.Type
typs[0] = types.Bytetype
typs[1] = types.NewPtr(typs[0])
typs[2] = types.Types[TANY]
typs[2] = types.Types[types.TANY]
typs[3] = types.NewPtr(typs[2])
typs[4] = functype(nil, []*Node{anonfield(typs[1])}, []*Node{anonfield(typs[3])})
typs[5] = types.Types[TUINTPTR]
typs[6] = types.Types[TBOOL]
typs[7] = types.Types[TUNSAFEPTR]
typs[8] = functype(nil, []*Node{anonfield(typs[5]), anonfield(typs[1]), anonfield(typs[6])}, []*Node{anonfield(typs[7])})
typs[4] = functype(nil, []*ir.Node{anonfield(typs[1])}, []*ir.Node{anonfield(typs[3])})
typs[5] = types.Types[types.TUINTPTR]
typs[6] = types.Types[types.TBOOL]
typs[7] = types.Types[types.TUNSAFEPTR]
typs[8] = functype(nil, []*ir.Node{anonfield(typs[5]), anonfield(typs[1]), anonfield(typs[6])}, []*ir.Node{anonfield(typs[7])})
typs[9] = functype(nil, nil, nil)
typs[10] = types.Types[TINTER]
typs[11] = functype(nil, []*Node{anonfield(typs[10])}, nil)
typs[12] = types.Types[TINT32]
typs[10] = types.Types[types.TINTER]
typs[11] = functype(nil, []*ir.Node{anonfield(typs[10])}, nil)
typs[12] = types.Types[types.TINT32]
typs[13] = types.NewPtr(typs[12])
typs[14] = functype(nil, []*Node{anonfield(typs[13])}, []*Node{anonfield(typs[10])})
typs[15] = types.Types[TINT]
typs[16] = functype(nil, []*Node{anonfield(typs[15]), anonfield(typs[15])}, nil)
typs[17] = types.Types[TUINT]
typs[18] = functype(nil, []*Node{anonfield(typs[17]), anonfield(typs[15])}, nil)
typs[19] = functype(nil, []*Node{anonfield(typs[6])}, nil)
typs[20] = types.Types[TFLOAT64]
typs[21] = functype(nil, []*Node{anonfield(typs[20])}, nil)
typs[22] = types.Types[TINT64]
typs[23] = functype(nil, []*Node{anonfield(typs[22])}, nil)
typs[24] = types.Types[TUINT64]
typs[25] = functype(nil, []*Node{anonfield(typs[24])}, nil)
typs[26] = types.Types[TCOMPLEX128]
typs[27] = functype(nil, []*Node{anonfield(typs[26])}, nil)
typs[28] = types.Types[TSTRING]
typs[29] = functype(nil, []*Node{anonfield(typs[28])}, nil)
typs[30] = functype(nil, []*Node{anonfield(typs[2])}, nil)
typs[31] = functype(nil, []*Node{anonfield(typs[5])}, nil)
typs[14] = functype(nil, []*ir.Node{anonfield(typs[13])}, []*ir.Node{anonfield(typs[10])})
typs[15] = types.Types[types.TINT]
typs[16] = functype(nil, []*ir.Node{anonfield(typs[15]), anonfield(typs[15])}, nil)
typs[17] = types.Types[types.TUINT]
typs[18] = functype(nil, []*ir.Node{anonfield(typs[17]), anonfield(typs[15])}, nil)
typs[19] = functype(nil, []*ir.Node{anonfield(typs[6])}, nil)
typs[20] = types.Types[types.TFLOAT64]
typs[21] = functype(nil, []*ir.Node{anonfield(typs[20])}, nil)
typs[22] = types.Types[types.TINT64]
typs[23] = functype(nil, []*ir.Node{anonfield(typs[22])}, nil)
typs[24] = types.Types[types.TUINT64]
typs[25] = functype(nil, []*ir.Node{anonfield(typs[24])}, nil)
typs[26] = types.Types[types.TCOMPLEX128]
typs[27] = functype(nil, []*ir.Node{anonfield(typs[26])}, nil)
typs[28] = types.Types[types.TSTRING]
typs[29] = functype(nil, []*ir.Node{anonfield(typs[28])}, nil)
typs[30] = functype(nil, []*ir.Node{anonfield(typs[2])}, nil)
typs[31] = functype(nil, []*ir.Node{anonfield(typs[5])}, nil)
typs[32] = types.NewArray(typs[0], 32)
typs[33] = types.NewPtr(typs[32])
typs[34] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
typs[35] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
typs[36] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
typs[37] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[28])})
typs[34] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])})
typs[35] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])})
typs[36] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])})
typs[37] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[28])})
typs[38] = types.NewSlice(typs[28])
typs[39] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[38])}, []*Node{anonfield(typs[28])})
typs[40] = functype(nil, []*Node{anonfield(typs[28]), anonfield(typs[28])}, []*Node{anonfield(typs[15])})
typs[39] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[38])}, []*ir.Node{anonfield(typs[28])})
typs[40] = functype(nil, []*ir.Node{anonfield(typs[28]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[15])})
typs[41] = types.NewArray(typs[0], 4)
typs[42] = types.NewPtr(typs[41])
typs[43] = functype(nil, []*Node{anonfield(typs[42]), anonfield(typs[22])}, []*Node{anonfield(typs[28])})
typs[44] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[28])})
typs[45] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[28])})
typs[43] = functype(nil, []*ir.Node{anonfield(typs[42]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[28])})
typs[44] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[1]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[28])})
typs[45] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[28])})
typs[46] = types.Runetype
typs[47] = types.NewSlice(typs[46])
typs[48] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[47])}, []*Node{anonfield(typs[28])})
typs[48] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[47])}, []*ir.Node{anonfield(typs[28])})
typs[49] = types.NewSlice(typs[0])
typs[50] = functype(nil, []*Node{anonfield(typs[33]), anonfield(typs[28])}, []*Node{anonfield(typs[49])})
typs[50] = functype(nil, []*ir.Node{anonfield(typs[33]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[49])})
typs[51] = types.NewArray(typs[46], 32)
typs[52] = types.NewPtr(typs[51])
typs[53] = functype(nil, []*Node{anonfield(typs[52]), anonfield(typs[28])}, []*Node{anonfield(typs[47])})
typs[54] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[5])}, []*Node{anonfield(typs[15])})
typs[55] = functype(nil, []*Node{anonfield(typs[28]), anonfield(typs[15])}, []*Node{anonfield(typs[46]), anonfield(typs[15])})
typs[56] = functype(nil, []*Node{anonfield(typs[28])}, []*Node{anonfield(typs[15])})
typs[57] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2])}, []*Node{anonfield(typs[2])})
typs[58] = functype(nil, []*Node{anonfield(typs[2])}, []*Node{anonfield(typs[7])})
typs[59] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3])}, []*Node{anonfield(typs[2])})
typs[60] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2])}, []*Node{anonfield(typs[2]), anonfield(typs[6])})
typs[61] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil)
typs[62] = functype(nil, []*Node{anonfield(typs[1])}, nil)
typs[53] = functype(nil, []*ir.Node{anonfield(typs[52]), anonfield(typs[28])}, []*ir.Node{anonfield(typs[47])})
typs[54] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[15])})
typs[55] = functype(nil, []*ir.Node{anonfield(typs[28]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[46]), anonfield(typs[15])})
typs[56] = functype(nil, []*ir.Node{anonfield(typs[28])}, []*ir.Node{anonfield(typs[15])})
typs[57] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[2])})
typs[58] = functype(nil, []*ir.Node{anonfield(typs[2])}, []*ir.Node{anonfield(typs[7])})
typs[59] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[2])})
typs[60] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[2]), anonfield(typs[6])})
typs[61] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil)
typs[62] = functype(nil, []*ir.Node{anonfield(typs[1])}, nil)
typs[63] = types.NewPtr(typs[5])
typs[64] = functype(nil, []*Node{anonfield(typs[63]), anonfield(typs[7]), anonfield(typs[7])}, []*Node{anonfield(typs[6])})
typs[65] = types.Types[TUINT32]
typs[66] = functype(nil, nil, []*Node{anonfield(typs[65])})
typs[64] = functype(nil, []*ir.Node{anonfield(typs[63]), anonfield(typs[7]), anonfield(typs[7])}, []*ir.Node{anonfield(typs[6])})
typs[65] = types.Types[types.TUINT32]
typs[66] = functype(nil, nil, []*ir.Node{anonfield(typs[65])})
typs[67] = types.NewMap(typs[2], typs[2])
typs[68] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[3])}, []*Node{anonfield(typs[67])})
typs[69] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3])}, []*Node{anonfield(typs[67])})
typs[70] = functype(nil, nil, []*Node{anonfield(typs[67])})
typs[71] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*Node{anonfield(typs[3])})
typs[72] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*Node{anonfield(typs[3])})
typs[73] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*Node{anonfield(typs[3])})
typs[74] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*Node{anonfield(typs[3]), anonfield(typs[6])})
typs[75] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*Node{anonfield(typs[3]), anonfield(typs[6])})
typs[76] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*Node{anonfield(typs[3]), anonfield(typs[6])})
typs[77] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, nil)
typs[78] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, nil)
typs[79] = functype(nil, []*Node{anonfield(typs[3])}, nil)
typs[80] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[67])}, nil)
typs[68] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[67])})
typs[69] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[67])})
typs[70] = functype(nil, nil, []*ir.Node{anonfield(typs[67])})
typs[71] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[3])})
typs[72] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[3])})
typs[73] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*ir.Node{anonfield(typs[3])})
typs[74] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[3]), anonfield(typs[6])})
typs[75] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, []*ir.Node{anonfield(typs[3]), anonfield(typs[6])})
typs[76] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3]), anonfield(typs[1])}, []*ir.Node{anonfield(typs[3]), anonfield(typs[6])})
typs[77] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[3])}, nil)
typs[78] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67]), anonfield(typs[2])}, nil)
typs[79] = functype(nil, []*ir.Node{anonfield(typs[3])}, nil)
typs[80] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[67])}, nil)
typs[81] = types.NewChan(typs[2], types.Cboth)
typs[82] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[22])}, []*Node{anonfield(typs[81])})
typs[83] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[81])})
typs[82] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[81])})
typs[83] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[81])})
typs[84] = types.NewChan(typs[2], types.Crecv)
typs[85] = functype(nil, []*Node{anonfield(typs[84]), anonfield(typs[3])}, nil)
typs[86] = functype(nil, []*Node{anonfield(typs[84]), anonfield(typs[3])}, []*Node{anonfield(typs[6])})
typs[85] = functype(nil, []*ir.Node{anonfield(typs[84]), anonfield(typs[3])}, nil)
typs[86] = functype(nil, []*ir.Node{anonfield(typs[84]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[6])})
typs[87] = types.NewChan(typs[2], types.Csend)
typs[88] = functype(nil, []*Node{anonfield(typs[87]), anonfield(typs[3])}, nil)
typs[88] = functype(nil, []*ir.Node{anonfield(typs[87]), anonfield(typs[3])}, nil)
typs[89] = types.NewArray(typs[0], 3)
typs[90] = tostruct([]*Node{namedfield("enabled", typs[6]), namedfield("pad", typs[89]), namedfield("needed", typs[6]), namedfield("cgo", typs[6]), namedfield("alignme", typs[24])})
typs[91] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil)
typs[92] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3])}, nil)
typs[93] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15])}, []*Node{anonfield(typs[15])})
typs[94] = functype(nil, []*Node{anonfield(typs[87]), anonfield(typs[3])}, []*Node{anonfield(typs[6])})
typs[95] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[84])}, []*Node{anonfield(typs[6])})
typs[90] = tostruct([]*ir.Node{namedfield("enabled", typs[6]), namedfield("pad", typs[89]), namedfield("needed", typs[6]), namedfield("cgo", typs[6]), namedfield("alignme", typs[24])})
typs[91] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil)
typs[92] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3])}, nil)
typs[93] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[15])})
typs[94] = functype(nil, []*ir.Node{anonfield(typs[87]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[6])})
typs[95] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[84])}, []*ir.Node{anonfield(typs[6])})
typs[96] = types.NewPtr(typs[6])
typs[97] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[96]), anonfield(typs[84])}, []*Node{anonfield(typs[6])})
typs[98] = functype(nil, []*Node{anonfield(typs[63])}, nil)
typs[99] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[63]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[6])}, []*Node{anonfield(typs[15]), anonfield(typs[6])})
typs[100] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []*Node{anonfield(typs[7])})
typs[101] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[22])}, []*Node{anonfield(typs[7])})
typs[102] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[7])}, []*Node{anonfield(typs[7])})
typs[97] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[96]), anonfield(typs[84])}, []*ir.Node{anonfield(typs[6])})
typs[98] = functype(nil, []*ir.Node{anonfield(typs[63])}, nil)
typs[99] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[63]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[6])}, []*ir.Node{anonfield(typs[15]), anonfield(typs[6])})
typs[100] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[7])})
typs[101] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[22]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[7])})
typs[102] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15]), anonfield(typs[7])}, []*ir.Node{anonfield(typs[7])})
typs[103] = types.NewSlice(typs[2])
typs[104] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[103]), anonfield(typs[15])}, []*Node{anonfield(typs[103])})
typs[105] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, nil)
typs[106] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[5])}, nil)
typs[107] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, []*Node{anonfield(typs[6])})
typs[108] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[3])}, []*Node{anonfield(typs[6])})
typs[109] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[7])}, []*Node{anonfield(typs[6])})
typs[110] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[5]), anonfield(typs[5])}, []*Node{anonfield(typs[5])})
typs[111] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[5])}, []*Node{anonfield(typs[5])})
typs[112] = functype(nil, []*Node{anonfield(typs[22]), anonfield(typs[22])}, []*Node{anonfield(typs[22])})
typs[113] = functype(nil, []*Node{anonfield(typs[24]), anonfield(typs[24])}, []*Node{anonfield(typs[24])})
typs[114] = functype(nil, []*Node{anonfield(typs[20])}, []*Node{anonfield(typs[22])})
typs[115] = functype(nil, []*Node{anonfield(typs[20])}, []*Node{anonfield(typs[24])})
typs[116] = functype(nil, []*Node{anonfield(typs[20])}, []*Node{anonfield(typs[65])})
typs[117] = functype(nil, []*Node{anonfield(typs[22])}, []*Node{anonfield(typs[20])})
typs[118] = functype(nil, []*Node{anonfield(typs[24])}, []*Node{anonfield(typs[20])})
typs[119] = functype(nil, []*Node{anonfield(typs[65])}, []*Node{anonfield(typs[20])})
typs[120] = functype(nil, []*Node{anonfield(typs[26]), anonfield(typs[26])}, []*Node{anonfield(typs[26])})
typs[121] = functype(nil, []*Node{anonfield(typs[5]), anonfield(typs[5])}, nil)
typs[122] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[1]), anonfield(typs[5])}, nil)
typs[104] = functype(nil, []*ir.Node{anonfield(typs[1]), anonfield(typs[103]), anonfield(typs[15])}, []*ir.Node{anonfield(typs[103])})
typs[105] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, nil)
typs[106] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[5])}, nil)
typs[107] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[3]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[6])})
typs[108] = functype(nil, []*ir.Node{anonfield(typs[3]), anonfield(typs[3])}, []*ir.Node{anonfield(typs[6])})
typs[109] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[7])}, []*ir.Node{anonfield(typs[6])})
typs[110] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[5]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[5])})
typs[111] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[5])}, []*ir.Node{anonfield(typs[5])})
typs[112] = functype(nil, []*ir.Node{anonfield(typs[22]), anonfield(typs[22])}, []*ir.Node{anonfield(typs[22])})
typs[113] = functype(nil, []*ir.Node{anonfield(typs[24]), anonfield(typs[24])}, []*ir.Node{anonfield(typs[24])})
typs[114] = functype(nil, []*ir.Node{anonfield(typs[20])}, []*ir.Node{anonfield(typs[22])})
typs[115] = functype(nil, []*ir.Node{anonfield(typs[20])}, []*ir.Node{anonfield(typs[24])})
typs[116] = functype(nil, []*ir.Node{anonfield(typs[20])}, []*ir.Node{anonfield(typs[65])})
typs[117] = functype(nil, []*ir.Node{anonfield(typs[22])}, []*ir.Node{anonfield(typs[20])})
typs[118] = functype(nil, []*ir.Node{anonfield(typs[24])}, []*ir.Node{anonfield(typs[20])})
typs[119] = functype(nil, []*ir.Node{anonfield(typs[65])}, []*ir.Node{anonfield(typs[20])})
typs[120] = functype(nil, []*ir.Node{anonfield(typs[26]), anonfield(typs[26])}, []*ir.Node{anonfield(typs[26])})
typs[121] = functype(nil, []*ir.Node{anonfield(typs[5]), anonfield(typs[5])}, nil)
typs[122] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[1]), anonfield(typs[5])}, nil)
typs[123] = types.NewSlice(typs[7])
typs[124] = functype(nil, []*Node{anonfield(typs[7]), anonfield(typs[123])}, nil)
typs[125] = types.Types[TUINT8]
typs[126] = functype(nil, []*Node{anonfield(typs[125]), anonfield(typs[125])}, nil)
typs[127] = types.Types[TUINT16]
typs[128] = functype(nil, []*Node{anonfield(typs[127]), anonfield(typs[127])}, nil)
typs[129] = functype(nil, []*Node{anonfield(typs[65]), anonfield(typs[65])}, nil)
typs[130] = functype(nil, []*Node{anonfield(typs[24]), anonfield(typs[24])}, nil)
typs[124] = functype(nil, []*ir.Node{anonfield(typs[7]), anonfield(typs[123])}, nil)
typs[125] = types.Types[types.TUINT8]
typs[126] = functype(nil, []*ir.Node{anonfield(typs[125]), anonfield(typs[125])}, nil)
typs[127] = types.Types[types.TUINT16]
typs[128] = functype(nil, []*ir.Node{anonfield(typs[127]), anonfield(typs[127])}, nil)
typs[129] = functype(nil, []*ir.Node{anonfield(typs[65]), anonfield(typs[65])}, nil)
typs[130] = functype(nil, []*ir.Node{anonfield(typs[24]), anonfield(typs[24])}, nil)
return typs[:]
}
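
For orientation: each entry in this generated table pairs a parameter
list with a result list, both assembled from earlier entries. A minimal
sketch, with made-up indices, of how a runtime helper with signature
func(unsafe.Pointer, uintptr) uintptr would be described:

        ptr := typs[7] // assumed index for unsafe.Pointer
        up := typs[5]  // assumed index for uintptr
        sig := functype(nil,
                []*ir.Node{anonfield(ptr), anonfield(up)}, // parameters
                []*ir.Node{anonfield(up)})                 // result

The nil receiver marks a plain function rather than a method, and
anonfield wraps a type in an unnamed ODCLFIELD node.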

@ -6,24 +6,25 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
)
func (p *noder) funcLit(expr *syntax.FuncLit) *Node {
func (p *noder) funcLit(expr *syntax.FuncLit) *ir.Node {
xtype := p.typeExpr(expr.Type)
ntype := p.typeExpr(expr.Type)
dcl := p.nod(expr, ODCLFUNC, nil, nil)
dcl := p.nod(expr, ir.ODCLFUNC, nil, nil)
fn := dcl.Func
fn.SetIsHiddenClosure(Curfn != nil)
fn.Nname = newfuncnamel(p.pos(expr), nblank.Sym, fn) // filled in by typecheckclosure
fn.Nname = newfuncnamel(p.pos(expr), ir.BlankNode.Sym, fn) // filled in by typecheckclosure
fn.Nname.Name.Param.Ntype = xtype
fn.Nname.Name.Defn = dcl
clo := p.nod(expr, OCLOSURE, nil, nil)
clo := p.nod(expr, ir.OCLOSURE, nil, nil)
clo.Func = fn
fn.ClosureType = ntype
fn.OClosure = clo
@ -77,7 +78,7 @@ func (p *noder) funcLit(expr *syntax.FuncLit) *Node {
// function associated with the closure.
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
func typecheckclosure(clo *Node, top int) {
func typecheckclosure(clo *ir.Node, top int) {
fn := clo.Func
dcl := fn.Decl
// Set current associated iota value, so iota can be used inside
@ -139,7 +140,7 @@ var globClosgen int
// closurename generates a new unique name for a closure within
// outerfunc.
func closurename(outerfunc *Node) *types.Sym {
func closurename(outerfunc *ir.Node) *types.Sym {
outer := "glob."
prefix := "func"
gen := &globClosgen
@ -149,12 +150,12 @@ func closurename(outerfunc *Node) *types.Sym {
prefix = ""
}
outer = outerfunc.funcname()
outer = ir.FuncName(outerfunc)
// There may be multiple functions named "_". In those
// cases, we can't use their individual Closgens as it
// would lead to name clashes.
if !outerfunc.Func.Nname.isBlank() {
if !ir.IsBlank(outerfunc.Func.Nname) {
gen = &outerfunc.Func.Closgen
}
}
@ -171,7 +172,7 @@ var capturevarscomplete bool
// by value or by reference.
// We use value capturing for values <= 128 bytes that are never reassigned
// after capturing (effectively constant).
func capturevars(dcl *Node) {
func capturevars(dcl *ir.Node) {
lno := base.Pos
base.Pos = dcl.Pos
fn := dcl.Func
@ -197,11 +198,11 @@ func capturevars(dcl *Node) {
outermost := v.Name.Defn
// out parameters will be assigned to implicitly upon return.
if outermost.Class() != PPARAMOUT && !outermost.Name.Addrtaken() && !outermost.Name.Assigned() && v.Type.Width <= 128 {
if outermost.Class() != ir.PPARAMOUT && !outermost.Name.Addrtaken() && !outermost.Name.Assigned() && v.Type.Width <= 128 {
v.Name.SetByval(true)
} else {
outermost.Name.SetAddrtaken(true)
outer = nod(OADDR, outer, nil)
outer = ir.Nod(ir.OADDR, outer, nil)
}
if base.Flag.LowerM > 1 {
@ -226,7 +227,7 @@ func capturevars(dcl *Node) {
// transformclosure is called in a separate phase after escape analysis.
// It transforms closure bodies to properly reference captured variables.
func transformclosure(dcl *Node) {
func transformclosure(dcl *ir.Node) {
lno := base.Pos
base.Pos = dcl.Pos
fn := dcl.Func
@ -252,24 +253,24 @@ func transformclosure(dcl *Node) {
// We are going to insert captured variables before input args.
var params []*types.Field
var decls []*Node
var decls []*ir.Node
for _, v := range fn.ClosureVars.Slice() {
if !v.Name.Byval() {
// If v of type T is captured by reference,
// we introduce function param &v *T
// and v remains PAUTOHEAP with &v heapaddr
// (accesses will implicitly deref &v).
addr := newname(lookup("&" + v.Sym.Name))
addr := NewName(lookup("&" + v.Sym.Name))
addr.Type = types.NewPtr(v.Type)
v.Name.Param.Heapaddr = addr
v = addr
}
v.SetClass(PPARAM)
v.SetClass(ir.PPARAM)
decls = append(decls, v)
fld := types.NewField(src.NoXPos, v.Sym, v.Type)
fld.Nname = asTypesNode(v)
fld.Nname = ir.AsTypesNode(v)
params = append(params, fld)
}
@ -283,11 +284,11 @@ func transformclosure(dcl *Node) {
dcl.Type = f.Type // update type of ODCLFUNC
} else {
// The closure is not called, so it is going to stay as closure.
var body []*Node
var body []*ir.Node
offset := int64(Widthptr)
for _, v := range fn.ClosureVars.Slice() {
// cv refers to the field inside of closure OSTRUCTLIT.
cv := nod(OCLOSUREVAR, nil, nil)
cv := ir.Nod(ir.OCLOSUREVAR, nil, nil)
cv.Type = v.Type
if !v.Name.Byval() {
@ -299,23 +300,23 @@ func transformclosure(dcl *Node) {
if v.Name.Byval() && v.Type.Width <= int64(2*Widthptr) {
// If it is a small variable captured by value, downgrade it to PAUTO.
v.SetClass(PAUTO)
v.SetClass(ir.PAUTO)
fn.Dcl = append(fn.Dcl, v)
body = append(body, nod(OAS, v, cv))
body = append(body, ir.Nod(ir.OAS, v, cv))
} else {
// Declare variable holding addresses taken from closure
// and initialize in entry prologue.
addr := newname(lookup("&" + v.Sym.Name))
addr := NewName(lookup("&" + v.Sym.Name))
addr.Type = types.NewPtr(v.Type)
addr.SetClass(PAUTO)
addr.SetClass(ir.PAUTO)
addr.Name.SetUsed(true)
addr.Name.Curfn = dcl
fn.Dcl = append(fn.Dcl, addr)
v.Name.Param.Heapaddr = addr
if v.Name.Byval() {
cv = nod(OADDR, cv, nil)
cv = ir.Nod(ir.OADDR, cv, nil)
}
body = append(body, nod(OAS, addr, cv))
body = append(body, ir.Nod(ir.OAS, addr, cv))
}
}
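
As a source-level sketch of the two capture strategies handled by
capturevars and transformclosure, using the rules quoted above:

        func counter() func() int {
                x := 0 // reassigned inside the closure: captured by reference (&x)
                y := 5 // never reassigned and <= 128 bytes: captured by value
                return func() int {
                        x++
                        return x + y
                }
        }

Here x stays PAUTOHEAP behind a heap address, while y is copied into
the closure and, being small, can be downgraded to a plain PAUTO.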
@ -331,13 +332,13 @@ func transformclosure(dcl *Node) {
// hasemptycvars reports whether closure clo has an
// empty list of captured vars.
func hasemptycvars(clo *Node) bool {
func hasemptycvars(clo *ir.Node) bool {
return clo.Func.ClosureVars.Len() == 0
}
// closuredebugruntimecheck applies boilerplate checks for debug flags
// and compiling runtime
func closuredebugruntimecheck(clo *Node) {
func closuredebugruntimecheck(clo *ir.Node) {
if base.Debug.Closure > 0 {
if clo.Esc == EscHeap {
base.WarnfAt(clo.Pos, "heap closure, captured vars = %v", clo.Func.ClosureVars)
@ -353,7 +354,7 @@ func closuredebugruntimecheck(clo *Node) {
// closureType returns the struct type used to hold all the information
// needed in the closure for clo (clo must be an OCLOSURE node).
// The address of a variable of the returned type can be cast to a func.
func closureType(clo *Node) *types.Type {
func closureType(clo *ir.Node) *types.Type {
// Create closure in the form of a composite literal.
// supposing the closure captures an int i and a string s
// and has one float64 argument and no results,
@ -367,8 +368,8 @@ func closureType(clo *Node) *types.Type {
// The information appears in the binary in the form of type descriptors;
// the struct is unnamed so that closures in multiple packages with the
// same struct type can share the descriptor.
fields := []*Node{
namedfield(".F", types.Types[TUINTPTR]),
fields := []*ir.Node{
namedfield(".F", types.Types[types.TUINTPTR]),
}
for _, v := range clo.Func.ClosureVars.Slice() {
typ := v.Type
@ -382,7 +383,7 @@ func closureType(clo *Node) *types.Type {
return typ
}
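
Concretely, for the capture-by-reference example in the comment above,
the unnamed struct built by closureType looks roughly like:

        clos = &struct {
                F uintptr // code pointer of the compiled closure body
                i *int    // captured by reference
                s *string // captured by reference
        }{func·001, &i, &s}

where func·001 stands in for the compiler-generated function name;
by-value captures would store i int and s string directly.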
func walkclosure(clo *Node, init *Nodes) *Node {
func walkclosure(clo *ir.Node, init *ir.Nodes) *ir.Node {
fn := clo.Func
// If no closure vars, don't bother wrapping.
@ -396,11 +397,11 @@ func walkclosure(clo *Node, init *Nodes) *Node {
typ := closureType(clo)
clos := nod(OCOMPLIT, nil, typenod(typ))
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
clos.Esc = clo.Esc
clos.List.Set(append([]*Node{nod(OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...))
clos.List.Set(append([]*ir.Node{ir.Nod(ir.OCFUNC, fn.Nname, nil)}, fn.ClosureEnter.Slice()...))
clos = nod(OADDR, clos, nil)
clos = ir.Nod(ir.OADDR, clos, nil)
clos.Esc = clo.Esc
// Force type conversion from *struct to the func type.
@ -418,9 +419,9 @@ func walkclosure(clo *Node, init *Nodes) *Node {
return walkexpr(clos, init)
}
func typecheckpartialcall(dot *Node, sym *types.Sym) {
func typecheckpartialcall(dot *ir.Node, sym *types.Sym) {
switch dot.Op {
case ODOTINTER, ODOTMETH:
case ir.ODOTINTER, ir.ODOTMETH:
break
default:
@ -430,8 +431,8 @@ func typecheckpartialcall(dot *Node, sym *types.Sym) {
// Create top-level function.
dcl := makepartialcall(dot, dot.Type, sym)
dcl.Func.SetWrapper(true)
dot.Op = OCALLPART
dot.Right = newname(sym)
dot.Op = ir.OCALLPART
dot.Right = NewName(sym)
dot.Type = dcl.Type
dot.Func = dcl.Func
dot.SetOpt(nil) // clear types.Field from ODOTMETH
@ -439,12 +440,12 @@ func typecheckpartialcall(dot *Node, sym *types.Sym) {
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node {
func makepartialcall(dot *ir.Node, t0 *types.Type, meth *types.Sym) *ir.Node {
rcvrtype := dot.Left.Type
sym := methodSymSuffix(rcvrtype, meth, "-fm")
if sym.Uniq() {
return asNode(sym.Def)
return ir.AsNode(sym.Def)
}
sym.SetUniq(true)
@ -463,7 +464,7 @@ func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node {
// number at the use of the method expression in this
// case. See issue 29389.
tfn := nod(OTFUNC, nil, nil)
tfn := ir.Nod(ir.OTFUNC, nil, nil)
tfn.List.Set(structargs(t0.Params(), true))
tfn.Rlist.Set(structargs(t0.Results(), false))
@ -476,27 +477,27 @@ func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node {
// Declare and initialize variable holding receiver.
cv := nod(OCLOSUREVAR, nil, nil)
cv := ir.Nod(ir.OCLOSUREVAR, nil, nil)
cv.Type = rcvrtype
cv.Xoffset = Rnd(int64(Widthptr), int64(cv.Type.Align))
ptr := newname(lookup(".this"))
declare(ptr, PAUTO)
ptr := NewName(lookup(".this"))
declare(ptr, ir.PAUTO)
ptr.Name.SetUsed(true)
var body []*Node
var body []*ir.Node
if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
ptr.Type = rcvrtype
body = append(body, nod(OAS, ptr, cv))
body = append(body, ir.Nod(ir.OAS, ptr, cv))
} else {
ptr.Type = types.NewPtr(rcvrtype)
body = append(body, nod(OAS, ptr, nod(OADDR, cv, nil)))
body = append(body, ir.Nod(ir.OAS, ptr, ir.Nod(ir.OADDR, cv, nil)))
}
call := nod(OCALL, nodSym(OXDOT, ptr, meth), nil)
call := ir.Nod(ir.OCALL, nodSym(ir.OXDOT, ptr, meth), nil)
call.List.Set(paramNnames(tfn.Type))
call.SetIsDDD(tfn.Type.IsVariadic())
if t0.NumResults() != 0 {
n := nod(ORETURN, nil, nil)
n := ir.Nod(ir.ORETURN, nil, nil)
n.List.Set1(call)
call = n
}
@ -510,7 +511,7 @@ func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node {
// typecheckslice() requires that Curfn is set when processing an ORETURN.
Curfn = dcl
typecheckslice(dcl.Nbody.Slice(), ctxStmt)
sym.Def = asTypesNode(dcl)
sym.Def = ir.AsTypesNode(dcl)
xtop = append(xtop, dcl)
Curfn = savecurfn
base.Pos = saveLineNo
@ -521,16 +522,16 @@ func makepartialcall(dot *Node, t0 *types.Type, meth *types.Sym) *Node {
// partialCallType returns the struct type used to hold all the information
// needed in the closure for n (n must be an OCALLPART node).
// The address of a variable of the returned type can be cast to a func.
func partialCallType(n *Node) *types.Type {
t := tostruct([]*Node{
namedfield("F", types.Types[TUINTPTR]),
func partialCallType(n *ir.Node) *types.Type {
t := tostruct([]*ir.Node{
namedfield("F", types.Types[types.TUINTPTR]),
namedfield("R", n.Left.Type),
})
t.SetNoalg(true)
return t
}
func walkpartialcall(n *Node, init *Nodes) *Node {
func walkpartialcall(n *ir.Node, init *ir.Nodes) *ir.Node {
// Create closure in the form of a composite literal.
// For x.M with receiver (x) type T, the generated code looks like:
//
@ -544,21 +545,21 @@ func walkpartialcall(n *Node, init *Nodes) *Node {
n.Left = cheapexpr(n.Left, init)
n.Left = walkexpr(n.Left, nil)
tab := nod(OITAB, n.Left, nil)
tab := ir.Nod(ir.OITAB, n.Left, nil)
tab = typecheck(tab, ctxExpr)
c := nod(OCHECKNIL, tab, nil)
c := ir.Nod(ir.OCHECKNIL, tab, nil)
c.SetTypecheck(1)
init.Append(c)
}
typ := partialCallType(n)
clos := nod(OCOMPLIT, nil, typenod(typ))
clos := ir.Nod(ir.OCOMPLIT, nil, typenod(typ))
clos.Esc = n.Esc
clos.List.Set2(nod(OCFUNC, n.Func.Nname, nil), n.Left)
clos.List.Set2(ir.Nod(ir.OCFUNC, n.Func.Nname, nil), n.Left)
clos = nod(OADDR, clos, nil)
clos = ir.Nod(ir.OADDR, clos, nil)
clos.Esc = n.Esc
// Force type conversion from *struct to the func type.
@ -578,8 +579,8 @@ func walkpartialcall(n *Node, init *Nodes) *Node {
// callpartMethod returns the *types.Field representing the method
// referenced by method value n.
func callpartMethod(n *Node) *types.Field {
if n.Op != OCALLPART {
func callpartMethod(n *ir.Node) *types.Field {
if n.Op != ir.OCALLPART {
base.Fatalf("expected OCALLPART, got %v", n)
}
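
To make the method-value path concrete: for x.M with receiver type T,
the code sketched in walkpartialcall's comment amounts to:

        clos = &struct {
                F uintptr // code pointer of the T.M-fm wrapper from makepartialcall
                R T       // the evaluated receiver x
        }{T.M-fm, x}

and the *struct is then force-converted to the func type, so calling
the method value invokes the wrapper with the saved receiver.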

@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@ -23,51 +24,6 @@ const (
Mpprec = 512
)
// ValueInterface returns the constant value stored in n as an interface{}.
// It returns int64s for ints and runes, float64s for floats,
// and complex128s for complex values.
func (n *Node) ValueInterface() interface{} {
switch v := n.Val(); v.Kind() {
default:
base.Fatalf("unexpected constant: %v", v)
panic("unreachable")
case constant.Bool:
return constant.BoolVal(v)
case constant.String:
return constant.StringVal(v)
case constant.Int:
return int64Val(n.Type, v)
case constant.Float:
return float64Val(v)
case constant.Complex:
return complex(float64Val(constant.Real(v)), float64Val(constant.Imag(v)))
}
}
// int64Val returns v converted to int64.
// Note: if t is uint64, very large values will be converted to negative int64.
func int64Val(t *types.Type, v constant.Value) int64 {
if t.IsUnsigned() {
if x, ok := constant.Uint64Val(v); ok {
return int64(x)
}
} else {
if x, ok := constant.Int64Val(v); ok {
return x
}
}
base.Fatalf("%v out of range for %v", v, t)
panic("unreachable")
}
func float64Val(v constant.Value) float64 {
if x, _ := constant.Float64Val(v); !math.IsInf(x, 0) {
return x + 0 // avoid -0 (should not be needed, but be conservative)
}
base.Fatalf("bad float64 value: %v", v)
panic("unreachable")
}
func bigFloatVal(v constant.Value) *big.Float {
f := new(big.Float)
f.SetPrec(Mpprec)
@ -86,62 +42,6 @@ func bigFloatVal(v constant.Value) *big.Float {
return f
}
// Int64Val returns n as an int64.
// n must be an integer or rune constant.
func (n *Node) Int64Val() int64 {
if !Isconst(n, constant.Int) {
base.Fatalf("Int64Val(%v)", n)
}
x, ok := constant.Int64Val(n.Val())
if !ok {
base.Fatalf("Int64Val(%v)", n)
}
return x
}
// CanInt64 reports whether it is safe to call Int64Val() on n.
func (n *Node) CanInt64() bool {
if !Isconst(n, constant.Int) {
return false
}
// if the value inside n cannot be represented as an int64, the
// return value of Int64Val is undefined
_, ok := constant.Int64Val(n.Val())
return ok
}
// Uint64Val returns n as a uint64.
// n must be an integer or rune constant.
func (n *Node) Uint64Val() uint64 {
if !Isconst(n, constant.Int) {
base.Fatalf("Uint64Val(%v)", n)
}
x, ok := constant.Uint64Val(n.Val())
if !ok {
base.Fatalf("Uint64Val(%v)", n)
}
return x
}
// BoolVal returns n as a bool.
// n must be a boolean constant.
func (n *Node) BoolVal() bool {
if !Isconst(n, constant.Bool) {
base.Fatalf("BoolVal(%v)", n)
}
return constant.BoolVal(n.Val())
}
// StringVal returns the value of a literal string Node as a string.
// n must be a string constant.
func (n *Node) StringVal() string {
if !Isconst(n, constant.String) {
base.Fatalf("StringVal(%v)", n)
}
return constant.StringVal(n.Val())
}
func roundFloat(v constant.Value, sz int64) constant.Value {
switch sz {
case 4:
@ -184,8 +84,8 @@ func trunccmplxlit(v constant.Value, t *types.Type) constant.Value {
}
// TODO(mdempsky): Replace these with better APIs.
func convlit(n *Node, t *types.Type) *Node { return convlit1(n, t, false, nil) }
func defaultlit(n *Node, t *types.Type) *Node { return convlit1(n, t, false, nil) }
func convlit(n *ir.Node, t *types.Type) *ir.Node { return convlit1(n, t, false, nil) }
func defaultlit(n *ir.Node, t *types.Type) *ir.Node { return convlit1(n, t, false, nil) }
// convlit1 converts an untyped expression n to type t. If n already
// has a type, convlit1 has no effect.
@ -198,7 +98,7 @@ func defaultlit(n *Node, t *types.Type) *Node { return convlit1(n, t, false, nil
//
// If there's an error converting n to t, context is used in the error
// message.
func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Node {
func convlit1(n *ir.Node, t *types.Type, explicit bool, context func() string) *ir.Node {
if explicit && t == nil {
base.Fatalf("explicit conversion missing type")
}
@ -215,15 +115,15 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
return n
}
if n.Op == OLITERAL || n.Op == ONIL {
if n.Op == ir.OLITERAL || n.Op == ir.ONIL {
// Can't always set n.Type directly on OLITERAL nodes.
// See discussion on CL 20813.
n = n.rawcopy()
n = n.RawCopy()
}
// Nil is technically not a constant, so handle it specially.
if n.Type.Etype == TNIL {
if n.Op != ONIL {
if n.Type.Etype == types.TNIL {
if n.Op != ir.ONIL {
base.Fatalf("unexpected op: %v (%v)", n, n.Op)
}
if t == nil {
@ -242,7 +142,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
return n
}
if t == nil || !okforconst[t.Etype] {
if t == nil || !ir.OKForConst[t.Etype] {
t = defaultType(n.Type)
}
@ -250,7 +150,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
default:
base.Fatalf("unexpected untyped expression: %v", n)
case OLITERAL:
case ir.OLITERAL:
v := convertVal(n.Val(), t, explicit)
if v.Kind() == constant.Unknown {
break
@ -259,7 +159,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
n.SetVal(v)
return n
case OPLUS, ONEG, OBITNOT, ONOT, OREAL, OIMAG:
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.OREAL, ir.OIMAG:
ot := operandType(n.Op, t)
if ot == nil {
n = defaultlit(n, nil)
@ -274,7 +174,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
n.Type = t
return n
case OADD, OSUB, OMUL, ODIV, OMOD, OOR, OXOR, OAND, OANDNOT, OOROR, OANDAND, OCOMPLEX:
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND, ir.OCOMPLEX:
ot := operandType(n.Op, t)
if ot == nil {
n = defaultlit(n, nil)
@ -296,14 +196,14 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
n.Type = t
return n
case OEQ, ONE, OLT, OLE, OGT, OGE:
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
if !t.IsBoolean() {
break
}
n.Type = t
return n
case OLSH, ORSH:
case ir.OLSH, ir.ORSH:
n.Left = convlit1(n.Left, t, explicit, nil)
n.Type = n.Left.Type
if n.Type != nil && !n.Type.IsInteger() {
@ -329,13 +229,13 @@ func convlit1(n *Node, t *types.Type, explicit bool, context func() string) *Nod
return n
}
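
A source-level illustration of the explicit flag: an implicit
conversion may only commit an untyped constant to a type that can
represent it, while an explicit conversion can request one directly:

        const big = 1 << 40
        var f float64 = big // implicit: untyped int -> float64, fine
        var i int32 = big   // error: constant 1099511627776 overflows int32
        g := float32(big)   // explicit conversion: convlit1 with explicit=true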
func operandType(op Op, t *types.Type) *types.Type {
func operandType(op ir.Op, t *types.Type) *types.Type {
switch op {
case OCOMPLEX:
case ir.OCOMPLEX:
if t.IsComplex() {
return floatForComplex(t)
}
case OREAL, OIMAG:
case ir.OREAL, ir.OIMAG:
if t.IsFloat() {
return complexForFloat(t)
}
@ -488,7 +388,7 @@ func overflow(v constant.Value, t *types.Type) bool {
return true
}
if doesoverflow(v, t) {
base.Errorf("constant %v overflows %v", vconv(v, 0), t)
base.Errorf("constant %v overflows %v", ir.FmtConst(v, 0), t)
return true
}
return false
@ -505,57 +405,46 @@ func tostr(v constant.Value) constant.Value {
return v
}
func consttype(n *Node) constant.Kind {
if n == nil || n.Op != OLITERAL {
return constant.Unknown
}
return n.Val().Kind()
}
func Isconst(n *Node, ct constant.Kind) bool {
return consttype(n) == ct
}
var tokenForOp = [...]token.Token{
OPLUS: token.ADD,
ONEG: token.SUB,
ONOT: token.NOT,
OBITNOT: token.XOR,
ir.OPLUS: token.ADD,
ir.ONEG: token.SUB,
ir.ONOT: token.NOT,
ir.OBITNOT: token.XOR,
OADD: token.ADD,
OSUB: token.SUB,
OMUL: token.MUL,
ODIV: token.QUO,
OMOD: token.REM,
OOR: token.OR,
OXOR: token.XOR,
OAND: token.AND,
OANDNOT: token.AND_NOT,
OOROR: token.LOR,
OANDAND: token.LAND,
ir.OADD: token.ADD,
ir.OSUB: token.SUB,
ir.OMUL: token.MUL,
ir.ODIV: token.QUO,
ir.OMOD: token.REM,
ir.OOR: token.OR,
ir.OXOR: token.XOR,
ir.OAND: token.AND,
ir.OANDNOT: token.AND_NOT,
ir.OOROR: token.LOR,
ir.OANDAND: token.LAND,
OEQ: token.EQL,
ONE: token.NEQ,
OLT: token.LSS,
OLE: token.LEQ,
OGT: token.GTR,
OGE: token.GEQ,
ir.OEQ: token.EQL,
ir.ONE: token.NEQ,
ir.OLT: token.LSS,
ir.OLE: token.LEQ,
ir.OGT: token.GTR,
ir.OGE: token.GEQ,
OLSH: token.SHL,
ORSH: token.SHR,
ir.OLSH: token.SHL,
ir.ORSH: token.SHR,
}
// evalConst returns a constant-evaluated expression equivalent to n.
// If n is not a constant, evalConst returns n.
// Otherwise, evalConst returns a new OLITERAL with the same value as n,
// and with .Orig pointing back to n.
func evalConst(n *Node) *Node {
func evalConst(n *ir.Node) *ir.Node {
nl, nr := n.Left, n.Right
// Pick off just the opcodes that can be constant evaluated.
switch op := n.Op; op {
case OPLUS, ONEG, OBITNOT, ONOT:
if nl.Op == OLITERAL {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
if nl.Op == ir.OLITERAL {
var prec uint
if n.Type.IsUnsigned() {
prec = uint(n.Type.Size() * 8)
@ -563,36 +452,36 @@ func evalConst(n *Node) *Node {
return origConst(n, constant.UnaryOp(tokenForOp[op], nl.Val(), prec))
}
case OADD, OSUB, OMUL, ODIV, OMOD, OOR, OXOR, OAND, OANDNOT, OOROR, OANDAND:
if nl.Op == OLITERAL && nr.Op == OLITERAL {
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
rval := nr.Val()
// check for divisor underflow in complex division (see issue 20227)
if op == ODIV && n.Type.IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
if op == ir.ODIV && n.Type.IsComplex() && constant.Sign(square(constant.Real(rval))) == 0 && constant.Sign(square(constant.Imag(rval))) == 0 {
base.Errorf("complex division by zero")
n.Type = nil
return n
}
if (op == ODIV || op == OMOD) && constant.Sign(rval) == 0 {
if (op == ir.ODIV || op == ir.OMOD) && constant.Sign(rval) == 0 {
base.Errorf("division by zero")
n.Type = nil
return n
}
tok := tokenForOp[op]
if op == ODIV && n.Type.IsInteger() {
if op == ir.ODIV && n.Type.IsInteger() {
tok = token.QUO_ASSIGN // integer division
}
return origConst(n, constant.BinaryOp(nl.Val(), tok, rval))
}
case OEQ, ONE, OLT, OLE, OGT, OGE:
if nl.Op == OLITERAL && nr.Op == OLITERAL {
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
return origBoolConst(n, constant.Compare(nl.Val(), tokenForOp[op], nr.Val()))
}
case OLSH, ORSH:
if nl.Op == OLITERAL && nr.Op == OLITERAL {
case ir.OLSH, ir.ORSH:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
// shiftBound from go/types; "so we can express smallestFloat64"
const shiftBound = 1023 - 1 + 52
s, ok := constant.Uint64Val(nr.Val())
@ -604,24 +493,24 @@ func evalConst(n *Node) *Node {
return origConst(n, constant.Shift(toint(nl.Val()), tokenForOp[op], uint(s)))
}
case OCONV, ORUNESTR:
if okforconst[n.Type.Etype] && nl.Op == OLITERAL {
case ir.OCONV, ir.ORUNESTR:
if ir.OKForConst[n.Type.Etype] && nl.Op == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type, true))
}
case OCONVNOP:
if okforconst[n.Type.Etype] && nl.Op == OLITERAL {
case ir.OCONVNOP:
if ir.OKForConst[n.Type.Etype] && nl.Op == ir.OLITERAL {
// set so n.Orig gets OCONV instead of OCONVNOP
n.Op = OCONV
n.Op = ir.OCONV
return origConst(n, nl.Val())
}
case OADDSTR:
case ir.OADDSTR:
// Merge adjacent constants in the argument list.
s := n.List.Slice()
need := 0
for i := 0; i < len(s); i++ {
if i == 0 || !Isconst(s[i-1], constant.String) || !Isconst(s[i], constant.String) {
if i == 0 || !ir.IsConst(s[i-1], constant.String) || !ir.IsConst(s[i], constant.String) {
// Can't merge s[i] into s[i-1]; need a slot in the list.
need++
}
@ -636,13 +525,13 @@ func evalConst(n *Node) *Node {
}
return origConst(n, constant.MakeString(strings.Join(strs, "")))
}
newList := make([]*Node, 0, need)
newList := make([]*ir.Node, 0, need)
for i := 0; i < len(s); i++ {
if Isconst(s[i], constant.String) && i+1 < len(s) && Isconst(s[i+1], constant.String) {
if ir.IsConst(s[i], constant.String) && i+1 < len(s) && ir.IsConst(s[i+1], constant.String) {
// merge from i up to but not including i2
var strs []string
i2 := i
for i2 < len(s) && Isconst(s[i2], constant.String) {
for i2 < len(s) && ir.IsConst(s[i2], constant.String) {
strs = append(strs, s[i2].StringVal())
i2++
}
@ -656,37 +545,37 @@ func evalConst(n *Node) *Node {
}
}
n = n.copy()
n = ir.Copy(n)
n.List.Set(newList)
return n
case OCAP, OLEN:
case ir.OCAP, ir.OLEN:
switch nl.Type.Etype {
case TSTRING:
if Isconst(nl, constant.String) {
case types.TSTRING:
if ir.IsConst(nl, constant.String) {
return origIntConst(n, int64(len(nl.StringVal())))
}
case TARRAY:
case types.TARRAY:
if !hascallchan(nl) {
return origIntConst(n, nl.Type.NumElem())
}
}
case OALIGNOF, OOFFSETOF, OSIZEOF:
case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
return origIntConst(n, evalunsafe(n))
case OREAL:
if nl.Op == OLITERAL {
case ir.OREAL:
if nl.Op == ir.OLITERAL {
return origConst(n, constant.Real(nl.Val()))
}
case OIMAG:
if nl.Op == OLITERAL {
case ir.OIMAG:
if nl.Op == ir.OLITERAL {
return origConst(n, constant.Imag(nl.Val()))
}
case OCOMPLEX:
if nl.Op == OLITERAL && nr.Op == OLITERAL {
case ir.OCOMPLEX:
if nl.Op == ir.OLITERAL && nr.Op == ir.OLITERAL {
return origConst(n, makeComplex(nl.Val(), nr.Val()))
}
}
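
The token.QUO_ASSIGN trick above comes from go/constant itself: with
token.QUO the quotient of two integer constants is exact (and possibly
non-integer), while token.QUO_ASSIGN forces Go's truncating integer
division. A standalone sketch:

        package main

        import (
                "fmt"
                "go/constant"
                "go/token"
        )

        func main() {
                x := constant.MakeInt64(7)
                y := constant.MakeInt64(2)
                fmt.Println(constant.BinaryOp(x, token.QUO, y))        // exact 7/2
                fmt.Println(constant.BinaryOp(x, token.QUO_ASSIGN, y)) // 3
        }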
@ -721,16 +610,16 @@ func square(x constant.Value) constant.Value {
// For matching historical "constant OP overflow" error messages.
// TODO(mdempsky): Replace with error messages like go/types uses.
var overflowNames = [...]string{
OADD: "addition",
OSUB: "subtraction",
OMUL: "multiplication",
OLSH: "shift",
OXOR: "bitwise XOR",
OBITNOT: "bitwise complement",
ir.OADD: "addition",
ir.OSUB: "subtraction",
ir.OMUL: "multiplication",
ir.OLSH: "shift",
ir.OXOR: "bitwise XOR",
ir.OBITNOT: "bitwise complement",
}
// origConst returns an OLITERAL with orig n and value v.
func origConst(n *Node, v constant.Value) *Node {
func origConst(n *ir.Node, v constant.Value) *ir.Node {
lno := setlineno(n)
v = convertVal(v, n.Type, false)
base.Pos = lno
@ -752,81 +641,28 @@ func origConst(n *Node, v constant.Value) *Node {
}
orig := n
n = nodl(orig.Pos, OLITERAL, nil, nil)
n = ir.NodAt(orig.Pos, ir.OLITERAL, nil, nil)
n.Orig = orig
n.Type = orig.Type
n.SetVal(v)
return n
}
func assertRepresents(t *types.Type, v constant.Value) {
if !represents(t, v) {
base.Fatalf("%v does not represent %v", t, v)
}
}
func represents(t *types.Type, v constant.Value) bool {
switch v.Kind() {
case constant.Unknown:
return okforconst[t.Etype]
case constant.Bool:
return t.IsBoolean()
case constant.String:
return t.IsString()
case constant.Int:
return t.IsInteger()
case constant.Float:
return t.IsFloat()
case constant.Complex:
return t.IsComplex()
}
base.Fatalf("unexpected constant kind: %v", v)
panic("unreachable")
}
func origBoolConst(n *Node, v bool) *Node {
func origBoolConst(n *ir.Node, v bool) *ir.Node {
return origConst(n, constant.MakeBool(v))
}
func origIntConst(n *Node, v int64) *Node {
func origIntConst(n *ir.Node, v int64) *ir.Node {
return origConst(n, constant.MakeInt64(v))
}
// nodlit returns a new untyped constant with value v.
func nodlit(v constant.Value) *Node {
n := nod(OLITERAL, nil, nil)
if k := v.Kind(); k != constant.Unknown {
n.Type = idealType(k)
n.SetVal(v)
}
return n
}
func idealType(ct constant.Kind) *types.Type {
switch ct {
case constant.String:
return types.UntypedString
case constant.Bool:
return types.UntypedBool
case constant.Int:
return types.UntypedInt
case constant.Float:
return types.UntypedFloat
case constant.Complex:
return types.UntypedComplex
}
base.Fatalf("unexpected Ctype: %v", ct)
return nil
}
// defaultlit on both nodes simultaneously;
// if they're both ideal going in they better
// get the same type going out.
// force means must assign concrete (non-ideal) type.
// The results of defaultlit2 MUST be assigned back to l and r, e.g.
// n.Left, n.Right = defaultlit2(n.Left, n.Right, force)
func defaultlit2(l *Node, r *Node, force bool) (*Node, *Node) {
func defaultlit2(l *ir.Node, r *ir.Node, force bool) (*ir.Node, *ir.Node) {
if l.Type == nil || r.Type == nil {
return l, r
}
@ -851,7 +687,7 @@ func defaultlit2(l *Node, r *Node, force bool) (*Node, *Node) {
if l.Type.IsString() != r.Type.IsString() {
return l, r
}
if l.isNil() || r.isNil() {
if ir.IsNil(l) || ir.IsNil(r) {
return l, r
}
@ -888,31 +724,31 @@ func mixUntyped(t1, t2 *types.Type) *types.Type {
}
func defaultType(t *types.Type) *types.Type {
if !t.IsUntyped() || t.Etype == TNIL {
if !t.IsUntyped() || t.Etype == types.TNIL {
return t
}
switch t {
case types.UntypedBool:
return types.Types[TBOOL]
return types.Types[types.TBOOL]
case types.UntypedString:
return types.Types[TSTRING]
return types.Types[types.TSTRING]
case types.UntypedInt:
return types.Types[TINT]
return types.Types[types.TINT]
case types.UntypedRune:
return types.Runetype
case types.UntypedFloat:
return types.Types[TFLOAT64]
return types.Types[types.TFLOAT64]
case types.UntypedComplex:
return types.Types[TCOMPLEX128]
return types.Types[types.TCOMPLEX128]
}
base.Fatalf("bad type %v", t)
return nil
}
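
In source terms, defaultType is the usual defaulting of untyped
constants at assignment:

        x := 1    // untyped int     -> int
        r := 'a'  // untyped rune    -> rune (int32)
        f := 1.5  // untyped float   -> float64
        c := 2i   // untyped complex -> complex128
        b := true // untyped bool    -> bool
        s := "hi" // untyped string  -> string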
func smallintconst(n *Node) bool {
if n.Op == OLITERAL {
func smallintconst(n *ir.Node) bool {
if n.Op == ir.OLITERAL {
v, ok := constant.Int64Val(n.Val())
return ok && int64(int32(v)) == v
}
@ -924,11 +760,11 @@ func smallintconst(n *Node) bool {
// If n is not a constant expression, not representable as an
// integer, or negative, it returns -1. If n is too large, it
// returns -2.
func indexconst(n *Node) int64 {
if n.Op != OLITERAL {
func indexconst(n *ir.Node) int64 {
if n.Op != ir.OLITERAL {
return -1
}
if !n.Type.IsInteger() && n.Type.Etype != TIDEAL {
if !n.Type.IsInteger() && n.Type.Etype != types.TIDEAL {
return -1
}
@ -936,10 +772,10 @@ func indexconst(n *Node) int64 {
if v.Kind() != constant.Int || constant.Sign(v) < 0 {
return -1
}
if doesoverflow(v, types.Types[TINT]) {
if doesoverflow(v, types.Types[types.TINT]) {
return -2
}
return int64Val(types.Types[TINT], v)
return ir.Int64Val(types.Types[types.TINT], v)
}
// isGoConst reports whether n is a Go language constant (as opposed to a
@ -947,35 +783,35 @@ func indexconst(n *Node) int64 {
//
// Expressions derived from nil, like string([]byte(nil)), while they
// may be known at compile time, are not Go language constants.
func (n *Node) isGoConst() bool {
return n.Op == OLITERAL
func isGoConst(n *ir.Node) bool {
return n.Op == ir.OLITERAL
}
func hascallchan(n *Node) bool {
func hascallchan(n *ir.Node) bool {
if n == nil {
return false
}
switch n.Op {
case OAPPEND,
OCALL,
OCALLFUNC,
OCALLINTER,
OCALLMETH,
OCAP,
OCLOSE,
OCOMPLEX,
OCOPY,
ODELETE,
OIMAG,
OLEN,
OMAKE,
ONEW,
OPANIC,
OPRINT,
OPRINTN,
OREAL,
ORECOVER,
ORECV:
case ir.OAPPEND,
ir.OCALL,
ir.OCALLFUNC,
ir.OCALLINTER,
ir.OCALLMETH,
ir.OCAP,
ir.OCLOSE,
ir.OCOMPLEX,
ir.OCOPY,
ir.ODELETE,
ir.OIMAG,
ir.OLEN,
ir.OMAKE,
ir.ONEW,
ir.OPANIC,
ir.OPRINT,
ir.OPRINTN,
ir.OREAL,
ir.ORECOVER,
ir.ORECV:
return true
}
@ -1015,12 +851,12 @@ type constSetKey struct {
// where are used in the error message.
//
// n must not be an untyped constant.
func (s *constSet) add(pos src.XPos, n *Node, what, where string) {
if n.Op == OCONVIFACE && n.Implicit() {
func (s *constSet) add(pos src.XPos, n *ir.Node, what, where string) {
if n.Op == ir.OCONVIFACE && n.Implicit() {
n = n.Left
}
if !n.isGoConst() {
if !isGoConst(n) {
return
}
if n.Type.IsUntyped() {
@ -1045,11 +881,11 @@ func (s *constSet) add(pos src.XPos, n *Node, what, where string) {
typ := n.Type
switch typ {
case types.Bytetype:
typ = types.Types[TUINT8]
typ = types.Types[types.TUINT8]
case types.Runetype:
typ = types.Types[TINT32]
typ = types.Types[types.TINT32]
}
k := constSetKey{typ, n.ValueInterface()}
k := constSetKey{typ, ir.ConstValue(n)}
if hasUniquePos(n) {
pos = n.Pos
@ -1072,9 +908,9 @@ func (s *constSet) add(pos src.XPos, n *Node, what, where string) {
// the latter is non-obvious.
//
// TODO(mdempsky): This could probably be a fmt.go flag.
func nodeAndVal(n *Node) string {
func nodeAndVal(n *ir.Node) string {
show := n.String()
val := n.ValueInterface()
val := ir.ConstValue(n)
if s := fmt.Sprintf("%#v", val); show != s {
show += " (value " + s + ")"
}
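
The constSet above is what reports duplicate switch cases and map keys;
folding Bytetype and Runetype into TUINT8 and TINT32 makes an alias and
its underlying type collide, e.g.:

        func dup(v interface{}) {
                switch v {
                case rune(65):  // internally types.Runetype
                case int32(65): // same value, and the same type once Runetype
                        // is folded to TINT32: duplicate case error
                }
        }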

@ -7,6 +7,7 @@ package gc
import (
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
@ -17,7 +18,7 @@ import (
// Declaration stack & operations
var externdcl []*Node
var externdcl []*ir.Node
func testdclstack() {
if !types.IsDclstackValid() {
@ -58,25 +59,25 @@ var declare_typegen int
// declare records that Node n declares symbol n.Sym in the specified
// declaration context.
func declare(n *Node, ctxt Class) {
if n.isBlank() {
func declare(n *ir.Node, ctxt ir.Class) {
if ir.IsBlank(n) {
return
}
if n.Name == nil {
// named OLITERAL needs Name; most OLITERALs don't.
n.Name = new(Name)
n.Name = new(ir.Name)
}
s := n.Sym
// kludgy: typecheckok means we're past parsing. E.g., genwrapper may declare out-of-package names later.
if !inimport && !typecheckok && s.Pkg != localpkg {
if !inimport && !typecheckok && s.Pkg != ir.LocalPkg {
base.ErrorfAt(n.Pos, "cannot declare name %v", s)
}
gen := 0
if ctxt == PEXTERN {
if ctxt == ir.PEXTERN {
if s.Name == "init" {
base.ErrorfAt(n.Pos, "cannot declare init - must be func")
}
@ -85,17 +86,17 @@ func declare(n *Node, ctxt Class) {
}
externdcl = append(externdcl, n)
} else {
if Curfn == nil && ctxt == PAUTO {
if Curfn == nil && ctxt == ir.PAUTO {
base.Pos = n.Pos
base.Fatalf("automatic outside function")
}
if Curfn != nil && ctxt != PFUNC {
if Curfn != nil && ctxt != ir.PFUNC {
Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
}
if n.Op == OTYPE {
if n.Op == ir.OTYPE {
declare_typegen++
gen = declare_typegen
} else if n.Op == ONAME && ctxt == PAUTO && !strings.Contains(s.Name, "·") {
} else if n.Op == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
vargen++
gen = vargen
}
@ -103,58 +104,58 @@ func declare(n *Node, ctxt Class) {
n.Name.Curfn = Curfn
}
if ctxt == PAUTO {
if ctxt == ir.PAUTO {
n.Xoffset = 0
}
if s.Block == types.Block {
// functype will print errors about duplicate function arguments.
// Don't repeat the error here.
if ctxt != PPARAM && ctxt != PPARAMOUT {
if ctxt != ir.PPARAM && ctxt != ir.PPARAMOUT {
redeclare(n.Pos, s, "in this block")
}
}
s.Block = types.Block
s.Lastlineno = base.Pos
s.Def = asTypesNode(n)
s.Def = ir.AsTypesNode(n)
n.Name.Vargen = int32(gen)
n.SetClass(ctxt)
if ctxt == PFUNC {
if ctxt == ir.PFUNC {
n.Sym.SetFunc(true)
}
autoexport(n, ctxt)
}
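
For orientation, the declaration contexts passed to declare correspond
to where a name appears in the source:

        var G int               // package scope: ir.PEXTERN

        func F(p int) (r int) { // p: ir.PPARAM, r: ir.PPARAMOUT, F: ir.PFUNC
                var l int       // function-local: ir.PAUTO
                return l + p + G
        }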
func addvar(n *Node, t *types.Type, ctxt Class) {
if n == nil || n.Sym == nil || (n.Op != ONAME && n.Op != ONONAME) || t == nil {
func addvar(n *ir.Node, t *types.Type, ctxt ir.Class) {
if n == nil || n.Sym == nil || (n.Op != ir.ONAME && n.Op != ir.ONONAME) || t == nil {
base.Fatalf("addvar: n=%v t=%v nil", n, t)
}
n.Op = ONAME
n.Op = ir.ONAME
declare(n, ctxt)
n.Type = t
}
// declare variables from grammar
// new_name_list (type | [type] = expr_list)
func variter(vl []*Node, t *Node, el []*Node) []*Node {
var init []*Node
func variter(vl []*ir.Node, t *ir.Node, el []*ir.Node) []*ir.Node {
var init []*ir.Node
doexpr := len(el) > 0
if len(el) == 1 && len(vl) > 1 {
e := el[0]
as2 := nod(OAS2, nil, nil)
as2 := ir.Nod(ir.OAS2, nil, nil)
as2.List.Set(vl)
as2.Rlist.Set1(e)
for _, v := range vl {
v.Op = ONAME
v.Op = ir.ONAME
declare(v, dclcontext)
v.Name.Param.Ntype = t
v.Name.Defn = as2
if Curfn != nil {
init = append(init, nod(ODCL, v, nil))
init = append(init, ir.Nod(ir.ODCL, v, nil))
}
}
@ -163,7 +164,7 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node {
nel := len(el)
for _, v := range vl {
var e *Node
var e *ir.Node
if doexpr {
if len(el) == 0 {
base.Errorf("assignment mismatch: %d variables but %d values", len(vl), nel)
@ -173,15 +174,15 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node {
el = el[1:]
}
v.Op = ONAME
v.Op = ir.ONAME
declare(v, dclcontext)
v.Name.Param.Ntype = t
if e != nil || Curfn != nil || v.isBlank() {
if e != nil || Curfn != nil || ir.IsBlank(v) {
if Curfn != nil {
init = append(init, nod(ODCL, v, nil))
init = append(init, ir.Nod(ir.ODCL, v, nil))
}
e = nod(OAS, v, e)
e = ir.Nod(ir.OAS, v, e)
init = append(init, e)
if e.Right != nil {
v.Name.Defn = e
@ -196,22 +197,22 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node {
}
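
A quick reminder of the two shapes variter handles:

        var a, b = f()  // one multi-value init: a single OAS2 for both names
        var c, d = 1, 2 // one expression per name: a separate OAS for each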
// newnoname returns a new ONONAME Node associated with symbol s.
func newnoname(s *types.Sym) *Node {
func newnoname(s *types.Sym) *ir.Node {
if s == nil {
base.Fatalf("newnoname nil")
}
n := nod(ONONAME, nil, nil)
n := ir.Nod(ir.ONONAME, nil, nil)
n.Sym = s
n.Xoffset = 0
return n
}
// newfuncnamel generates a new name node for a function or method.
func newfuncnamel(pos src.XPos, s *types.Sym, fn *Func) *Node {
func newfuncnamel(pos src.XPos, s *types.Sym, fn *ir.Func) *ir.Node {
if fn.Nname != nil {
base.Fatalf("newfuncnamel - already have name")
}
n := newnamel(pos, s)
n := ir.NewNameAt(pos, s)
n.Func = fn
fn.Nname = n
return n
@ -219,39 +220,39 @@ func newfuncnamel(pos src.XPos, s *types.Sym, fn *Func) *Node {
// this generates a new name node for a name
// being declared.
func dclname(s *types.Sym) *Node {
n := newname(s)
n.Op = ONONAME // caller will correct it
func dclname(s *types.Sym) *ir.Node {
n := NewName(s)
n.Op = ir.ONONAME // caller will correct it
return n
}
func typenod(t *types.Type) *Node {
func typenod(t *types.Type) *ir.Node {
return typenodl(src.NoXPos, t)
}
func typenodl(pos src.XPos, t *types.Type) *Node {
func typenodl(pos src.XPos, t *types.Type) *ir.Node {
// if we copied another type with *t = *u
// then t->nod might be out of date, so
// check t->nod->type too
if asNode(t.Nod) == nil || asNode(t.Nod).Type != t {
t.Nod = asTypesNode(nodl(pos, OTYPE, nil, nil))
asNode(t.Nod).Type = t
asNode(t.Nod).Sym = t.Sym
if ir.AsNode(t.Nod) == nil || ir.AsNode(t.Nod).Type != t {
t.Nod = ir.AsTypesNode(ir.NodAt(pos, ir.OTYPE, nil, nil))
ir.AsNode(t.Nod).Type = t
ir.AsNode(t.Nod).Sym = t.Sym
}
return asNode(t.Nod)
return ir.AsNode(t.Nod)
}
func anonfield(typ *types.Type) *Node {
func anonfield(typ *types.Type) *ir.Node {
return symfield(nil, typ)
}
func namedfield(s string, typ *types.Type) *Node {
func namedfield(s string, typ *types.Type) *ir.Node {
return symfield(lookup(s), typ)
}
func symfield(s *types.Sym, typ *types.Type) *Node {
n := nodSym(ODCLFIELD, nil, s)
func symfield(s *types.Sym, typ *types.Type) *ir.Node {
n := nodSym(ir.ODCLFIELD, nil, s)
n.Type = typ
return n
}
@ -260,8 +261,8 @@ func symfield(s *types.Sym, typ *types.Type) *Node {
// If no such Node currently exists, an ONONAME Node is returned instead.
// Automatically creates a new closure variable if the referenced symbol was
// declared in a different (containing) function.
func oldname(s *types.Sym) *Node {
n := asNode(s.Def)
func oldname(s *types.Sym) *ir.Node {
n := ir.AsNode(s.Def)
if n == nil {
// Maybe a top-level declaration will come along later to
// define s. resolve will check s.Def again once all input
@ -269,7 +270,7 @@ func oldname(s *types.Sym) *Node {
return newnoname(s)
}
if Curfn != nil && n.Op == ONAME && n.Name.Curfn != nil && n.Name.Curfn != Curfn {
if Curfn != nil && n.Op == ir.ONAME && n.Name.Curfn != nil && n.Name.Curfn != Curfn {
// Inner func is referring to var in outer func.
//
// TODO(rsc): If there is an outer variable x and we
@ -279,8 +280,8 @@ func oldname(s *types.Sym) *Node {
c := n.Name.Param.Innermost
if c == nil || c.Name.Curfn != Curfn {
// Do not have a closure var for the active closure yet; make one.
c = newname(s)
c.SetClass(PAUTOHEAP)
c = NewName(s)
c.SetClass(ir.PAUTOHEAP)
c.Name.SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
c.Name.Defn = n
@ -301,9 +302,9 @@ func oldname(s *types.Sym) *Node {
}
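
The closure-variable path in oldname corresponds to ordinary capture;
each nesting level gets its own closure variable for x, linked through
Param.Innermost:

        func outer() {
                x := 0
                _ = func() {
                        _ = func() {
                                x++ // resolving x here walks the chain outward
                        }
                }
        }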
// importName is like oldname, but it reports an error if sym is from another package and not exported.
func importName(sym *types.Sym) *Node {
func importName(sym *types.Sym) *ir.Node {
n := oldname(sym)
if !types.IsExported(sym.Name) && sym.Pkg != localpkg {
if !types.IsExported(sym.Name) && sym.Pkg != ir.LocalPkg {
n.SetDiag(true)
base.Errorf("cannot refer to unexported name %s.%s", sym.Pkg.Name, sym.Name)
}
@ -311,20 +312,20 @@ func importName(sym *types.Sym) *Node {
}
// := declarations
func colasname(n *Node) bool {
func colasname(n *ir.Node) bool {
switch n.Op {
case ONAME,
ONONAME,
OPACK,
OTYPE,
OLITERAL:
case ir.ONAME,
ir.ONONAME,
ir.OPACK,
ir.OTYPE,
ir.OLITERAL:
return n.Sym != nil
}
return false
}
func colasdefn(left []*Node, defn *Node) {
func colasdefn(left []*ir.Node, defn *ir.Node) {
for _, n := range left {
if n.Sym != nil {
n.Sym.SetUniq(true)
@ -333,7 +334,7 @@ func colasdefn(left []*Node, defn *Node) {
var nnew, nerr int
for i, n := range left {
if n.isBlank() {
if ir.IsBlank(n) {
continue
}
if !colasname(n) {
@ -355,10 +356,10 @@ func colasdefn(left []*Node, defn *Node) {
}
nnew++
n = newname(n.Sym)
n = NewName(n.Sym)
declare(n, dclcontext)
n.Name.Defn = defn
defn.Ninit.Append(nod(ODCL, n, nil))
defn.Ninit.Append(ir.Nod(ir.ODCL, n, nil))
left[i] = n
}
@ -369,8 +370,8 @@ func colasdefn(left []*Node, defn *Node) {
// declare the arguments in an
// interface field declaration.
func ifacedcl(n *Node) {
if n.Op != ODCLFIELD || n.Left == nil {
func ifacedcl(n *ir.Node) {
if n.Op != ir.ODCLFIELD || n.Left == nil {
base.Fatalf("ifacedcl")
}
@ -383,11 +384,11 @@ func ifacedcl(n *Node) {
// and declare the arguments.
// called in extern-declaration context
// returns in auto-declaration context.
func funchdr(n *Node) {
func funchdr(n *ir.Node) {
// change the declaration context from extern to auto
funcStack = append(funcStack, funcStackEnt{Curfn, dclcontext})
Curfn = n
dclcontext = PAUTO
dclcontext = ir.PAUTO
types.Markdcl()
@ -398,8 +399,8 @@ func funchdr(n *Node) {
}
}
func funcargs(nt *Node) {
if nt.Op != OTFUNC {
func funcargs(nt *ir.Node) {
if nt.Op != ir.OTFUNC {
base.Fatalf("funcargs %v", nt.Op)
}
@ -414,10 +415,10 @@ func funcargs(nt *Node) {
// declare the receiver and in arguments.
if nt.Left != nil {
funcarg(nt.Left, PPARAM)
funcarg(nt.Left, ir.PPARAM)
}
for _, n := range nt.List.Slice() {
funcarg(n, PPARAM)
funcarg(n, ir.PPARAM)
}
oldvargen := vargen
@ -442,21 +443,21 @@ func funcargs(nt *Node) {
gen++
}
funcarg(n, PPARAMOUT)
funcarg(n, ir.PPARAMOUT)
}
vargen = oldvargen
}
func funcarg(n *Node, ctxt Class) {
if n.Op != ODCLFIELD {
func funcarg(n *ir.Node, ctxt ir.Class) {
if n.Op != ir.ODCLFIELD {
base.Fatalf("funcarg %v", n.Op)
}
if n.Sym == nil {
return
}
n.Right = newnamel(n.Pos, n.Sym)
n.Right = ir.NewNameAt(n.Pos, n.Sym)
n.Right.Name.Param.Ntype = n.Left
n.Right.SetIsDDD(n.IsDDD())
declare(n.Right, ctxt)
@ -469,27 +470,27 @@ func funcarg(n *Node, ctxt Class) {
// This happens during import, where the hidden_fndcl rule has
// used functype directly to parse the function's type.
func funcargs2(t *types.Type) {
if t.Etype != TFUNC {
if t.Etype != types.TFUNC {
base.Fatalf("funcargs2 %v", t)
}
for _, f := range t.Recvs().Fields().Slice() {
funcarg2(f, PPARAM)
funcarg2(f, ir.PPARAM)
}
for _, f := range t.Params().Fields().Slice() {
funcarg2(f, PPARAM)
funcarg2(f, ir.PPARAM)
}
for _, f := range t.Results().Fields().Slice() {
funcarg2(f, PPARAMOUT)
funcarg2(f, ir.PPARAMOUT)
}
}
func funcarg2(f *types.Field, ctxt Class) {
func funcarg2(f *types.Field, ctxt ir.Class) {
if f.Sym == nil {
return
}
n := newnamel(f.Pos, f.Sym)
f.Nname = asTypesNode(n)
n := ir.NewNameAt(f.Pos, f.Sym)
f.Nname = ir.AsTypesNode(n)
n.Type = f.Type
n.SetIsDDD(f.IsDDD())
declare(n, ctxt)
@ -498,8 +499,8 @@ func funcarg2(f *types.Field, ctxt Class) {
var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext
type funcStackEnt struct {
curfn *Node
dclcontext Class
curfn *ir.Node
dclcontext ir.Class
}
// finish the body.
@ -529,16 +530,16 @@ func checkembeddedtype(t *types.Type) {
if t.IsPtr() || t.IsUnsafePtr() {
base.Errorf("embedded type cannot be a pointer")
} else if t.Etype == TFORW && !t.ForwardType().Embedlineno.IsKnown() {
} else if t.Etype == types.TFORW && !t.ForwardType().Embedlineno.IsKnown() {
t.ForwardType().Embedlineno = base.Pos
}
}
func structfield(n *Node) *types.Field {
func structfield(n *ir.Node) *types.Field {
lno := base.Pos
base.Pos = n.Pos
if n.Op != ODCLFIELD {
if n.Op != ir.ODCLFIELD {
base.Fatalf("structfield: oops %v\n", n)
}
@ -581,8 +582,8 @@ func checkdupfields(what string, fss ...[]*types.Field) {
// convert a parsed id/type list into
// a type for struct/interface/arglist
func tostruct(l []*Node) *types.Type {
t := types.New(TSTRUCT)
func tostruct(l []*ir.Node) *types.Type {
t := types.New(types.TSTRUCT)
fields := make([]*types.Field, len(l))
for i, n := range l {
@ -603,8 +604,8 @@ func tostruct(l []*Node) *types.Type {
return t
}
func tofunargs(l []*Node, funarg types.Funarg) *types.Type {
t := types.New(TSTRUCT)
func tofunargs(l []*ir.Node, funarg types.Funarg) *types.Type {
t := types.New(types.TSTRUCT)
t.StructType().Funarg = funarg
fields := make([]*types.Field, len(l))
@ -613,7 +614,7 @@ func tofunargs(l []*Node, funarg types.Funarg) *types.Type {
f.SetIsDDD(n.IsDDD())
if n.Right != nil {
n.Right.Type = f.Type
f.Nname = asTypesNode(n.Right)
f.Nname = ir.AsTypesNode(n.Right)
}
if f.Broke() {
t.SetBroke(true)
@ -625,17 +626,17 @@ func tofunargs(l []*Node, funarg types.Funarg) *types.Type {
}
func tofunargsfield(fields []*types.Field, funarg types.Funarg) *types.Type {
t := types.New(TSTRUCT)
t := types.New(types.TSTRUCT)
t.StructType().Funarg = funarg
t.SetFields(fields)
return t
}
func interfacefield(n *Node) *types.Field {
func interfacefield(n *ir.Node) *types.Field {
lno := base.Pos
base.Pos = n.Pos
if n.Op != ODCLFIELD {
if n.Op != ir.ODCLFIELD {
base.Fatalf("interfacefield: oops %v\n", n)
}
@ -660,11 +661,11 @@ func interfacefield(n *Node) *types.Field {
return f
}
func tointerface(l []*Node) *types.Type {
func tointerface(l []*ir.Node) *types.Type {
if len(l) == 0 {
return types.Types[TINTER]
return types.Types[types.TINTER]
}
t := types.New(TINTER)
t := types.New(types.TINTER)
var fields []*types.Field
for _, n := range l {
f := interfacefield(n)
@ -677,7 +678,7 @@ func tointerface(l []*Node) *types.Type {
return t
}
func fakeRecv() *Node {
func fakeRecv() *ir.Node {
return anonfield(types.FakeRecvType())
}
@ -693,12 +694,12 @@ func isifacemethod(f *types.Type) bool {
}
// turn a parsed function declaration into a type
func functype(this *Node, in, out []*Node) *types.Type {
t := types.New(TFUNC)
func functype(this *ir.Node, in, out []*ir.Node) *types.Type {
t := types.New(types.TFUNC)
var rcvr []*Node
var rcvr []*ir.Node
if this != nil {
rcvr = []*Node{this}
rcvr = []*ir.Node{this}
}
t.FuncType().Receiver = tofunargs(rcvr, types.FunargRcvr)
t.FuncType().Params = tofunargs(in, types.FunargParams)
@ -710,13 +711,13 @@ func functype(this *Node, in, out []*Node) *types.Type {
t.SetBroke(true)
}
t.FuncType().Outnamed = t.NumResults() > 0 && origSym(t.Results().Field(0).Sym) != nil
t.FuncType().Outnamed = t.NumResults() > 0 && ir.OrigSym(t.Results().Field(0).Sym) != nil
return t
}
func functypefield(this *types.Field, in, out []*types.Field) *types.Type {
t := types.New(TFUNC)
t := types.New(types.TFUNC)
var rcvr []*types.Field
if this != nil {
@ -726,36 +727,11 @@ func functypefield(this *types.Field, in, out []*types.Field) *types.Type {
t.FuncType().Params = tofunargsfield(in, types.FunargParams)
t.FuncType().Results = tofunargsfield(out, types.FunargResults)
t.FuncType().Outnamed = t.NumResults() > 0 && origSym(t.Results().Field(0).Sym) != nil
t.FuncType().Outnamed = t.NumResults() > 0 && ir.OrigSym(t.Results().Field(0).Sym) != nil
return t
}
// origSym returns the original symbol written by the user.
func origSym(s *types.Sym) *types.Sym {
if s == nil {
return nil
}
if len(s.Name) > 1 && s.Name[0] == '~' {
switch s.Name[1] {
case 'r': // originally an unnamed result
return nil
case 'b': // originally the blank identifier _
// TODO(mdempsky): Does s.Pkg matter here?
return nblank.Sym
}
return s
}
if strings.HasPrefix(s.Name, ".anon") {
// originally an unnamed or _ name (see subr.go: structargs)
return nil
}
return s
}
// methodSym returns the method symbol representing a method name
// associated with a specific receiver type.
//
@ -823,7 +799,7 @@ func methodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sy
// - msym is the method symbol
// - t is function type (with receiver)
// Returns a pointer to the existing or added Field; or nil if there's an error.
func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
func addmethod(n *ir.Node, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
if msym == nil {
base.Fatalf("no method symbol")
}
@ -864,7 +840,7 @@ func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool)
return nil
}
if local && mt.Sym.Pkg != localpkg {
if local && mt.Sym.Pkg != ir.LocalPkg {
base.Errorf("cannot define new methods on non-local type %v", mt)
return nil
}
@ -896,7 +872,7 @@ func addmethod(n *Node, msym *types.Sym, t *types.Type, local, nointerface bool)
}
f := types.NewField(base.Pos, msym, t)
f.Nname = asTypesNode(n.Func.Nname)
f.Nname = ir.AsTypesNode(n.Func.Nname)
f.SetNointerface(nointerface)
mt.Methods().Append(f)
@ -959,21 +935,21 @@ func makefuncsym(s *types.Sym) {
}
// setNodeNameFunc marks a node as a function.
func setNodeNameFunc(n *Node) {
if n.Op != ONAME || n.Class() != Pxxx {
func setNodeNameFunc(n *ir.Node) {
if n.Op != ir.ONAME || n.Class() != ir.Pxxx {
base.Fatalf("expected ONAME/Pxxx node, got %v", n)
}
n.SetClass(PFUNC)
n.SetClass(ir.PFUNC)
n.Sym.SetFunc(true)
}
func dclfunc(sym *types.Sym, tfn *Node) *Node {
if tfn.Op != OTFUNC {
func dclfunc(sym *types.Sym, tfn *ir.Node) *ir.Node {
if tfn.Op != ir.OTFUNC {
base.Fatalf("expected OTFUNC node, got %v", tfn)
}
fn := nod(ODCLFUNC, nil, nil)
fn := ir.Nod(ir.ODCLFUNC, nil, nil)
fn.Func.Nname = newfuncnamel(base.Pos, sym, fn.Func)
fn.Func.Nname.Name.Defn = fn
fn.Func.Nname.Name.Param.Ntype = tfn
@ -987,27 +963,22 @@ type nowritebarrierrecChecker struct {
// extraCalls contains extra function calls that may not be
// visible during later analysis. It maps from the ODCLFUNC of
// the caller to a list of callees.
extraCalls map[*Node][]nowritebarrierrecCall
extraCalls map[*ir.Node][]nowritebarrierrecCall
// curfn is the current function during AST walks.
curfn *Node
curfn *ir.Node
}
type nowritebarrierrecCall struct {
target *Node // ODCLFUNC of caller or callee
target *ir.Node // ODCLFUNC of caller or callee
lineno src.XPos // line of call
}
type nowritebarrierrecCallSym struct {
target *obj.LSym // LSym of callee
lineno src.XPos // line of call
}
// newNowritebarrierrecChecker creates a nowritebarrierrecChecker. It
// must be called before transformclosure and walk.
func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
c := &nowritebarrierrecChecker{
extraCalls: make(map[*Node][]nowritebarrierrecCall),
extraCalls: make(map[*ir.Node][]nowritebarrierrecCall),
}
// Find all systemstack calls and record their targets. In
@ -1016,39 +987,39 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
// directly. This has to happen before transformclosure since
// it's a lot harder to work out the argument after.
for _, n := range xtop {
if n.Op != ODCLFUNC {
if n.Op != ir.ODCLFUNC {
continue
}
c.curfn = n
inspect(n, c.findExtraCalls)
ir.Inspect(n, c.findExtraCalls)
}
c.curfn = nil
return c
}
func (c *nowritebarrierrecChecker) findExtraCalls(n *Node) bool {
if n.Op != OCALLFUNC {
func (c *nowritebarrierrecChecker) findExtraCalls(n *ir.Node) bool {
if n.Op != ir.OCALLFUNC {
return true
}
fn := n.Left
if fn == nil || fn.Op != ONAME || fn.Class() != PFUNC || fn.Name.Defn == nil {
if fn == nil || fn.Op != ir.ONAME || fn.Class() != ir.PFUNC || fn.Name.Defn == nil {
return true
}
if !isRuntimePkg(fn.Sym.Pkg) || fn.Sym.Name != "systemstack" {
return true
}
var callee *Node
var callee *ir.Node
arg := n.List.First()
switch arg.Op {
case ONAME:
case ir.ONAME:
callee = arg.Name.Defn
case OCLOSURE:
case ir.OCLOSURE:
callee = arg.Func.Decl
default:
base.Fatalf("expected ONAME or OCLOSURE node, got %+v", arg)
}
if callee.Op != ODCLFUNC {
if callee.Op != ir.ODCLFUNC {
base.Fatalf("expected ODCLFUNC node, got %+v", callee)
}
c.extraCalls[c.curfn] = append(c.extraCalls[c.curfn], nowritebarrierrecCall{callee, n.Pos})
@ -1063,17 +1034,17 @@ func (c *nowritebarrierrecChecker) findExtraCalls(n *Node) bool {
// because that's all we know after we start SSA.
//
// This can be called concurrently for different from Nodes.
func (c *nowritebarrierrecChecker) recordCall(from *Node, to *obj.LSym, pos src.XPos) {
if from.Op != ODCLFUNC {
func (c *nowritebarrierrecChecker) recordCall(from *ir.Node, to *obj.LSym, pos src.XPos) {
if from.Op != ir.ODCLFUNC {
base.Fatalf("expected ODCLFUNC, got %v", from)
}
// We record this information on the *Func so this is
// concurrent-safe.
fn := from.Func
if fn.nwbrCalls == nil {
fn.nwbrCalls = new([]nowritebarrierrecCallSym)
if fn.NWBRCalls == nil {
fn.NWBRCalls = new([]ir.SymAndPos)
}
*fn.nwbrCalls = append(*fn.nwbrCalls, nowritebarrierrecCallSym{to, pos})
*fn.NWBRCalls = append(*fn.NWBRCalls, ir.SymAndPos{Sym: to, Pos: pos})
}
func (c *nowritebarrierrecChecker) check() {
@ -1081,39 +1052,39 @@ func (c *nowritebarrierrecChecker) check() {
// capture all calls created by lowering, but this means we
// only get to see the obj.LSyms of calls. symToFunc lets us
// get back to the ODCLFUNCs.
symToFunc := make(map[*obj.LSym]*Node)
symToFunc := make(map[*obj.LSym]*ir.Node)
// funcs records the back-edges of the BFS call graph walk. It
// maps from the ODCLFUNC of each function that must not have
// write barriers to the call that inhibits them. Functions
// that are directly marked go:nowritebarrierrec are in this
// map with a zero-valued nowritebarrierrecCall. This also
// acts as the set of marks for the BFS of the call graph.
funcs := make(map[*Node]nowritebarrierrecCall)
funcs := make(map[*ir.Node]nowritebarrierrecCall)
// q is the queue of ODCLFUNC Nodes to visit in BFS order.
var q nodeQueue
var q ir.NodeQueue
for _, n := range xtop {
if n.Op != ODCLFUNC {
if n.Op != ir.ODCLFUNC {
continue
}
symToFunc[n.Func.lsym] = n
symToFunc[n.Func.LSym] = n
// Make nowritebarrierrec functions BFS roots.
if n.Func.Pragma&Nowritebarrierrec != 0 {
if n.Func.Pragma&ir.Nowritebarrierrec != 0 {
funcs[n] = nowritebarrierrecCall{}
q.pushRight(n)
q.PushRight(n)
}
// Check go:nowritebarrier functions.
if n.Func.Pragma&Nowritebarrier != 0 && n.Func.WBPos.IsKnown() {
if n.Func.Pragma&ir.Nowritebarrier != 0 && n.Func.WBPos.IsKnown() {
base.ErrorfAt(n.Func.WBPos, "write barrier prohibited")
}
}
// Perform a BFS of the call graph from all
// go:nowritebarrierrec functions.
enqueue := func(src, target *Node, pos src.XPos) {
if target.Func.Pragma&Yeswritebarrierrec != 0 {
enqueue := func(src, target *ir.Node, pos src.XPos) {
if target.Func.Pragma&ir.Yeswritebarrierrec != 0 {
// Don't flow into this function.
return
}
@ -1124,10 +1095,10 @@ func (c *nowritebarrierrecChecker) check() {
// Record the path.
funcs[target] = nowritebarrierrecCall{target: src, lineno: pos}
q.pushRight(target)
q.PushRight(target)
}
for !q.empty() {
fn := q.popLeft()
for !q.Empty() {
fn := q.PopLeft()
// Check fn.
if fn.Func.WBPos.IsKnown() {
@ -1145,13 +1116,13 @@ func (c *nowritebarrierrecChecker) check() {
for _, callee := range c.extraCalls[fn] {
enqueue(fn, callee.target, callee.lineno)
}
if fn.Func.nwbrCalls == nil {
if fn.Func.NWBRCalls == nil {
continue
}
for _, callee := range *fn.Func.nwbrCalls {
target := symToFunc[callee.target]
for _, callee := range *fn.Func.NWBRCalls {
target := symToFunc[callee.Sym]
if target != nil {
enqueue(fn, target, callee.lineno)
enqueue(fn, target, callee.Pos)
}
}
}
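
At the source level, the pragmas that seed and cut off this BFS look
like the following (function names here are illustrative, not from the
runtime):

        //go:nowritebarrierrec
        func gcRoot() { helper() } // a BFS root: no write barrier may be
                                   // reachable from here

        //go:yeswritebarrierrec
        func helper() {} // the walk does not flow past this function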

@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/obj"
@ -16,7 +17,7 @@ import (
"strings"
)
var embedlist []*Node
var embedlist []*ir.Node
const (
embedUnknown = iota
@ -27,7 +28,7 @@ const (
var numLocalEmbed int
func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []PragmaEmbed) (newExprs []*Node) {
func varEmbed(p *noder, names []*ir.Node, typ *ir.Node, exprs []*ir.Node, embeds []PragmaEmbed) (newExprs []*ir.Node) {
haveEmbed := false
for _, decl := range p.file.DeclList {
imp, ok := decl.(*syntax.ImportDecl)
@ -110,14 +111,14 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma
}
v := names[0]
if dclcontext != PEXTERN {
if dclcontext != ir.PEXTERN {
numLocalEmbed++
v = newnamel(v.Pos, lookupN("embed.", numLocalEmbed))
v.Sym.Def = asTypesNode(v)
v = ir.NewNameAt(v.Pos, lookupN("embed.", numLocalEmbed))
v.Sym.Def = ir.AsTypesNode(v)
v.Name.Param.Ntype = typ
v.SetClass(PEXTERN)
v.SetClass(ir.PEXTERN)
externdcl = append(externdcl, v)
exprs = []*Node{v}
exprs = []*ir.Node{v}
}
v.Name.Param.SetEmbedFiles(list)
@ -129,18 +130,18 @@ func varEmbed(p *noder, names []*Node, typ *Node, exprs []*Node, embeds []Pragma
// The match is approximate because we haven't done scope resolution yet and
// can't tell whether "string" and "byte" really mean "string" and "byte".
// The result must be confirmed later, after type checking, using embedKind.
func embedKindApprox(typ *Node) int {
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && base.Ctxt.Pkgpath == "embed")) {
func embedKindApprox(typ *ir.Node) int {
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles
}
// These are not guaranteed to match only string and []byte -
// maybe the local package has redefined one of those words.
// But it's the best we can do now during the noder.
// The stricter check happens later, in initEmbed calling embedKind.
if typ.Sym != nil && typ.Sym.Name == "string" && typ.Sym.Pkg == localpkg {
if typ.Sym != nil && typ.Sym.Name == "string" && typ.Sym.Pkg == ir.LocalPkg {
return embedString
}
if typ.Op == OTARRAY && typ.Left == nil && typ.Right.Sym != nil && typ.Right.Sym.Name == "byte" && typ.Right.Sym.Pkg == localpkg {
if typ.Op == ir.OTARRAY && typ.Left == nil && typ.Right.Sym != nil && typ.Right.Sym.Name == "byte" && typ.Right.Sym.Pkg == ir.LocalPkg {
return embedBytes
}
return embedUnknown
@ -148,10 +149,10 @@ func embedKindApprox(typ *Node) int {
// embedKind determines the kind of embedding variable.
func embedKind(typ *types.Type) int {
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == localpkg && base.Ctxt.Pkgpath == "embed")) {
if typ.Sym != nil && typ.Sym.Name == "FS" && (typ.Sym.Pkg.Path == "embed" || (typ.Sym.Pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "embed")) {
return embedFiles
}
if typ == types.Types[TSTRING] {
if typ == types.Types[types.TSTRING] {
return embedString
}
if typ.Sym == nil && typ.IsSlice() && typ.Elem() == types.Bytetype {
@ -191,7 +192,7 @@ func dumpembeds() {
// initEmbed emits the init data for a //go:embed variable,
// which is either a string, a []byte, or an embed.FS.
func initEmbed(v *Node) {
func initEmbed(v *ir.Node) {
files := v.Name.Param.EmbedFiles()
switch kind := embedKind(v.Type); kind {
case embedUnknown:
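For orientation, the three kinds the switch distinguishes map onto the three declaration forms //go:embed accepts in user code (an ordinary-usage sketch, not compiler internals):

	package main

	import "embed"

	//go:embed hello.txt
	var s string // embedString: one file's contents as a string

	//go:embed hello.txt
	var b []byte // embedBytes: one file's contents as a byte slice

	//go:embed static/*
	var fsys embed.FS // embedFiles: a read-only tree of files

	func main() {}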


@ -1,474 +0,0 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"fmt"
)
func escapes(all []*Node) {
visitBottomUp(all, escapeFuncs)
}
const (
EscFuncUnknown = 0 + iota
EscFuncPlanned
EscFuncStarted
EscFuncTagged
)
func min8(a, b int8) int8 {
if a < b {
return a
}
return b
}
func max8(a, b int8) int8 {
if a > b {
return a
}
return b
}
const (
EscUnknown = iota
EscNone // Does not escape to heap, result, or parameters.
EscHeap // Reachable from the heap
EscNever // By construction will not escape.
)
// funcSym returns fn.Func.Nname.Sym if no nils are encountered along the way.
func funcSym(fn *Node) *types.Sym {
if fn == nil || fn.Func.Nname == nil {
return nil
}
return fn.Func.Nname.Sym
}
// Mark labels that have no backjumps to them as not increasing e.loopdepth.
// Walk hasn't generated (goto|label).Left.Sym.Label yet, so we'll cheat
// and set it to one of the following two. Then in esc we'll clear it again.
var (
looping = nod(OXXX, nil, nil)
nonlooping = nod(OXXX, nil, nil)
)
func isSliceSelfAssign(dst, src *Node) bool {
// Detect the following special case.
//
// func (b *Buffer) Foo() {
// n, m := ...
// b.buf = b.buf[n:m]
// }
//
// This assignment is a no-op for escape analysis,
// it does not store any new pointers into b that were not already there.
// However, without this special case b will escape, because we assign to ODEREF/ODOTPTR.
// Here we assume that the statement will not contain calls,
// that is, that order will move any calls to init.
// Otherwise base ONAME value could change between the moments
// when we evaluate it for dst and for src.
// dst is ONAME dereference.
if dst.Op != ODEREF && dst.Op != ODOTPTR || dst.Left.Op != ONAME {
return false
}
// src is a slice operation.
switch src.Op {
case OSLICE, OSLICE3, OSLICESTR:
// OK.
case OSLICEARR, OSLICE3ARR:
// Since arrays are embedded in their containing object,
// slicing a non-pointer array introduces a new pointer into b that was not already there
// (pointer to b itself). After such assignment, if b contents escape,
// b escapes as well. If we ignore such OSLICEARR, we will conclude
// that b does not escape when b contents do.
//
// Pointer to an array is OK since it's not stored inside b directly.
// For slicing an array (not pointer to array), there is an implicit OADDR.
// We check that to determine non-pointer array slicing.
if src.Left.Op == OADDR {
return false
}
default:
return false
}
// slice is applied to ONAME dereference.
if src.Left.Op != ODEREF && src.Left.Op != ODOTPTR || src.Left.Left.Op != ONAME {
return false
}
// dst and src reference the same base ONAME.
return dst.Left == src.Left.Left
}
// isSelfAssign reports whether assignment from src to dst can
// be ignored by the escape analysis as it's effectively a self-assignment.
func isSelfAssign(dst, src *Node) bool {
if isSliceSelfAssign(dst, src) {
return true
}
// Detect trivial assignments that assign back to the same object.
//
// It covers these cases:
// val.x = val.y
// val.x[i] = val.y[j]
// val.x1.x2 = val.x1.y2
// ... etc
//
// These assignments do not change assigned object lifetime.
if dst == nil || src == nil || dst.Op != src.Op {
return false
}
switch dst.Op {
case ODOT, ODOTPTR:
// Safe trailing accessors that are permitted to differ.
case OINDEX:
if mayAffectMemory(dst.Right) || mayAffectMemory(src.Right) {
return false
}
default:
return false
}
// The expression prefix must be both "safe" and identical.
return samesafeexpr(dst.Left, src.Left)
}
// mayAffectMemory reports whether evaluation of n may affect the program's
// memory state. If the expression can't affect memory state, then it can be
// safely ignored by the escape analysis.
func mayAffectMemory(n *Node) bool {
// We may want to use a list of "memory safe" ops instead of generally
// "side-effect free", which would include all calls and other ops that can
// allocate or change global state. For now, it's safer to start with the latter.
//
// We're ignoring things like division by zero, index out of range,
// and nil pointer dereference here.
switch n.Op {
case ONAME, OCLOSUREVAR, OLITERAL, ONIL:
return false
// Left+Right group.
case OINDEX, OADD, OSUB, OOR, OXOR, OMUL, OLSH, ORSH, OAND, OANDNOT, ODIV, OMOD:
return mayAffectMemory(n.Left) || mayAffectMemory(n.Right)
// Left group.
case ODOT, ODOTPTR, ODEREF, OCONVNOP, OCONV, OLEN, OCAP,
ONOT, OBITNOT, OPLUS, ONEG, OALIGNOF, OOFFSETOF, OSIZEOF:
return mayAffectMemory(n.Left)
default:
return true
}
}
// heapAllocReason returns the reason the given Node must be heap
// allocated, or the empty string if it does not need to be.
func heapAllocReason(n *Node) string {
if n.Type == nil {
return ""
}
// Parameters are always passed via the stack.
if n.Op == ONAME && (n.Class() == PPARAM || n.Class() == PPARAMOUT) {
return ""
}
if n.Type.Width > maxStackVarSize {
return "too large for stack"
}
if (n.Op == ONEW || n.Op == OPTRLIT) && n.Type.Elem().Width >= maxImplicitStackVarSize {
return "too large for stack"
}
if n.Op == OCLOSURE && closureType(n).Size() >= maxImplicitStackVarSize {
return "too large for stack"
}
if n.Op == OCALLPART && partialCallType(n).Size() >= maxImplicitStackVarSize {
return "too large for stack"
}
if n.Op == OMAKESLICE {
r := n.Right
if r == nil {
r = n.Left
}
if !smallintconst(r) {
return "non-constant size"
}
if t := n.Type; t.Elem().Width != 0 && r.Int64Val() >= maxImplicitStackVarSize/t.Elem().Width {
return "too large for stack"
}
}
return ""
}
// addrescapes tags node n as having had its address taken
// by "increasing" the "value" of n.Esc to EscHeap.
// Storage is allocated as necessary to allow the address
// to be taken.
func addrescapes(n *Node) {
switch n.Op {
default:
// Unexpected Op, probably due to a previous type error. Ignore.
case ODEREF, ODOTPTR:
// Nothing to do.
case ONAME:
if n == nodfp {
break
}
// if this is a tmpname (PAUTO), it was tagged by tmpname as not escaping.
// on PPARAM it means something different.
if n.Class() == PAUTO && n.Esc == EscNever {
break
}
// If a closure reference escapes, mark the outer variable as escaping.
if n.Name.IsClosureVar() {
addrescapes(n.Name.Defn)
break
}
if n.Class() != PPARAM && n.Class() != PPARAMOUT && n.Class() != PAUTO {
break
}
// This is a plain parameter or local variable that needs to move to the heap,
// but possibly for the function outside the one we're compiling.
// That is, if we have:
//
// func f(x int) {
// func() {
// global = &x
// }
// }
//
// then we're analyzing the inner closure but we need to move x to the
// heap in f, not in the inner closure. Flip over to f before calling moveToHeap.
oldfn := Curfn
Curfn = n.Name.Curfn
if Curfn.Op == OCLOSURE {
Curfn = Curfn.Func.Decl
panic("can't happen")
}
ln := base.Pos
base.Pos = Curfn.Pos
moveToHeap(n)
Curfn = oldfn
base.Pos = ln
// ODOTPTR has already been introduced,
// so these are the non-pointer ODOT and OINDEX.
// In &x[0], if x is a slice, then x does not
// escape--the pointer inside x does, but that
// is always a heap pointer anyway.
case ODOT, OINDEX, OPAREN, OCONVNOP:
if !n.Left.Type.IsSlice() {
addrescapes(n.Left)
}
}
}
// moveToHeap records the parameter or local variable n as moved to the heap.
func moveToHeap(n *Node) {
if base.Flag.LowerR != 0 {
Dump("MOVE", n)
}
if base.Flag.CompilingRuntime {
base.Errorf("%v escapes to heap, not allowed in runtime", n)
}
if n.Class() == PAUTOHEAP {
Dump("n", n)
base.Fatalf("double move to heap")
}
// Allocate a local stack variable to hold the pointer to the heap copy.
// temp will add it to the function declaration list automatically.
heapaddr := temp(types.NewPtr(n.Type))
heapaddr.Sym = lookup("&" + n.Sym.Name)
heapaddr.Orig.Sym = heapaddr.Sym
heapaddr.Pos = n.Pos
// Unset AutoTemp to persist the &foo variable name through SSA to
// liveness analysis.
// TODO(mdempsky/drchase): Cleaner solution?
heapaddr.Name.SetAutoTemp(false)
// Parameters have a local stack copy used at function start/end
// in addition to the copy in the heap that may live longer than
// the function.
if n.Class() == PPARAM || n.Class() == PPARAMOUT {
if n.Xoffset == BADWIDTH {
base.Fatalf("addrescapes before param assignment")
}
// We rewrite n below to be a heap variable (indirection of heapaddr).
// Preserve a copy so we can still write code referring to the original,
// and substitute that copy into the function declaration list
// so that analyses of the local (on-stack) variables use it.
stackcopy := newname(n.Sym)
stackcopy.Type = n.Type
stackcopy.Xoffset = n.Xoffset
stackcopy.SetClass(n.Class())
stackcopy.Name.Param.Heapaddr = heapaddr
if n.Class() == PPARAMOUT {
// Make sure the pointer to the heap copy is kept live throughout the function.
// The function could panic at any point, and then a defer could recover.
// Thus, we need the pointer to the heap copy always available so the
// post-deferreturn code can copy the return value back to the stack.
// See issue 16095.
heapaddr.Name.SetIsOutputParamHeapAddr(true)
}
n.Name.Param.Stackcopy = stackcopy
// Substitute the stackcopy into the function variable list so that
// liveness and other analyses use the underlying stack slot
// and not the now-pseudo-variable n.
found := false
for i, d := range Curfn.Func.Dcl {
if d == n {
Curfn.Func.Dcl[i] = stackcopy
found = true
break
}
// Parameters are before locals, so can stop early.
// This limits the search even in functions with many local variables.
if d.Class() == PAUTO {
break
}
}
if !found {
base.Fatalf("cannot find %v in local variable list", n)
}
Curfn.Func.Dcl = append(Curfn.Func.Dcl, n)
}
// Modify n in place so that uses of n now mean indirection of the heapaddr.
n.SetClass(PAUTOHEAP)
n.Xoffset = 0
n.Name.Param.Heapaddr = heapaddr
n.Esc = EscHeap
if base.Flag.LowerM != 0 {
base.WarnfAt(n.Pos, "moved to heap: %v", n)
}
}
// This special tag is applied to uintptr variables
// that we believe may hold unsafe.Pointers for
// calls into assembly functions.
const unsafeUintptrTag = "unsafe-uintptr"
// This special tag is applied to uintptr parameters of functions
// marked go:uintptrescapes.
const uintptrEscapesTag = "uintptr-escapes"
func (e *Escape) paramTag(fn *Node, narg int, f *types.Field) string {
name := func() string {
if f.Sym != nil {
return f.Sym.Name
}
return fmt.Sprintf("arg#%d", narg)
}
if fn.Nbody.Len() == 0 {
// Assume that uintptr arguments must be held live across the call.
// This is most important for syscall.Syscall.
// See golang.org/issue/13372.
// This really doesn't have much to do with escape analysis per se,
// but we are reusing the ability to annotate an individual function
// argument and pass those annotations along to importing code.
if f.Type.IsUintptr() {
if base.Flag.LowerM != 0 {
base.WarnfAt(f.Pos, "assuming %v is unsafe uintptr", name())
}
return unsafeUintptrTag
}
if !f.Type.HasPointers() { // don't bother tagging for scalars
return ""
}
var esc EscLeaks
// External functions are assumed unsafe, unless
// //go:noescape is given before the declaration.
if fn.Func.Pragma&Noescape != 0 {
if base.Flag.LowerM != 0 && f.Sym != nil {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
} else {
if base.Flag.LowerM != 0 && f.Sym != nil {
base.WarnfAt(f.Pos, "leaking param: %v", name())
}
esc.AddHeap(0)
}
return esc.Encode()
}
if fn.Func.Pragma&UintptrEscapes != 0 {
if f.Type.IsUintptr() {
if base.Flag.LowerM != 0 {
base.WarnfAt(f.Pos, "marking %v as escaping uintptr", name())
}
return uintptrEscapesTag
}
if f.IsDDD() && f.Type.Elem().IsUintptr() {
// final argument is ...uintptr.
if base.Flag.LowerM != 0 {
base.WarnfAt(f.Pos, "marking %v as escaping ...uintptr", name())
}
return uintptrEscapesTag
}
}
if !f.Type.HasPointers() { // don't bother tagging for scalars
return ""
}
// Unnamed parameters are unused and therefore do not escape.
if f.Sym == nil || f.Sym.IsBlank() {
var esc EscLeaks
return esc.Encode()
}
n := asNode(f.Nname)
loc := e.oldLoc(n)
esc := loc.paramEsc
esc.Optimize()
if base.Flag.LowerM != 0 && !loc.escapes {
if esc.Empty() {
base.WarnfAt(f.Pos, "%v does not escape", name())
}
if x := esc.Heap(); x >= 0 {
if x == 0 {
base.WarnfAt(f.Pos, "leaking param: %v", name())
} else {
// TODO(mdempsky): Mention level=x like below?
base.WarnfAt(f.Pos, "leaking param content: %v", name())
}
}
for i := 0; i < numEscResults; i++ {
if x := esc.Result(i); x >= 0 {
res := fn.Type.Results().Field(i).Sym
base.WarnfAt(f.Pos, "leaking param: %v to result %v level=%d", name(), res, x)
}
}
}
return esc.Encode()
}
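The self-assignment shapes that isSliceSelfAssign and isSelfAssign accept are easy to see in ordinary code; neither statement below stores a new pointer anywhere, so neither forces the receiver to escape (a user-level sketch):

	type Buffer struct {
		buf []byte
	}

	func (b *Buffer) trim(n, m int) {
		b.buf = b.buf[n:m] // slice self-assignment: no new pointers into *b
	}

	type pair struct{ x, y []int }

	func (p *pair) copyField() {
		p.x = p.y // same base object on both sides: ignored by escape analysis
	}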

[diff suppressed because it is too large]


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/src"
@ -20,10 +21,10 @@ func exportf(bout *bio.Writer, format string, args ...interface{}) {
}
}
var asmlist []*Node
var asmlist []*ir.Node
// exportsym marks n for export (or reexport).
func exportsym(n *Node) {
func exportsym(n *ir.Node) {
if n.Sym.OnExportList() {
return
}
@ -40,14 +41,14 @@ func initname(s string) bool {
return s == "init"
}
func autoexport(n *Node, ctxt Class) {
if n.Sym.Pkg != localpkg {
func autoexport(n *ir.Node, ctxt ir.Class) {
if n.Sym.Pkg != ir.LocalPkg {
return
}
if (ctxt != PEXTERN && ctxt != PFUNC) || dclcontext != PEXTERN {
if (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) || dclcontext != ir.PEXTERN {
return
}
if n.Type != nil && n.Type.IsKind(TFUNC) && n.IsMethod() {
if n.Type != nil && n.Type.IsKind(types.TFUNC) && ir.IsMethod(n) {
return
}
@ -73,8 +74,8 @@ func dumpexport(bout *bio.Writer) {
}
}
func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node {
n := asNode(s.PkgDef())
func importsym(ipkg *types.Pkg, s *types.Sym, op ir.Op) *ir.Node {
n := ir.AsNode(s.PkgDef())
if n == nil {
// iimport should have created a stub ONONAME
// declaration for all imported symbols. The exception
@ -85,10 +86,10 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node {
}
n = dclname(s)
s.SetPkgDef(asTypesNode(n))
s.SetPkgDef(ir.AsTypesNode(n))
s.Importdef = ipkg
}
if n.Op != ONONAME && n.Op != op {
if n.Op != ir.ONONAME && n.Op != op {
redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
}
return n
@ -98,16 +99,16 @@ func importsym(ipkg *types.Pkg, s *types.Sym, op Op) *Node {
// If no such type has been declared yet, a forward declaration is returned.
// ipkg is the package being imported
func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
n := importsym(ipkg, s, OTYPE)
if n.Op != OTYPE {
t := types.New(TFORW)
n := importsym(ipkg, s, ir.OTYPE)
if n.Op != ir.OTYPE {
t := types.New(types.TFORW)
t.Sym = s
t.Nod = asTypesNode(n)
t.Nod = ir.AsTypesNode(n)
n.Op = OTYPE
n.Op = ir.OTYPE
n.Pos = pos
n.Type = t
n.SetClass(PEXTERN)
n.SetClass(ir.PEXTERN)
}
t := n.Type
@ -119,9 +120,9 @@ func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *types.Type {
// importobj declares symbol s as an imported object representable by op.
// ipkg is the package being imported
func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op Op, ctxt Class, t *types.Type) *Node {
func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Node {
n := importsym(ipkg, s, op)
if n.Op != ONONAME {
if n.Op != ir.ONONAME {
if n.Op == op && (n.Class() != ctxt || !types.Identical(n.Type, t)) {
redeclare(base.Pos, s, fmt.Sprintf("during import %q", ipkg.Path))
}
@ -131,7 +132,7 @@ func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op Op, ctxt Class, t
n.Op = op
n.Pos = pos
n.SetClass(ctxt)
if ctxt == PFUNC {
if ctxt == ir.PFUNC {
n.Sym.SetFunc(true)
}
n.Type = t
@ -141,7 +142,7 @@ func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op Op, ctxt Class, t
// importconst declares symbol s as an imported constant with type t and value val.
// ipkg is the package being imported
func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) {
n := importobj(ipkg, pos, s, OLITERAL, PEXTERN, t)
n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
if n == nil { // TODO: Check that value matches.
return
}
@ -156,12 +157,12 @@ func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val
// importfunc declares symbol s as an imported function with type t.
// ipkg is the package being imported
func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
n := importobj(ipkg, pos, s, ONAME, PFUNC, t)
n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)
if n == nil {
return
}
n.Func = new(Func)
n.Func = new(ir.Func)
if base.Flag.E != 0 {
fmt.Printf("import func %v%S\n", s, t)
@ -171,7 +172,7 @@ func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
// importvar declares symbol s as an imported variable with type t.
// ipkg is the package being imported
func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
n := importobj(ipkg, pos, s, ONAME, PEXTERN, t)
n := importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
if n == nil {
return
}
@ -184,7 +185,7 @@ func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
// importalias declares symbol s as an imported type alias with type t.
// ipkg is the package being imported
func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) {
n := importobj(ipkg, pos, s, OTYPE, PEXTERN, t)
n := importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
if n == nil {
return
}
@ -199,20 +200,20 @@ func dumpasmhdr() {
if err != nil {
base.Fatalf("%v", err)
}
fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", localpkg.Name)
fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", ir.LocalPkg.Name)
for _, n := range asmlist {
if n.Sym.IsBlank() {
continue
}
switch n.Op {
case OLITERAL:
case ir.OLITERAL:
t := n.Val().Kind()
if t == constant.Float || t == constant.Complex {
break
}
fmt.Fprintf(b, "#define const_%s %#v\n", n.Sym.Name, n.Val())
case OTYPE:
case ir.OTYPE:
t := n.Type
if !t.IsStruct() || t.StructType().Map != nil || t.IsFuncArgStruct() {
break


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
@ -29,14 +30,14 @@ func sysvar(name string) *obj.LSym {
// isParamStackCopy reports whether this is the on-stack copy of a
// function parameter that moved to the heap.
func (n *Node) isParamStackCopy() bool {
return n.Op == ONAME && (n.Class() == PPARAM || n.Class() == PPARAMOUT) && n.Name.Param.Heapaddr != nil
func isParamStackCopy(n *ir.Node) bool {
return n.Op == ir.ONAME && (n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Name.Param.Heapaddr != nil
}
// isParamHeapCopy reports whether this is the on-heap copy of
// a function parameter that moved to the heap.
func (n *Node) isParamHeapCopy() bool {
return n.Op == ONAME && n.Class() == PAUTOHEAP && n.Name.Param.Stackcopy != nil
func isParamHeapCopy(n *ir.Node) bool {
return n.Op == ir.ONAME && n.Class() == ir.PAUTOHEAP && n.Name.Param.Stackcopy != nil
}
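A concrete source of such copies: when a parameter's address escapes, it is given a heap copy (moveToHeap in the old esc.go above), while the original stack slot survives as the Stackcopy used at function entry and exit. For example:

	func leak(x int) *int {
		// x escapes, so it lives in a heap cell; a stack copy still
		// receives the incoming argument and is copied to the heap cell.
		return &x
	}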
// autotmpname returns the name for an autotmp variable numbered n.
@ -51,12 +52,12 @@ func autotmpname(n int) string {
}
// make a new Node off the books
func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node {
func tempAt(pos src.XPos, curfn *ir.Node, t *types.Type) *ir.Node {
if curfn == nil {
base.Fatalf("no curfn for tempAt")
}
if curfn.Op == OCLOSURE {
Dump("tempAt", curfn)
if curfn.Op == ir.OCLOSURE {
ir.Dump("tempAt", curfn)
base.Fatalf("adding tempAt to wrong closure function")
}
if t == nil {
@ -65,12 +66,12 @@ func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node {
s := &types.Sym{
Name: autotmpname(len(curfn.Func.Dcl)),
Pkg: localpkg,
Pkg: ir.LocalPkg,
}
n := newnamel(pos, s)
s.Def = asTypesNode(n)
n := ir.NewNameAt(pos, s)
s.Def = ir.AsTypesNode(n)
n.Type = t
n.SetClass(PAUTO)
n.SetClass(ir.PAUTO)
n.Esc = EscNever
n.Name.Curfn = curfn
n.Name.SetUsed(true)
@ -82,6 +83,6 @@ func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node {
return n.Orig
}
func temp(t *types.Type) *Node {
func temp(t *types.Type) *ir.Node {
return tempAt(base.Pos, Curfn, t)
}


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
@ -13,10 +14,6 @@ import (
"sync"
)
const (
BADWIDTH = types.BADWIDTH
)
var (
// maximum size variable which we will allocate on the stack.
// This limit is for explicit variable declarations like "var x T" or "x := ...".
@ -40,7 +37,7 @@ var (
// isRuntimePkg reports whether p is package runtime.
func isRuntimePkg(p *types.Pkg) bool {
if base.Flag.CompilingRuntime && p == localpkg {
if base.Flag.CompilingRuntime && p == ir.LocalPkg {
return true
}
return p.Path == "runtime"
@ -48,31 +45,12 @@ func isRuntimePkg(p *types.Pkg) bool {
// isReflectPkg reports whether p is package reflect.
func isReflectPkg(p *types.Pkg) bool {
if p == localpkg {
if p == ir.LocalPkg {
return base.Ctxt.Pkgpath == "reflect"
}
return p.Path == "reflect"
}
// The Class of a variable/function describes the "storage class"
// of a variable or function. During parsing, storage classes are
// called declaration contexts.
type Class uint8
//go:generate stringer -type=Class
const (
Pxxx Class = iota // no class; used during ssa conversion to indicate pseudo-variables
PEXTERN // global variables
PAUTO // local variables
PAUTOHEAP // local variables or parameters moved to heap
PPARAM // input arguments
PPARAMOUT // output results
PFUNC // global functions
// Careful: Class is stored in three bits in Node.flags.
_ = uint((1 << 3) - iota) // static assert for iota <= (1 << 3)
)
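The blank assignment closing that const block is a compile-time guard: converting a negative untyped constant to uint is a compile error, so the build breaks as soon as the enum no longer fits in three bits. The same trick in isolation, with made-up names:

	type Color uint8

	const (
		Red Color = iota
		Green
		Blue
		// iota is 3 here; the conversion fails to compile once more
		// than four values (two bits' worth) are declared above it.
		_ = uint((1 << 2) - iota)
	)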
// Slices in the runtime are represented by three components:
//
// type slice struct {
@ -102,8 +80,6 @@ var pragcgobuf [][]string
var decldepth int32
var localpkg *types.Pkg // package being compiled
var inimport bool // set during import
var itabpkg *types.Pkg // fake pkg for itab entries
@ -126,55 +102,51 @@ var gopkg *types.Pkg // pseudo-package for method symbols on anonymous receiver
var zerosize int64
var simtype [NTYPE]types.EType
var simtype [types.NTYPE]types.EType
var (
isInt [NTYPE]bool
isFloat [NTYPE]bool
isComplex [NTYPE]bool
issimple [NTYPE]bool
isInt [types.NTYPE]bool
isFloat [types.NTYPE]bool
isComplex [types.NTYPE]bool
issimple [types.NTYPE]bool
)
var (
okforeq [NTYPE]bool
okforadd [NTYPE]bool
okforand [NTYPE]bool
okfornone [NTYPE]bool
okforcmp [NTYPE]bool
okforbool [NTYPE]bool
okforcap [NTYPE]bool
okforlen [NTYPE]bool
okforarith [NTYPE]bool
okforeq [types.NTYPE]bool
okforadd [types.NTYPE]bool
okforand [types.NTYPE]bool
okfornone [types.NTYPE]bool
okforcmp [types.NTYPE]bool
okforbool [types.NTYPE]bool
okforcap [types.NTYPE]bool
okforlen [types.NTYPE]bool
okforarith [types.NTYPE]bool
)
var okforconst [NTYPE]bool
var (
okfor [OEND][]bool
iscmp [OEND]bool
okfor [ir.OEND][]bool
iscmp [ir.OEND]bool
)
var xtop []*Node
var xtop []*ir.Node
var exportlist []*Node
var exportlist []*ir.Node
var importlist []*Node // imported functions and methods with inlinable bodies
var importlist []*ir.Node // imported functions and methods with inlinable bodies
var (
funcsymsmu sync.Mutex // protects funcsyms and associated package lookups (see func funcsym)
funcsyms []*types.Sym
)
var dclcontext Class // PEXTERN/PAUTO
var dclcontext ir.Class // PEXTERN/PAUTO
var Curfn *Node
var Curfn *ir.Node
var Widthptr int
var Widthreg int
var nblank *Node
var typecheckok bool
// Whether we are adding any sort of code instrumentation, such as
@ -184,7 +156,7 @@ var instrumenting bool
// Whether we are tracking lexical scopes for DWARF.
var trackScopes bool
var nodfp *Node
var nodfp *ir.Node
var autogeneratedPos src.XPos
@ -221,7 +193,7 @@ var thearch Arch
var (
staticuint64s,
zerobase *Node
zerobase *ir.Node
assertE2I,
assertE2I2,


@ -32,6 +32,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/internal/obj"
"cmd/internal/objabi"
@ -46,7 +47,7 @@ type Progs struct {
next *obj.Prog // next Prog
pc int64 // virtual PC; count of Progs
pos src.XPos // position to use for new Progs
curfn *Node // fn these Progs are for
curfn *ir.Node // fn these Progs are for
progcache []obj.Prog // local progcache
cacheidx int // first free element of progcache
@ -56,7 +57,7 @@ type Progs struct {
// newProgs returns a new Progs for fn.
// worker indicates which of the backend workers will use the Progs.
func newProgs(fn *Node, worker int) *Progs {
func newProgs(fn *ir.Node, worker int) *Progs {
pp := new(Progs)
if base.Ctxt.CanReuseProgs() {
sz := len(sharedProgArray) / base.Flag.LowerC
@ -173,17 +174,17 @@ func (pp *Progs) Appendpp(p *obj.Prog, as obj.As, ftype obj.AddrType, freg int16
return q
}
func (pp *Progs) settext(fn *Node) {
func (pp *Progs) settext(fn *ir.Node) {
if pp.Text != nil {
base.Fatalf("Progs.settext called twice")
}
ptxt := pp.Prog(obj.ATEXT)
pp.Text = ptxt
fn.Func.lsym.Func().Text = ptxt
fn.Func.LSym.Func().Text = ptxt
ptxt.From.Type = obj.TYPE_MEM
ptxt.From.Name = obj.NAME_EXTERN
ptxt.From.Sym = fn.Func.lsym
ptxt.From.Sym = fn.Func.LSym
}
// initLSym defines f's obj.LSym and initializes it based on the
@ -192,36 +193,36 @@ func (pp *Progs) settext(fn *Node) {
//
// initLSym must be called exactly once per function and must be
// called for both functions with bodies and functions without bodies.
func (f *Func) initLSym(hasBody bool) {
if f.lsym != nil {
func initLSym(f *ir.Func, hasBody bool) {
if f.LSym != nil {
base.Fatalf("Func.initLSym called twice")
}
if nam := f.Nname; !nam.isBlank() {
f.lsym = nam.Sym.Linksym()
if f.Pragma&Systemstack != 0 {
f.lsym.Set(obj.AttrCFunc, true)
if nam := f.Nname; !ir.IsBlank(nam) {
f.LSym = nam.Sym.Linksym()
if f.Pragma&ir.Systemstack != 0 {
f.LSym.Set(obj.AttrCFunc, true)
}
var aliasABI obj.ABI
needABIAlias := false
defABI, hasDefABI := symabiDefs[f.lsym.Name]
defABI, hasDefABI := symabiDefs[f.LSym.Name]
if hasDefABI && defABI == obj.ABI0 {
// Symbol is defined as ABI0. Create an
// Internal -> ABI0 wrapper.
f.lsym.SetABI(obj.ABI0)
f.LSym.SetABI(obj.ABI0)
needABIAlias, aliasABI = true, obj.ABIInternal
} else {
// No ABI override. Check that the symbol is
// using the expected ABI.
want := obj.ABIInternal
if f.lsym.ABI() != want {
base.Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.lsym.Name, f.lsym.ABI(), want)
if f.LSym.ABI() != want {
base.Fatalf("function symbol %s has the wrong ABI %v, expected %v", f.LSym.Name, f.LSym.ABI(), want)
}
}
isLinknameExported := nam.Sym.Linkname != "" && (hasBody || hasDefABI)
if abi, ok := symabiRefs[f.lsym.Name]; (ok && abi == obj.ABI0) || isLinknameExported {
if abi, ok := symabiRefs[f.LSym.Name]; (ok && abi == obj.ABI0) || isLinknameExported {
// Either 1) this symbol is definitely
// referenced as ABI0 from this package; or 2)
// this symbol is defined in this package but
@ -233,7 +234,7 @@ func (f *Func) initLSym(hasBody bool) {
// since other packages may "pull" symbols
// using linkname and we don't want to create
// duplicate ABI wrappers.
if f.lsym.ABI() != obj.ABI0 {
if f.LSym.ABI() != obj.ABI0 {
needABIAlias, aliasABI = true, obj.ABI0
}
}
@ -244,9 +245,9 @@ func (f *Func) initLSym(hasBody bool) {
// rather than looking them up. The uniqueness
// of f.lsym ensures uniqueness of asym.
asym := &obj.LSym{
Name: f.lsym.Name,
Name: f.LSym.Name,
Type: objabi.SABIALIAS,
R: []obj.Reloc{{Sym: f.lsym}}, // 0 size, so "informational"
R: []obj.Reloc{{Sym: f.LSym}}, // 0 size, so "informational"
}
asym.SetABI(aliasABI)
asym.Set(obj.AttrDuplicateOK, true)
@ -269,7 +270,7 @@ func (f *Func) initLSym(hasBody bool) {
if f.Needctxt() {
flag |= obj.NEEDCTXT
}
if f.Pragma&Nosplit != 0 {
if f.Pragma&ir.Nosplit != 0 {
flag |= obj.NOSPLIT
}
if f.ReflectMethod() {
@ -286,10 +287,10 @@ func (f *Func) initLSym(hasBody bool) {
}
}
base.Ctxt.InitTextSym(f.lsym, flag)
base.Ctxt.InitTextSym(f.LSym, flag)
}
func ggloblnod(nam *Node) {
func ggloblnod(nam *ir.Node) {
s := nam.Sym.Linksym()
s.Gotype = ngotype(nam).Linksym()
flags := 0


@ -205,6 +205,7 @@ import (
"bufio"
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/goobj"
"cmd/internal/src"
@ -258,8 +259,8 @@ func iexport(out *bufio.Writer) {
p := iexporter{
allPkgs: map[*types.Pkg]bool{},
stringIndex: map[string]uint64{},
declIndex: map[*Node]uint64{},
inlineIndex: map[*Node]uint64{},
declIndex: map[*ir.Node]uint64{},
inlineIndex: map[*ir.Node]uint64{},
typIndex: map[*types.Type]uint64{},
}
@ -278,8 +279,8 @@ func iexport(out *bufio.Writer) {
// Loop until no more work. We use a queue because while
// writing out inline bodies, we may discover additional
// declarations that are needed.
for !p.declTodo.empty() {
p.doDecl(p.declTodo.popLeft())
for !p.declTodo.Empty() {
p.doDecl(p.declTodo.PopLeft())
}
// Append indices to data0 section.
@ -313,15 +314,15 @@ func iexport(out *bufio.Writer) {
// we're writing out the main index, which is also read by
// non-compiler tools and includes a complete package description
// (i.e., name and height).
func (w *exportWriter) writeIndex(index map[*Node]uint64, mainIndex bool) {
func (w *exportWriter) writeIndex(index map[*ir.Node]uint64, mainIndex bool) {
// Build a map from packages to objects from that package.
pkgObjs := map[*types.Pkg][]*Node{}
pkgObjs := map[*types.Pkg][]*ir.Node{}
// For the main index, make sure to include every package that
// we reference, even if we're not exporting (or reexporting)
// any symbols from it.
if mainIndex {
pkgObjs[localpkg] = nil
pkgObjs[ir.LocalPkg] = nil
for pkg := range w.p.allPkgs {
pkgObjs[pkg] = nil
}
@ -367,14 +368,14 @@ type iexporter struct {
// main index.
allPkgs map[*types.Pkg]bool
declTodo nodeQueue
declTodo ir.NodeQueue
strings intWriter
stringIndex map[string]uint64
data0 intWriter
declIndex map[*Node]uint64
inlineIndex map[*Node]uint64
declIndex map[*ir.Node]uint64
inlineIndex map[*ir.Node]uint64
typIndex map[*types.Type]uint64
}
@ -393,13 +394,13 @@ func (p *iexporter) stringOff(s string) uint64 {
}
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(n *Node) {
if n.Sym == nil || asNode(n.Sym.Def) != n && n.Op != OTYPE {
func (p *iexporter) pushDecl(n *ir.Node) {
if n.Sym == nil || ir.AsNode(n.Sym.Def) != n && n.Op != ir.OTYPE {
base.Fatalf("weird Sym: %v, %v", n, n.Sym)
}
// Don't export predeclared declarations.
if n.Sym.Pkg == builtinpkg || n.Sym.Pkg == unsafepkg {
if n.Sym.Pkg == ir.BuiltinPkg || n.Sym.Pkg == unsafepkg {
return
}
@ -408,7 +409,7 @@ func (p *iexporter) pushDecl(n *Node) {
}
p.declIndex[n] = ^uint64(0) // mark n present in work queue
p.declTodo.pushRight(n)
p.declTodo.PushRight(n)
}
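pushDecl plus the drain loop in iexport is the standard worklist idiom: an item is marked the moment it is enqueued (here via the ^uint64(0) sentinel in declIndex), so that work discovered during processing, such as declarations referenced from inline bodies, is queued at most once. The bare pattern, with illustrative types:

	type worklist struct {
		seen  map[string]bool
		queue []string
	}

	func (w *worklist) push(item string) {
		if w.seen[item] {
			return // already queued or already processed
		}
		w.seen[item] = true // mark on enqueue, not on dequeue
		w.queue = append(w.queue, item)
	}

	func (w *worklist) drain(process func(string) []string) {
		for len(w.queue) > 0 {
			item := w.queue[0]
			w.queue = w.queue[1:]
			for _, dep := range process(item) { // processing may find new work
				w.push(dep)
			}
		}
	}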
// exportWriter handles writing out individual data section chunks.
@ -422,22 +423,22 @@ type exportWriter struct {
prevColumn int64
}
func (p *iexporter) doDecl(n *Node) {
func (p *iexporter) doDecl(n *ir.Node) {
w := p.newWriter()
w.setPkg(n.Sym.Pkg, false)
switch n.Op {
case ONAME:
case ir.ONAME:
switch n.Class() {
case PEXTERN:
case ir.PEXTERN:
// Variable.
w.tag('V')
w.pos(n.Pos)
w.typ(n.Type)
w.varExt(n)
case PFUNC:
if n.IsMethod() {
case ir.PFUNC:
if ir.IsMethod(n) {
base.Fatalf("unexpected method: %v", n)
}
@ -451,14 +452,14 @@ func (p *iexporter) doDecl(n *Node) {
base.Fatalf("unexpected class: %v, %v", n, n.Class())
}
case OLITERAL:
case ir.OLITERAL:
// Constant.
n = typecheck(n, ctxExpr)
w.tag('C')
w.pos(n.Pos)
w.value(n.Type, n.Val())
case OTYPE:
case ir.OTYPE:
if IsAlias(n.Sym) {
// Alias.
w.tag('A')
@ -514,11 +515,11 @@ func (w *exportWriter) tag(tag byte) {
w.data.WriteByte(tag)
}
func (p *iexporter) doInline(f *Node) {
func (p *iexporter) doInline(f *ir.Node) {
w := p.newWriter()
w.setPkg(fnpkg(f), false)
w.stmtList(asNodes(f.Func.Inl.Body))
w.stmtList(ir.AsNodes(f.Func.Inl.Body))
p.inlineIndex[f] = w.flush()
}
@ -569,7 +570,7 @@ func (w *exportWriter) pkg(pkg *types.Pkg) {
w.string(pkg.Path)
}
func (w *exportWriter) qualifiedIdent(n *Node) {
func (w *exportWriter) qualifiedIdent(n *ir.Node) {
// Ensure any referenced declarations are written out too.
w.p.pushDecl(n)
@ -592,7 +593,7 @@ func (w *exportWriter) selector(s *types.Sym) {
} else {
pkg := w.currPkg
if types.IsExported(name) {
pkg = localpkg
pkg = ir.LocalPkg
}
if s.Pkg != pkg {
base.Fatalf("package mismatch in selector: %v in package %q, but want %q", s, s.Pkg.Path, pkg.Path)
@ -633,7 +634,7 @@ func (w *exportWriter) startType(k itag) {
func (w *exportWriter) doTyp(t *types.Type) {
if t.Sym != nil {
if t.Sym.Pkg == builtinpkg || t.Sym.Pkg == unsafepkg {
if t.Sym.Pkg == ir.BuiltinPkg || t.Sym.Pkg == unsafepkg {
base.Fatalf("builtin type missing from typIndex: %v", t)
}
@ -643,35 +644,35 @@ func (w *exportWriter) doTyp(t *types.Type) {
}
switch t.Etype {
case TPTR:
case types.TPTR:
w.startType(pointerType)
w.typ(t.Elem())
case TSLICE:
case types.TSLICE:
w.startType(sliceType)
w.typ(t.Elem())
case TARRAY:
case types.TARRAY:
w.startType(arrayType)
w.uint64(uint64(t.NumElem()))
w.typ(t.Elem())
case TCHAN:
case types.TCHAN:
w.startType(chanType)
w.uint64(uint64(t.ChanDir()))
w.typ(t.Elem())
case TMAP:
case types.TMAP:
w.startType(mapType)
w.typ(t.Key())
w.typ(t.Elem())
case TFUNC:
case types.TFUNC:
w.startType(signatureType)
w.setPkg(t.Pkg(), true)
w.signature(t)
case TSTRUCT:
case types.TSTRUCT:
w.startType(structType)
w.setPkg(t.Pkg(), true)
@ -684,7 +685,7 @@ func (w *exportWriter) doTyp(t *types.Type) {
w.string(f.Note)
}
case TINTER:
case types.TINTER:
var embeddeds, methods []*types.Field
for _, m := range t.Methods().Slice() {
if m.Sym != nil {
@ -719,7 +720,7 @@ func (w *exportWriter) setPkg(pkg *types.Pkg, write bool) {
if pkg == nil {
// TODO(mdempsky): Proactively set Pkg for types and
// remove this fallback logic.
pkg = localpkg
pkg = ir.LocalPkg
}
if write {
@ -746,7 +747,7 @@ func (w *exportWriter) paramList(fs []*types.Field) {
func (w *exportWriter) param(f *types.Field) {
w.pos(f.Pos)
w.localIdent(origSym(f.Sym), 0)
w.localIdent(ir.OrigSym(f.Sym), 0)
w.typ(f.Type)
}
@ -761,16 +762,16 @@ func constTypeOf(typ *types.Type) constant.Kind {
}
switch typ.Etype {
case TBOOL:
case types.TBOOL:
return constant.Bool
case TSTRING:
case types.TSTRING:
return constant.String
case TINT, TINT8, TINT16, TINT32, TINT64,
TUINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINTPTR:
case types.TINT, types.TINT8, types.TINT16, types.TINT32, types.TINT64,
types.TUINT, types.TUINT8, types.TUINT16, types.TUINT32, types.TUINT64, types.TUINTPTR:
return constant.Int
case TFLOAT32, TFLOAT64:
case types.TFLOAT32, types.TFLOAT64:
return constant.Float
case TCOMPLEX64, TCOMPLEX128:
case types.TCOMPLEX64, types.TCOMPLEX128:
return constant.Complex
}
@ -779,7 +780,7 @@ func constTypeOf(typ *types.Type) constant.Kind {
}
func (w *exportWriter) value(typ *types.Type, v constant.Value) {
assertRepresents(typ, v)
ir.AssertValidTypeForConst(typ, v)
w.typ(typ)
// Each type has only one admissible constant representation,
@ -808,9 +809,9 @@ func intSize(typ *types.Type) (signed bool, maxBytes uint) {
}
switch typ.Etype {
case TFLOAT32, TCOMPLEX64:
case types.TFLOAT32, types.TCOMPLEX64:
return true, 3
case TFLOAT64, TCOMPLEX128:
case types.TFLOAT64, types.TCOMPLEX128:
return true, 7
}
@ -820,7 +821,7 @@ func intSize(typ *types.Type) (signed bool, maxBytes uint) {
// The go/types API doesn't expose sizes to importers, so they
// don't know how big these types are.
switch typ.Etype {
case TINT, TUINT, TUINTPTR:
case types.TINT, types.TUINT, types.TUINTPTR:
maxBytes = 8
}
@ -954,12 +955,12 @@ func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
// Compiler-specific extensions.
func (w *exportWriter) varExt(n *Node) {
func (w *exportWriter) varExt(n *ir.Node) {
w.linkname(n.Sym)
w.symIdx(n.Sym)
}
func (w *exportWriter) funcExt(n *Node) {
func (w *exportWriter) funcExt(n *ir.Node) {
w.linkname(n.Sym)
w.symIdx(n.Sym)
@ -993,7 +994,7 @@ func (w *exportWriter) funcExt(n *Node) {
func (w *exportWriter) methExt(m *types.Field) {
w.bool(m.Nointerface())
w.funcExt(asNode(m.Nname))
w.funcExt(ir.AsNode(m.Nname))
}
func (w *exportWriter) linkname(s *types.Sym) {
@ -1029,15 +1030,15 @@ func (w *exportWriter) typeExt(t *types.Type) {
// Inline bodies.
func (w *exportWriter) stmtList(list Nodes) {
func (w *exportWriter) stmtList(list ir.Nodes) {
for _, n := range list.Slice() {
w.node(n)
}
w.op(OEND)
w.op(ir.OEND)
}
func (w *exportWriter) node(n *Node) {
if opprec[n.Op] < 0 {
func (w *exportWriter) node(n *ir.Node) {
if ir.OpPrec[n.Op] < 0 {
w.stmt(n)
} else {
w.expr(n)
@ -1046,8 +1047,8 @@ func (w *exportWriter) node(n *Node) {
// Caution: stmt will emit more than one node for statement nodes n that have a non-empty
// n.Ninit and where n cannot have a natural init section (such as in "if", "for", etc.).
func (w *exportWriter) stmt(n *Node) {
if n.Ninit.Len() > 0 && !stmtwithinit(n.Op) {
func (w *exportWriter) stmt(n *ir.Node) {
if n.Ninit.Len() > 0 && !ir.StmtWithInit(n.Op) {
// can't use stmtList here since we don't want the final OEND
for _, n := range n.Ninit.Slice() {
w.stmt(n)
@ -1055,8 +1056,8 @@ func (w *exportWriter) stmt(n *Node) {
}
switch op := n.Op; op {
case ODCL:
w.op(ODCL)
case ir.ODCL:
w.op(ir.ODCL)
w.pos(n.Left.Pos)
w.localName(n.Left)
w.typ(n.Left.Type)
@ -1064,19 +1065,19 @@ func (w *exportWriter) stmt(n *Node) {
// case ODCLFIELD:
// unimplemented - handled by default case
case OAS:
case ir.OAS:
// Don't export "v = <N>" initializing statements, hope they're always
// preceded by the DCL which will be re-parsed and typecheck to reproduce
// the "v = <N>" again.
if n.Right != nil {
w.op(OAS)
w.op(ir.OAS)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
}
case OASOP:
w.op(OASOP)
case ir.OASOP:
w.op(ir.OASOP)
w.pos(n.Pos)
w.op(n.SubOp())
w.expr(n.Left)
@ -1084,54 +1085,54 @@ func (w *exportWriter) stmt(n *Node) {
w.expr(n.Right)
}
case OAS2:
w.op(OAS2)
case ir.OAS2:
w.op(ir.OAS2)
w.pos(n.Pos)
w.exprList(n.List)
w.exprList(n.Rlist)
case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
w.op(OAS2)
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
w.op(ir.OAS2)
w.pos(n.Pos)
w.exprList(n.List)
w.exprList(asNodes([]*Node{n.Right}))
w.exprList(ir.AsNodes([]*ir.Node{n.Right}))
case ORETURN:
w.op(ORETURN)
case ir.ORETURN:
w.op(ir.ORETURN)
w.pos(n.Pos)
w.exprList(n.List)
// case ORETJMP:
// unreachable - generated by compiler for trampoline routines
case OGO, ODEFER:
case ir.OGO, ir.ODEFER:
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
case OIF:
w.op(OIF)
case ir.OIF:
w.op(ir.OIF)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.expr(n.Left)
w.stmtList(n.Nbody)
w.stmtList(n.Rlist)
case OFOR:
w.op(OFOR)
case ir.OFOR:
w.op(ir.OFOR)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.exprsOrNil(n.Left, n.Right)
w.stmtList(n.Nbody)
case ORANGE:
w.op(ORANGE)
case ir.ORANGE:
w.op(ir.ORANGE)
w.pos(n.Pos)
w.stmtList(n.List)
w.expr(n.Right)
w.stmtList(n.Nbody)
case OSELECT, OSWITCH:
case ir.OSELECT, ir.OSWITCH:
w.op(op)
w.pos(n.Pos)
w.stmtList(n.Ninit)
@ -1141,19 +1142,19 @@ func (w *exportWriter) stmt(n *Node) {
// case OCASE:
// handled by caseList
case OFALL:
w.op(OFALL)
case ir.OFALL:
w.op(ir.OFALL)
w.pos(n.Pos)
case OBREAK, OCONTINUE:
case ir.OBREAK, ir.OCONTINUE:
w.op(op)
w.pos(n.Pos)
w.exprsOrNil(n.Left, nil)
case OEMPTY:
case ir.OEMPTY:
// nothing to emit
case OGOTO, OLABEL:
case ir.OGOTO, ir.OLABEL:
w.op(op)
w.pos(n.Pos)
w.string(n.Sym.Name)
@ -1163,13 +1164,13 @@ func (w *exportWriter) stmt(n *Node) {
}
}
func (w *exportWriter) caseList(sw *Node) {
namedTypeSwitch := sw.Op == OSWITCH && sw.Left != nil && sw.Left.Op == OTYPESW && sw.Left.Left != nil
func (w *exportWriter) caseList(sw *ir.Node) {
namedTypeSwitch := sw.Op == ir.OSWITCH && sw.Left != nil && sw.Left.Op == ir.OTYPESW && sw.Left.Left != nil
cases := sw.List.Slice()
w.uint64(uint64(len(cases)))
for _, cas := range cases {
if cas.Op != OCASE {
if cas.Op != ir.OCASE {
base.Fatalf("expected OCASE, got %v", cas)
}
w.pos(cas.Pos)
@ -1181,14 +1182,14 @@ func (w *exportWriter) caseList(sw *Node) {
}
}
func (w *exportWriter) exprList(list Nodes) {
func (w *exportWriter) exprList(list ir.Nodes) {
for _, n := range list.Slice() {
w.expr(n)
}
w.op(OEND)
w.op(ir.OEND)
}
func (w *exportWriter) expr(n *Node) {
func (w *exportWriter) expr(n *ir.Node) {
// from nodefmt (fmt.go)
//
// nodefmt reverts nodes back to their original - we don't need to do
@ -1199,14 +1200,14 @@ func (w *exportWriter) expr(n *Node) {
// }
// from exprfmt (fmt.go)
for n.Op == OPAREN || n.Implicit() && (n.Op == ODEREF || n.Op == OADDR || n.Op == ODOT || n.Op == ODOTPTR) {
for n.Op == ir.OPAREN || n.Implicit() && (n.Op == ir.ODEREF || n.Op == ir.OADDR || n.Op == ir.ODOT || n.Op == ir.ODOTPTR) {
n = n.Left
}
switch op := n.Op; op {
// expressions
// (somewhat closely following the structure of exprfmt in fmt.go)
case ONIL:
case ir.ONIL:
if !n.Type.HasNil() {
base.Fatalf("unexpected type for nil: %v", n.Type)
}
@ -1214,49 +1215,49 @@ func (w *exportWriter) expr(n *Node) {
w.expr(n.Orig)
break
}
w.op(OLITERAL)
w.op(ir.OLITERAL)
w.pos(n.Pos)
w.typ(n.Type)
case OLITERAL:
w.op(OLITERAL)
case ir.OLITERAL:
w.op(ir.OLITERAL)
w.pos(n.Pos)
w.value(n.Type, n.Val())
case OMETHEXPR:
case ir.OMETHEXPR:
// Special case: explicit name of func (*T) method(...) is turned into pkg.(*T).method,
// but for export, this should be rendered as (*pkg.T).meth.
// These nodes have the special property that they are names with a left OTYPE and a right ONAME.
w.op(OXDOT)
w.op(ir.OXDOT)
w.pos(n.Pos)
w.expr(n.Left) // n.Left.Op == OTYPE
w.selector(n.Right.Sym)
case ONAME:
case ir.ONAME:
// Package scope name.
if (n.Class() == PEXTERN || n.Class() == PFUNC) && !n.isBlank() {
w.op(ONONAME)
if (n.Class() == ir.PEXTERN || n.Class() == ir.PFUNC) && !ir.IsBlank(n) {
w.op(ir.ONONAME)
w.qualifiedIdent(n)
break
}
// Function scope name.
w.op(ONAME)
w.op(ir.ONAME)
w.localName(n)
// case OPACK, ONONAME:
// should have been resolved by typechecking - handled by default case
case OTYPE:
w.op(OTYPE)
case ir.OTYPE:
w.op(ir.OTYPE)
w.typ(n.Type)
case OTYPESW:
w.op(OTYPESW)
case ir.OTYPESW:
w.op(ir.OTYPESW)
w.pos(n.Pos)
var s *types.Sym
if n.Left != nil {
if n.Left.Op != ONONAME {
if n.Left.Op != ir.ONONAME {
base.Fatalf("expected ONONAME, got %v", n.Left)
}
s = n.Left.Sym
@ -1273,149 +1274,149 @@ func (w *exportWriter) expr(n *Node) {
// case OCOMPLIT:
// should have been resolved by typechecking - handled by default case
case OPTRLIT:
w.op(OADDR)
case ir.OPTRLIT:
w.op(ir.OADDR)
w.pos(n.Pos)
w.expr(n.Left)
case OSTRUCTLIT:
w.op(OSTRUCTLIT)
case ir.OSTRUCTLIT:
w.op(ir.OSTRUCTLIT)
w.pos(n.Pos)
w.typ(n.Type)
w.elemList(n.List) // special handling of field names
case OARRAYLIT, OSLICELIT, OMAPLIT:
w.op(OCOMPLIT)
case ir.OARRAYLIT, ir.OSLICELIT, ir.OMAPLIT:
w.op(ir.OCOMPLIT)
w.pos(n.Pos)
w.typ(n.Type)
w.exprList(n.List)
case OKEY:
w.op(OKEY)
case ir.OKEY:
w.op(ir.OKEY)
w.pos(n.Pos)
w.exprsOrNil(n.Left, n.Right)
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
case OCALLPART:
case ir.OCALLPART:
// An OCALLPART is an OXDOT before type checking.
w.op(OXDOT)
w.op(ir.OXDOT)
w.pos(n.Pos)
w.expr(n.Left)
// Right node should be ONAME
w.selector(n.Right.Sym)
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
w.op(OXDOT)
case ir.OXDOT, ir.ODOT, ir.ODOTPTR, ir.ODOTINTER, ir.ODOTMETH:
w.op(ir.OXDOT)
w.pos(n.Pos)
w.expr(n.Left)
w.selector(n.Sym)
case ODOTTYPE, ODOTTYPE2:
w.op(ODOTTYPE)
case ir.ODOTTYPE, ir.ODOTTYPE2:
w.op(ir.ODOTTYPE)
w.pos(n.Pos)
w.expr(n.Left)
w.typ(n.Type)
case OINDEX, OINDEXMAP:
w.op(OINDEX)
case ir.OINDEX, ir.OINDEXMAP:
w.op(ir.OINDEX)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
case OSLICE, OSLICESTR, OSLICEARR:
w.op(OSLICE)
case ir.OSLICE, ir.OSLICESTR, ir.OSLICEARR:
w.op(ir.OSLICE)
w.pos(n.Pos)
w.expr(n.Left)
low, high, _ := n.SliceBounds()
w.exprsOrNil(low, high)
case OSLICE3, OSLICE3ARR:
w.op(OSLICE3)
case ir.OSLICE3, ir.OSLICE3ARR:
w.op(ir.OSLICE3)
w.pos(n.Pos)
w.expr(n.Left)
low, high, max := n.SliceBounds()
w.exprsOrNil(low, high)
w.expr(max)
case OCOPY, OCOMPLEX:
case ir.OCOPY, ir.OCOMPLEX:
// treated like other builtin calls (see e.g., OREAL)
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
w.op(OEND)
w.op(ir.OEND)
case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, ORUNES2STR, OSTR2BYTES, OSTR2RUNES, ORUNESTR:
w.op(OCONV)
case ir.OCONV, ir.OCONVIFACE, ir.OCONVNOP, ir.OBYTES2STR, ir.ORUNES2STR, ir.OSTR2BYTES, ir.OSTR2RUNES, ir.ORUNESTR:
w.op(ir.OCONV)
w.pos(n.Pos)
w.expr(n.Left)
w.typ(n.Type)
case OREAL, OIMAG, OAPPEND, OCAP, OCLOSE, ODELETE, OLEN, OMAKE, ONEW, OPANIC, ORECOVER, OPRINT, OPRINTN:
case ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
w.op(op)
w.pos(n.Pos)
if n.Left != nil {
w.expr(n.Left)
w.op(OEND)
w.op(ir.OEND)
} else {
w.exprList(n.List) // emits terminating OEND
}
// only append() calls may contain '...' arguments
if op == OAPPEND {
if op == ir.OAPPEND {
w.bool(n.IsDDD())
} else if n.IsDDD() {
base.Fatalf("exporter: unexpected '...' with %v call", op)
}
case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER, OGETG:
w.op(OCALL)
case ir.OCALL, ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER, ir.OGETG:
w.op(ir.OCALL)
w.pos(n.Pos)
w.stmtList(n.Ninit)
w.expr(n.Left)
w.exprList(n.List)
w.bool(n.IsDDD())
case OMAKEMAP, OMAKECHAN, OMAKESLICE:
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
w.op(op) // must keep separate from OMAKE for importer
w.pos(n.Pos)
w.typ(n.Type)
switch {
default:
// empty list
w.op(OEND)
w.op(ir.OEND)
case n.List.Len() != 0: // pre-typecheck
w.exprList(n.List) // emits terminating OEND
case n.Right != nil:
w.expr(n.Left)
w.expr(n.Right)
w.op(OEND)
case n.Left != nil && (n.Op == OMAKESLICE || !n.Left.Type.IsUntyped()):
w.op(ir.OEND)
case n.Left != nil && (n.Op == ir.OMAKESLICE || !n.Left.Type.IsUntyped()):
w.expr(n.Left)
w.op(OEND)
w.op(ir.OEND)
}
// unary expressions
case OPLUS, ONEG, OADDR, OBITNOT, ODEREF, ONOT, ORECV:
case ir.OPLUS, ir.ONEG, ir.OADDR, ir.OBITNOT, ir.ODEREF, ir.ONOT, ir.ORECV:
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
// binary expressions
case OADD, OAND, OANDAND, OANDNOT, ODIV, OEQ, OGE, OGT, OLE, OLT,
OLSH, OMOD, OMUL, ONE, OOR, OOROR, ORSH, OSEND, OSUB, OXOR:
case ir.OADD, ir.OAND, ir.OANDAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.OOROR, ir.ORSH, ir.OSEND, ir.OSUB, ir.OXOR:
w.op(op)
w.pos(n.Pos)
w.expr(n.Left)
w.expr(n.Right)
case OADDSTR:
w.op(OADDSTR)
case ir.OADDSTR:
w.op(ir.OADDSTR)
w.pos(n.Pos)
w.exprList(n.List)
case ODCLCONST:
case ir.ODCLCONST:
// if exporting, DCLCONST should just be removed as its usage
// has already been replaced with literals
@ -1425,11 +1426,11 @@ func (w *exportWriter) expr(n *Node) {
}
}
func (w *exportWriter) op(op Op) {
func (w *exportWriter) op(op ir.Op) {
w.uint64(uint64(op))
}
func (w *exportWriter) exprsOrNil(a, b *Node) {
func (w *exportWriter) exprsOrNil(a, b *ir.Node) {
ab := 0
if a != nil {
ab |= 1
@ -1446,7 +1447,7 @@ func (w *exportWriter) exprsOrNil(a, b *Node) {
}
}
func (w *exportWriter) elemList(list Nodes) {
func (w *exportWriter) elemList(list ir.Nodes) {
w.uint64(uint64(list.Len()))
for _, n := range list.Slice() {
w.selector(n.Sym)
@ -1454,7 +1455,7 @@ func (w *exportWriter) elemList(list Nodes) {
}
}
func (w *exportWriter) localName(n *Node) {
func (w *exportWriter) localName(n *ir.Node) {
// Escape analysis happens after inline bodies are saved, but
// we're using the same ONAME nodes, so we might still see
// PAUTOHEAP here.
@ -1463,7 +1464,7 @@ func (w *exportWriter) localName(n *Node) {
// PPARAM/PPARAMOUT, because we only want to include vargen in
// non-param names.
var v int32
if n.Class() == PAUTO || (n.Class() == PAUTOHEAP && n.Name.Param.Stackcopy == nil) {
if n.Class() == ir.PAUTO || (n.Class() == ir.PAUTOHEAP && n.Name.Param.Stackcopy == nil) {
v = n.Name.Vargen
}
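Vargen is what keeps shadowed locals apart in exported inline bodies. In the function below, the two declarations of x are distinct ONAMEs sharing one Sym; only the per-function generation number distinguishes them after export and re-import:

	func f() int {
		x := 1
		{
			x := 2 // a second, distinct x; vargen disambiguates it
			_ = x
		}
		return x
	}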


@ -9,6 +9,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/goobj"
@ -40,8 +41,8 @@ var (
inlineImporter = map[*types.Sym]iimporterAndOffset{}
)
func expandDecl(n *Node) {
if n.Op != ONONAME {
func expandDecl(n *ir.Node) {
if n.Op != ir.ONONAME {
return
}
@ -54,7 +55,7 @@ func expandDecl(n *Node) {
r.doDecl(n)
}
func expandInline(fn *Node) {
func expandInline(fn *ir.Node) {
if fn.Func.Inl.Body != nil {
return
}
@ -67,7 +68,7 @@ func expandInline(fn *Node) {
r.doInline(fn)
}
func importReaderFor(n *Node, importers map[*types.Sym]iimporterAndOffset) *importReader {
func importReaderFor(n *ir.Node, importers map[*types.Sym]iimporterAndOffset) *importReader {
x, ok := importers[n.Sym]
if !ok {
return nil
@ -147,10 +148,10 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType)
if pkg.Name == "" {
pkg.Name = pkgName
pkg.Height = pkgHeight
numImport[pkgName]++
ir.NumImport[pkgName]++
// TODO(mdempsky): This belongs somewhere else.
pkg.Lookup("_").Def = asTypesNode(nblank)
pkg.Lookup("_").Def = ir.AsTypesNode(ir.BlankNode)
} else {
if pkg.Name != pkgName {
base.Fatalf("conflicting package names %v and %v for path %q", pkg.Name, pkgName, pkg.Path)
@ -172,9 +173,9 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType)
// Create stub declaration. If used, this will
// be overwritten by expandDecl.
if s.Def != nil {
base.Fatalf("unexpected definition for %v: %v", s, asNode(s.Def))
base.Fatalf("unexpected definition for %v: %v", s, ir.AsNode(s.Def))
}
s.Def = asTypesNode(npos(src.NoXPos, dclname(s)))
s.Def = ir.AsTypesNode(npos(src.NoXPos, dclname(s)))
}
}
@ -280,8 +281,8 @@ func (r *importReader) setPkg() {
r.currPkg = r.pkg()
}
func (r *importReader) doDecl(n *Node) {
if n.Op != ONONAME {
func (r *importReader) doDecl(n *ir.Node) {
if n.Op != ir.ONONAME {
base.Fatalf("doDecl: unexpected Op for %v: %v", n.Sym, n.Op)
}
@ -330,13 +331,13 @@ func (r *importReader) doDecl(n *Node) {
recv := r.param()
mtyp := r.signature(recv)
m := newfuncnamel(mpos, methodSym(recv.Type, msym), new(Func))
m := newfuncnamel(mpos, methodSym(recv.Type, msym), new(ir.Func))
m.Type = mtyp
m.SetClass(PFUNC)
m.SetClass(ir.PFUNC)
// methodSym already marked m.Sym as a function.
f := types.NewField(mpos, msym, mtyp)
f.Nname = asTypesNode(m)
f.Nname = ir.AsTypesNode(m)
ms[i] = f
}
t.Methods().Set(ms)
@ -434,7 +435,7 @@ func (r *importReader) ident() *types.Sym {
}
pkg := r.currPkg
if types.IsExported(name) {
pkg = localpkg
pkg = ir.LocalPkg
}
return pkg.Lookup(name)
}
@ -498,11 +499,11 @@ func (r *importReader) typ1() *types.Type {
// support inlining functions with local defined
// types. Therefore, this must be a package-scope
// type.
n := asNode(r.qualifiedIdent().PkgDef())
if n.Op == ONONAME {
n := ir.AsNode(r.qualifiedIdent().PkgDef())
if n.Op == ir.ONONAME {
expandDecl(n)
}
if n.Op != OTYPE {
if n.Op != ir.OTYPE {
base.Fatalf("expected OTYPE, got %v: %v, %v", n.Op, n.Sym, n)
}
return n.Type
@ -542,7 +543,7 @@ func (r *importReader) typ1() *types.Type {
fs[i] = f
}
t := types.New(TSTRUCT)
t := types.New(types.TSTRUCT)
t.SetPkg(r.currPkg)
t.SetFields(fs)
return t
@ -567,7 +568,7 @@ func (r *importReader) typ1() *types.Type {
methods[i] = types.NewField(pos, sym, typ)
}
t := types.New(TINTER)
t := types.New(types.TINTER)
t.SetPkg(r.currPkg)
t.SetInterface(append(embeddeds, methods...))
@ -634,12 +635,12 @@ func (r *importReader) byte() byte {
// Compiler-specific extensions.
func (r *importReader) varExt(n *Node) {
func (r *importReader) varExt(n *ir.Node) {
r.linkname(n.Sym)
r.symIdx(n.Sym)
}
func (r *importReader) funcExt(n *Node) {
func (r *importReader) funcExt(n *ir.Node) {
r.linkname(n.Sym)
r.symIdx(n.Sym)
@ -652,7 +653,7 @@ func (r *importReader) funcExt(n *Node) {
// Inline body.
if u := r.uint64(); u > 0 {
n.Func.Inl = &Inline{
n.Func.Inl = &ir.Inline{
Cost: int32(u - 1),
}
n.Func.Endlineno = r.pos()
@ -663,7 +664,7 @@ func (r *importReader) methExt(m *types.Field) {
if r.bool() {
m.SetNointerface(true)
}
r.funcExt(asNode(m.Nname))
r.funcExt(ir.AsNode(m.Nname))
}
func (r *importReader) linkname(s *types.Sym) {
@ -694,7 +695,7 @@ func (r *importReader) typeExt(t *types.Type) {
// so we can use index to reference the symbol.
var typeSymIdx = make(map[*types.Type][2]int64)
func (r *importReader) doInline(n *Node) {
func (r *importReader) doInline(n *ir.Node) {
if len(n.Func.Inl.Body) != 0 {
base.Fatalf("%v already has inline body", n)
}
@ -709,7 +710,7 @@ func (r *importReader) doInline(n *Node) {
// (not doing so can cause significant performance
// degradation due to unnecessary calls to empty
// functions).
body = []*Node{}
body = []*ir.Node{}
}
n.Func.Inl.Body = body
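The nil-versus-empty distinction being preserved here is the usual Go slice subtlety: a nil body means no inline body was recorded, while a non-nil empty slice means a body exists and happens to be empty (which is what lets callers such as expandInline skip a re-import). For example:

	func nilVsEmpty() (bool, bool) {
		var missing []int  // nil: nothing recorded
		present := []int{} // non-nil but empty: recorded, and it is empty
		return missing == nil, present == nil // true, false
	}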
@ -717,9 +718,9 @@ func (r *importReader) doInline(n *Node) {
if base.Flag.E > 0 && base.Flag.LowerM > 2 {
if base.Flag.LowerM > 3 {
fmt.Printf("inl body for %v %#v: %+v\n", n, n.Type, asNodes(n.Func.Inl.Body))
fmt.Printf("inl body for %v %#v: %+v\n", n, n.Type, ir.AsNodes(n.Func.Inl.Body))
} else {
fmt.Printf("inl body for %v %#v: %v\n", n, n.Type, asNodes(n.Func.Inl.Body))
fmt.Printf("inl body for %v %#v: %v\n", n, n.Type, ir.AsNodes(n.Func.Inl.Body))
}
}
}
@ -739,15 +740,15 @@ func (r *importReader) doInline(n *Node) {
// unrefined nodes (since this is what the importer uses). The respective case
// entries are unreachable in the importer.
func (r *importReader) stmtList() []*Node {
var list []*Node
func (r *importReader) stmtList() []*ir.Node {
var list []*ir.Node
for {
n := r.node()
if n == nil {
break
}
// OBLOCK nodes may be created when importing ODCL nodes - unpack them
if n.Op == OBLOCK {
if n.Op == ir.OBLOCK {
list = append(list, n.List.Slice()...)
} else {
list = append(list, n)
@ -757,18 +758,18 @@ func (r *importReader) stmtList() []*Node {
return list
}
func (r *importReader) caseList(sw *Node) []*Node {
namedTypeSwitch := sw.Op == OSWITCH && sw.Left != nil && sw.Left.Op == OTYPESW && sw.Left.Left != nil
func (r *importReader) caseList(sw *ir.Node) []*ir.Node {
namedTypeSwitch := sw.Op == ir.OSWITCH && sw.Left != nil && sw.Left.Op == ir.OTYPESW && sw.Left.Left != nil
cases := make([]*Node, r.uint64())
cases := make([]*ir.Node, r.uint64())
for i := range cases {
cas := nodl(r.pos(), OCASE, nil, nil)
cas := ir.NodAt(r.pos(), ir.OCASE, nil, nil)
cas.List.Set(r.stmtList())
if namedTypeSwitch {
// Note: per-case variables will have distinct, dotted
// names after import. That's okay: swt.go only needs
// Sym for diagnostics anyway.
caseVar := newnamel(cas.Pos, r.ident())
caseVar := ir.NewNameAt(cas.Pos, r.ident())
declare(caseVar, dclcontext)
cas.Rlist.Set1(caseVar)
caseVar.Name.Defn = sw.Left
@ -779,8 +780,8 @@ func (r *importReader) caseList(sw *Node) []*Node {
return cases
}
func (r *importReader) exprList() []*Node {
var list []*Node
func (r *importReader) exprList() []*ir.Node {
var list []*ir.Node
for {
n := r.expr()
if n == nil {
@ -791,16 +792,16 @@ func (r *importReader) exprList() []*Node {
return list
}
func (r *importReader) expr() *Node {
func (r *importReader) expr() *ir.Node {
n := r.node()
if n != nil && n.Op == OBLOCK {
if n != nil && n.Op == ir.OBLOCK {
base.Fatalf("unexpected block node: %v", n)
}
return n
}
// TODO(gri) split into expr and stmt
func (r *importReader) node() *Node {
func (r *importReader) node() *ir.Node {
switch op := r.op(); op {
// expressions
// case OPAREN:
@ -809,34 +810,34 @@ func (r *importReader) node() *Node {
// case ONIL:
// unreachable - mapped to OLITERAL
case OLITERAL:
case ir.OLITERAL:
pos := r.pos()
typ := r.typ()
var n *Node
var n *ir.Node
if typ.HasNil() {
n = nodnil()
} else {
n = nodlit(r.value(typ))
n = ir.NewLiteral(r.value(typ))
}
n = npos(pos, n)
n.Type = typ
return n
case ONONAME:
case ir.ONONAME:
return mkname(r.qualifiedIdent())
case ONAME:
case ir.ONAME:
return mkname(r.ident())
// case OPACK, ONONAME:
// unreachable - should have been resolved by typechecking
case OTYPE:
case ir.OTYPE:
return typenod(r.typ())
case OTYPESW:
n := nodl(r.pos(), OTYPESW, nil, nil)
case ir.OTYPESW:
n := ir.NodAt(r.pos(), ir.OTYPESW, nil, nil)
if s := r.ident(); s != nil {
n.Left = npos(n.Pos, newnoname(s))
}
@ -853,11 +854,11 @@ func (r *importReader) node() *Node {
// case OPTRLIT:
// unreachable - mapped to case OADDR below by exporter
case OSTRUCTLIT:
case ir.OSTRUCTLIT:
// TODO(mdempsky): Export position information for OSTRUCTKEY nodes.
savedlineno := base.Pos
base.Pos = r.pos()
n := nodl(base.Pos, OCOMPLIT, nil, typenod(r.typ()))
n := ir.NodAt(base.Pos, ir.OCOMPLIT, nil, typenod(r.typ()))
n.List.Set(r.elemList()) // special handling of field names
base.Pos = savedlineno
return n
@ -865,15 +866,15 @@ func (r *importReader) node() *Node {
// case OARRAYLIT, OSLICELIT, OMAPLIT:
// unreachable - mapped to case OCOMPLIT below by exporter
case OCOMPLIT:
n := nodl(r.pos(), OCOMPLIT, nil, typenod(r.typ()))
case ir.OCOMPLIT:
n := ir.NodAt(r.pos(), ir.OCOMPLIT, nil, typenod(r.typ()))
n.List.Set(r.exprList())
return n
case OKEY:
case ir.OKEY:
pos := r.pos()
left, right := r.exprsOrNil()
return nodl(pos, OKEY, left, right)
return ir.NodAt(pos, ir.OKEY, left, right)
// case OSTRUCTKEY:
// unreachable - handled in case OSTRUCTLIT by elemList
@ -884,28 +885,28 @@ func (r *importReader) node() *Node {
// case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
// unreachable - mapped to case OXDOT below by exporter
case OXDOT:
case ir.OXDOT:
// see parser.new_dotname
return npos(r.pos(), nodSym(OXDOT, r.expr(), r.ident()))
return npos(r.pos(), nodSym(ir.OXDOT, r.expr(), r.ident()))
// case ODOTTYPE, ODOTTYPE2:
// unreachable - mapped to case ODOTTYPE below by exporter
case ODOTTYPE:
n := nodl(r.pos(), ODOTTYPE, r.expr(), nil)
case ir.ODOTTYPE:
n := ir.NodAt(r.pos(), ir.ODOTTYPE, r.expr(), nil)
n.Type = r.typ()
return n
// case OINDEX, OINDEXMAP, OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
// unreachable - mapped to cases below by exporter
case OINDEX:
return nodl(r.pos(), op, r.expr(), r.expr())
case ir.OINDEX:
return ir.NodAt(r.pos(), op, r.expr(), r.expr())
case OSLICE, OSLICE3:
n := nodl(r.pos(), op, r.expr(), nil)
case ir.OSLICE, ir.OSLICE3:
n := ir.NodAt(r.pos(), op, r.expr(), nil)
low, high := r.exprsOrNil()
var max *Node
var max *ir.Node
if n.Op.IsSlice3() {
max = r.expr()
}
@ -915,15 +916,15 @@ func (r *importReader) node() *Node {
// case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, ORUNES2STR, OSTR2BYTES, OSTR2RUNES, ORUNESTR:
// unreachable - mapped to OCONV case below by exporter
case OCONV:
n := nodl(r.pos(), OCONV, r.expr(), nil)
case ir.OCONV:
n := ir.NodAt(r.pos(), ir.OCONV, r.expr(), nil)
n.Type = r.typ()
return n
case OCOPY, OCOMPLEX, OREAL, OIMAG, OAPPEND, OCAP, OCLOSE, ODELETE, OLEN, OMAKE, ONEW, OPANIC, ORECOVER, OPRINT, OPRINTN:
case ir.OCOPY, ir.OCOMPLEX, ir.OREAL, ir.OIMAG, ir.OAPPEND, ir.OCAP, ir.OCLOSE, ir.ODELETE, ir.OLEN, ir.OMAKE, ir.ONEW, ir.OPANIC, ir.ORECOVER, ir.OPRINT, ir.OPRINTN:
n := npos(r.pos(), builtinCall(op))
n.List.Set(r.exprList())
if op == OAPPEND {
if op == ir.OAPPEND {
n.SetIsDDD(r.bool())
}
return n
@ -931,45 +932,45 @@ func (r *importReader) node() *Node {
// case OCALL, OCALLFUNC, OCALLMETH, OCALLINTER, OGETG:
// unreachable - mapped to OCALL case below by exporter
case OCALL:
n := nodl(r.pos(), OCALL, nil, nil)
case ir.OCALL:
n := ir.NodAt(r.pos(), ir.OCALL, nil, nil)
n.Ninit.Set(r.stmtList())
n.Left = r.expr()
n.List.Set(r.exprList())
n.SetIsDDD(r.bool())
return n
case OMAKEMAP, OMAKECHAN, OMAKESLICE:
n := npos(r.pos(), builtinCall(OMAKE))
case ir.OMAKEMAP, ir.OMAKECHAN, ir.OMAKESLICE:
n := npos(r.pos(), builtinCall(ir.OMAKE))
n.List.Append(typenod(r.typ()))
n.List.Append(r.exprList()...)
return n
// unary expressions
case OPLUS, ONEG, OADDR, OBITNOT, ODEREF, ONOT, ORECV:
return nodl(r.pos(), op, r.expr(), nil)
case ir.OPLUS, ir.ONEG, ir.OADDR, ir.OBITNOT, ir.ODEREF, ir.ONOT, ir.ORECV:
return ir.NodAt(r.pos(), op, r.expr(), nil)
// binary expressions
case OADD, OAND, OANDAND, OANDNOT, ODIV, OEQ, OGE, OGT, OLE, OLT,
OLSH, OMOD, OMUL, ONE, OOR, OOROR, ORSH, OSEND, OSUB, OXOR:
return nodl(r.pos(), op, r.expr(), r.expr())
case ir.OADD, ir.OAND, ir.OANDAND, ir.OANDNOT, ir.ODIV, ir.OEQ, ir.OGE, ir.OGT, ir.OLE, ir.OLT,
ir.OLSH, ir.OMOD, ir.OMUL, ir.ONE, ir.OOR, ir.OOROR, ir.ORSH, ir.OSEND, ir.OSUB, ir.OXOR:
return ir.NodAt(r.pos(), op, r.expr(), r.expr())
case OADDSTR:
case ir.OADDSTR:
pos := r.pos()
list := r.exprList()
x := npos(pos, list[0])
for _, y := range list[1:] {
x = nodl(pos, OADD, x, y)
x = ir.NodAt(pos, ir.OADD, x, y)
}
return x
// --------------------------------------------------------------------
// statements
case ODCL:
case ir.ODCL:
pos := r.pos()
lhs := npos(pos, dclname(r.ident()))
typ := typenod(r.typ())
return npos(pos, liststmt(variter([]*Node{lhs}, typ, nil))) // TODO(gri) avoid list creation
return npos(pos, liststmt(variter([]*ir.Node{lhs}, typ, nil))) // TODO(gri) avoid list creation
// case ODCLFIELD:
// unimplemented
@ -977,11 +978,11 @@ func (r *importReader) node() *Node {
// case OAS, OASWB:
// unreachable - mapped to OAS case below by exporter
case OAS:
return nodl(r.pos(), OAS, r.expr(), r.expr())
case ir.OAS:
return ir.NodAt(r.pos(), ir.OAS, r.expr(), r.expr())
case OASOP:
n := nodl(r.pos(), OASOP, nil, nil)
case ir.OASOP:
n := ir.NodAt(r.pos(), ir.OASOP, nil, nil)
n.SetSubOp(r.op())
n.Left = r.expr()
if !r.bool() {
@ -995,33 +996,33 @@ func (r *importReader) node() *Node {
// case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
// unreachable - mapped to OAS2 case below by exporter
case OAS2:
n := nodl(r.pos(), OAS2, nil, nil)
case ir.OAS2:
n := ir.NodAt(r.pos(), ir.OAS2, nil, nil)
n.List.Set(r.exprList())
n.Rlist.Set(r.exprList())
return n
case ORETURN:
n := nodl(r.pos(), ORETURN, nil, nil)
case ir.ORETURN:
n := ir.NodAt(r.pos(), ir.ORETURN, nil, nil)
n.List.Set(r.exprList())
return n
// case ORETJMP:
// unreachable - generated by compiler for trampoline routines (not exported)
case OGO, ODEFER:
return nodl(r.pos(), op, r.expr(), nil)
case ir.OGO, ir.ODEFER:
return ir.NodAt(r.pos(), op, r.expr(), nil)
case OIF:
n := nodl(r.pos(), OIF, nil, nil)
case ir.OIF:
n := ir.NodAt(r.pos(), ir.OIF, nil, nil)
n.Ninit.Set(r.stmtList())
n.Left = r.expr()
n.Nbody.Set(r.stmtList())
n.Rlist.Set(r.stmtList())
return n
case OFOR:
n := nodl(r.pos(), OFOR, nil, nil)
case ir.OFOR:
n := ir.NodAt(r.pos(), ir.OFOR, nil, nil)
n.Ninit.Set(r.stmtList())
left, right := r.exprsOrNil()
n.Left = left
@ -1029,15 +1030,15 @@ func (r *importReader) node() *Node {
n.Nbody.Set(r.stmtList())
return n
case ORANGE:
n := nodl(r.pos(), ORANGE, nil, nil)
case ir.ORANGE:
n := ir.NodAt(r.pos(), ir.ORANGE, nil, nil)
n.List.Set(r.stmtList())
n.Right = r.expr()
n.Nbody.Set(r.stmtList())
return n
case OSELECT, OSWITCH:
n := nodl(r.pos(), op, nil, nil)
case ir.OSELECT, ir.OSWITCH:
n := ir.NodAt(r.pos(), op, nil, nil)
n.Ninit.Set(r.stmtList())
left, _ := r.exprsOrNil()
n.Left = left
@ -1047,27 +1048,27 @@ func (r *importReader) node() *Node {
// case OCASE:
// handled by caseList
case OFALL:
n := nodl(r.pos(), OFALL, nil, nil)
case ir.OFALL:
n := ir.NodAt(r.pos(), ir.OFALL, nil, nil)
return n
case OBREAK, OCONTINUE:
case ir.OBREAK, ir.OCONTINUE:
pos := r.pos()
left, _ := r.exprsOrNil()
if left != nil {
left = newname(left.Sym)
left = NewName(left.Sym)
}
return nodl(pos, op, left, nil)
return ir.NodAt(pos, op, left, nil)
// case OEMPTY:
// unreachable - not emitted by exporter
case OGOTO, OLABEL:
n := nodl(r.pos(), op, nil, nil)
case ir.OGOTO, ir.OLABEL:
n := ir.NodAt(r.pos(), op, nil, nil)
n.Sym = lookup(r.string())
return n
case OEND:
case ir.OEND:
return nil
default:
@ -1077,21 +1078,21 @@ func (r *importReader) node() *Node {
}
}
func (r *importReader) op() Op {
return Op(r.uint64())
func (r *importReader) op() ir.Op {
return ir.Op(r.uint64())
}
func (r *importReader) elemList() []*Node {
func (r *importReader) elemList() []*ir.Node {
c := r.uint64()
list := make([]*Node, c)
list := make([]*ir.Node, c)
for i := range list {
s := r.ident()
list[i] = nodSym(OSTRUCTKEY, r.expr(), s)
list[i] = nodSym(ir.OSTRUCTKEY, r.expr(), s)
}
return list
}
func (r *importReader) exprsOrNil() (a, b *Node) {
func (r *importReader) exprsOrNil() (a, b *ir.Node) {
ab := r.uint64()
if ab&1 != 0 {
a = r.expr()
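
The decoding discipline used throughout importReader.node above — read an op tag, then pull exactly the operands that op requires, recursing for sub-expressions — is easier to see stripped of compiler detail. A self-contained sketch with toy Op and Node types (nothing here is the real ir package):

	package main

	import "fmt"

	type Op int

	const (
		OEND Op = iota // sentinel: no more nodes in the stream
		OLIT           // literal; one payload value follows
		OADD           // binary +; two operand nodes follow
	)

	type Node struct {
		Op          Op
		Val         int
		Left, Right *Node
	}

	type decoder struct {
		stream []int // pre-encoded op tags and literal payloads
		pos    int
	}

	func (d *decoder) next() int { v := d.stream[d.pos]; d.pos++; return v }

	// node mirrors importReader.node: dispatch on the op tag, then read
	// exactly the operands that op requires.
	func (d *decoder) node() *Node {
		switch op := Op(d.next()); op {
		case OLIT:
			return &Node{Op: OLIT, Val: d.next()}
		case OADD:
			return &Node{Op: OADD, Left: d.node(), Right: d.node()}
		case OEND:
			return nil
		default:
			panic(fmt.Sprintf("unexpected op %d", op))
		}
	}

	func main() {
		// (1+2)+3 in the prefix order an exporter would write it.
		d := &decoder{stream: []int{int(OADD), int(OADD), int(OLIT), 1, int(OLIT), 2, int(OLIT), 3}}
		root := d.node()
		fmt.Println(root.Op == OADD, root.Right.Val) // true 3
	}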


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
)
@ -18,7 +19,7 @@ var renameinitgen int
// Function collecting autotmps generated during typechecking,
// to be included in the package-level init function.
var initTodo = nod(ODCLFUNC, nil, nil)
var initTodo = ir.Nod(ir.ODCLFUNC, nil, nil)
func renameinit() *types.Sym {
s := lookupN("init.", renameinitgen)
@ -32,7 +33,7 @@ func renameinit() *types.Sym {
// 1) Initialize all of the packages the current package depends on.
// 2) Initialize all the variables that have initializers.
// 3) Run any init functions.
func fninit(n []*Node) {
func fninit(n []*ir.Node) {
nf := initOrder(n)
var deps []*obj.LSym // initTask records for packages the current package depends on
@ -47,7 +48,7 @@ func fninit(n []*Node) {
if len(nf) > 0 {
base.Pos = nf[0].Pos // prolog/epilog gets line number of first init stmt
initializers := lookup("init")
fn := dclfunc(initializers, nod(OTFUNC, nil, nil))
fn := dclfunc(initializers, ir.Nod(ir.OTFUNC, nil, nil))
for _, dcl := range initTodo.Func.Dcl {
dcl.Name.Curfn = fn
}
@ -75,24 +76,24 @@ func fninit(n []*Node) {
// Record user init functions.
for i := 0; i < renameinitgen; i++ {
s := lookupN("init.", i)
fn := asNode(s.Def).Name.Defn
fn := ir.AsNode(s.Def).Name.Defn
// Skip init functions with empty bodies.
if fn.Nbody.Len() == 1 && fn.Nbody.First().Op == OEMPTY {
if fn.Nbody.Len() == 1 && fn.Nbody.First().Op == ir.OEMPTY {
continue
}
fns = append(fns, s.Linksym())
}
if len(deps) == 0 && len(fns) == 0 && localpkg.Name != "main" && localpkg.Name != "runtime" {
if len(deps) == 0 && len(fns) == 0 && ir.LocalPkg.Name != "main" && ir.LocalPkg.Name != "runtime" {
return // nothing to initialize
}
// Make an .inittask structure.
sym := lookup(".inittask")
nn := newname(sym)
nn.Type = types.Types[TUINT8] // fake type
nn.SetClass(PEXTERN)
sym.Def = asTypesNode(nn)
nn := NewName(sym)
nn.Type = types.Types[types.TUINT8] // fake type
nn.SetClass(ir.PEXTERN)
sym.Def = ir.AsTypesNode(nn)
exportsym(nn)
lsym := sym.Linksym()
ot := 0
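
From the user's side, the three steps in fninit's comment line up with the pieces of an ordinary package. A hypothetical example (package and identifier names invented), including an empty init that the OEMPTY check above would skip:

	// Hypothetical package; every name below is invented for illustration.
	package p

	import _ "net/http" // step 1: recorded as a dependency in p's .inittask

	var answer = compute() // step 2: initializer runs inside the generated init

	func compute() int { return 42 }

	func init() { println(answer) } // step 3: user init, recorded in declaration order

	func init() {} // empty body: skipped by the OEMPTY check above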


@ -10,6 +10,8 @@ import (
"fmt"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
// Package initialization
@ -62,7 +64,7 @@ const (
type InitOrder struct {
// blocking maps initialization assignments to the assignments
// that depend on it.
blocking map[*Node][]*Node
blocking map[*ir.Node][]*ir.Node
// ready is the queue of Pending initialization assignments
// that are ready for initialization.
@ -73,22 +75,22 @@ type InitOrder struct {
// package-level declarations (in declaration order) and outputs the
// corresponding list of statements to include in the init() function
// body.
func initOrder(l []*Node) []*Node {
func initOrder(l []*ir.Node) []*ir.Node {
s := InitSchedule{
initplans: make(map[*Node]*InitPlan),
inittemps: make(map[*Node]*Node),
initplans: make(map[*ir.Node]*InitPlan),
inittemps: make(map[*ir.Node]*ir.Node),
}
o := InitOrder{
blocking: make(map[*Node][]*Node),
blocking: make(map[*ir.Node][]*ir.Node),
}
// Process all package-level assignments in declaration order.
for _, n := range l {
switch n.Op {
case OAS, OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
case ir.OAS, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
o.processAssign(n)
o.flushReady(s.staticInit)
case ODCLCONST, ODCLFUNC, ODCLTYPE:
case ir.ODCLCONST, ir.ODCLFUNC, ir.ODCLTYPE:
// nop
default:
base.Fatalf("unexpected package-level statement: %v", n)
@ -99,7 +101,7 @@ func initOrder(l []*Node) []*Node {
// have been a dependency cycle.
for _, n := range l {
switch n.Op {
case OAS, OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
case ir.OAS, ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
if n.Initorder() != InitDone {
// If there have already been errors
// printed, those errors may have
@ -108,7 +110,7 @@ func initOrder(l []*Node) []*Node {
// first.
base.ExitIfErrors()
findInitLoopAndExit(firstLHS(n), new([]*Node))
findInitLoopAndExit(firstLHS(n), new([]*ir.Node))
base.Fatalf("initialization unfinished, but failed to identify loop")
}
}
@ -123,8 +125,8 @@ func initOrder(l []*Node) []*Node {
return s.out
}
func (o *InitOrder) processAssign(n *Node) {
if n.Initorder() != InitNotStarted || n.Xoffset != BADWIDTH {
func (o *InitOrder) processAssign(n *ir.Node) {
if n.Initorder() != InitNotStarted || n.Xoffset != types.BADWIDTH {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
}
@ -137,7 +139,7 @@ func (o *InitOrder) processAssign(n *Node) {
defn := dep.Name.Defn
// Skip dependencies on functions (PFUNC) and
// variables already initialized (InitDone).
if dep.Class() != PEXTERN || defn.Initorder() == InitDone {
if dep.Class() != ir.PEXTERN || defn.Initorder() == InitDone {
continue
}
n.Xoffset++
@ -152,16 +154,16 @@ func (o *InitOrder) processAssign(n *Node) {
// flushReady repeatedly applies initialize to the earliest (in
// declaration order) assignment ready for initialization and updates
// the inverse dependency ("blocking") graph.
func (o *InitOrder) flushReady(initialize func(*Node)) {
func (o *InitOrder) flushReady(initialize func(*ir.Node)) {
for o.ready.Len() != 0 {
n := heap.Pop(&o.ready).(*Node)
n := heap.Pop(&o.ready).(*ir.Node)
if n.Initorder() != InitPending || n.Xoffset != 0 {
base.Fatalf("unexpected state: %v, %v, %v", n, n.Initorder(), n.Xoffset)
}
initialize(n)
n.SetInitorder(InitDone)
n.Xoffset = BADWIDTH
n.Xoffset = types.BADWIDTH
blocked := o.blocking[n]
delete(o.blocking, n)
@ -181,7 +183,7 @@ func (o *InitOrder) flushReady(initialize func(*Node)) {
// path points to a slice used for tracking the sequence of
// variables/functions visited. Using a pointer to a slice allows the
// slice capacity to grow and limit reallocations.
func findInitLoopAndExit(n *Node, path *[]*Node) {
func findInitLoopAndExit(n *ir.Node, path *[]*ir.Node) {
// We implement a simple DFS loop-finding algorithm. This
// could be faster, but initialization cycles are rare.
@ -194,14 +196,14 @@ func findInitLoopAndExit(n *Node, path *[]*Node) {
// There might be multiple loops involving n; by sorting
// references, we deterministically pick the one reported.
refers := collectDeps(n.Name.Defn, false).Sorted(func(ni, nj *Node) bool {
refers := collectDeps(n.Name.Defn, false).Sorted(func(ni, nj *ir.Node) bool {
return ni.Pos.Before(nj.Pos)
})
*path = append(*path, n)
for _, ref := range refers {
// Short-circuit variables that were initialized.
if ref.Class() == PEXTERN && ref.Name.Defn.Initorder() == InitDone {
if ref.Class() == ir.PEXTERN && ref.Name.Defn.Initorder() == InitDone {
continue
}
@ -213,12 +215,12 @@ func findInitLoopAndExit(n *Node, path *[]*Node) {
// reportInitLoopAndExit reports an initialization loop as an error
// and exits. However, if l is not actually an initialization loop, it
// simply returns instead.
func reportInitLoopAndExit(l []*Node) {
func reportInitLoopAndExit(l []*ir.Node) {
// Rotate loop so that the earliest variable declaration is at
// the start.
i := -1
for j, n := range l {
if n.Class() == PEXTERN && (i == -1 || n.Pos.Before(l[i].Pos)) {
if n.Class() == ir.PEXTERN && (i == -1 || n.Pos.Before(l[i].Pos)) {
i = j
}
}
@ -236,9 +238,9 @@ func reportInitLoopAndExit(l []*Node) {
var msg bytes.Buffer
fmt.Fprintf(&msg, "initialization loop:\n")
for _, n := range l {
fmt.Fprintf(&msg, "\t%v: %v refers to\n", n.Line(), n)
fmt.Fprintf(&msg, "\t%v: %v refers to\n", ir.Line(n), n)
}
fmt.Fprintf(&msg, "\t%v: %v", l[0].Line(), l[0])
fmt.Fprintf(&msg, "\t%v: %v", ir.Line(l[0]), l[0])
base.ErrorfAt(l[0].Pos, msg.String())
base.ErrorExit()
@ -248,14 +250,14 @@ func reportInitLoopAndExit(l []*Node) {
// variables that declaration n depends on. If transitive is true,
// then it also includes the transitive dependencies of any depended
// upon functions (but not variables).
func collectDeps(n *Node, transitive bool) NodeSet {
func collectDeps(n *ir.Node, transitive bool) ir.NodeSet {
d := initDeps{transitive: transitive}
switch n.Op {
case OAS:
case ir.OAS:
d.inspect(n.Right)
case OAS2DOTTYPE, OAS2FUNC, OAS2MAPR, OAS2RECV:
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2MAPR, ir.OAS2RECV:
d.inspect(n.Right)
case ODCLFUNC:
case ir.ODCLFUNC:
d.inspectList(n.Nbody)
default:
base.Fatalf("unexpected Op: %v", n.Op)
@ -265,31 +267,31 @@ func collectDeps(n *Node, transitive bool) NodeSet {
type initDeps struct {
transitive bool
seen NodeSet
seen ir.NodeSet
}
func (d *initDeps) inspect(n *Node) { inspect(n, d.visit) }
func (d *initDeps) inspectList(l Nodes) { inspectList(l, d.visit) }
func (d *initDeps) inspect(n *ir.Node) { ir.Inspect(n, d.visit) }
func (d *initDeps) inspectList(l ir.Nodes) { ir.InspectList(l, d.visit) }
// visit calls foundDep on any package-level functions or variables
// referenced by n, if any.
func (d *initDeps) visit(n *Node) bool {
func (d *initDeps) visit(n *ir.Node) bool {
switch n.Op {
case OMETHEXPR:
d.foundDep(n.MethodName())
case ir.OMETHEXPR:
d.foundDep(methodExprName(n))
return false
case ONAME:
case ir.ONAME:
switch n.Class() {
case PEXTERN, PFUNC:
case ir.PEXTERN, ir.PFUNC:
d.foundDep(n)
}
case OCLOSURE:
case ir.OCLOSURE:
d.inspectList(n.Func.Decl.Nbody)
case ODOTMETH, OCALLPART:
d.foundDep(n.MethodName())
case ir.ODOTMETH, ir.OCALLPART:
d.foundDep(methodExprName(n))
}
return true
@ -297,7 +299,7 @@ func (d *initDeps) visit(n *Node) bool {
// foundDep records that we've found a dependency on n by adding it to
// seen.
func (d *initDeps) foundDep(n *Node) {
func (d *initDeps) foundDep(n *ir.Node) {
// Can happen with method expressions involving interface
// types; e.g., fixedbugs/issue4495.go.
if n == nil {
@ -314,7 +316,7 @@ func (d *initDeps) foundDep(n *Node) {
return
}
d.seen.Add(n)
if d.transitive && n.Class() == PFUNC {
if d.transitive && n.Class() == ir.PFUNC {
d.inspectList(n.Name.Defn.Nbody)
}
}
@ -326,13 +328,13 @@ func (d *initDeps) foundDep(n *Node) {
// an OAS node's Pos may not be unique. For example, given the
// declaration "var a, b = f(), g()", "a" must be ordered before "b",
// but both OAS nodes use the "=" token's position as their Pos.
type declOrder []*Node
type declOrder []*ir.Node
func (s declOrder) Len() int { return len(s) }
func (s declOrder) Less(i, j int) bool { return firstLHS(s[i]).Pos.Before(firstLHS(s[j]).Pos) }
func (s declOrder) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(*Node)) }
func (s *declOrder) Push(x interface{}) { *s = append(*s, x.(*ir.Node)) }
func (s *declOrder) Pop() interface{} {
n := (*s)[len(*s)-1]
*s = (*s)[:len(*s)-1]
@ -341,11 +343,11 @@ func (s *declOrder) Pop() interface{} {
// firstLHS returns the first expression on the left-hand side of
// assignment n.
func firstLHS(n *Node) *Node {
func firstLHS(n *ir.Node) *ir.Node {
switch n.Op {
case OAS:
case ir.OAS:
return n.Left
case OAS2DOTTYPE, OAS2FUNC, OAS2RECV, OAS2MAPR:
case ir.OAS2DOTTYPE, ir.OAS2FUNC, ir.OAS2RECV, ir.OAS2MAPR:
return n.List.First()
}
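
The scheduling machinery above reduces to a small idea: count unresolved dependencies per assignment (the role n.Xoffset plays here), keep reverse edges in blocking, and emit assignments in declaration order as their counts reach zero. A freestanding miniature, with plain strings in place of nodes:

	package main

	import "fmt"

	type assign struct {
		name string
		deps []string // names this initializer reads
	}

	func main() {
		// var c = b; var a = 1; var b = a  — declaration order: c, a, b.
		decls := []assign{{"c", []string{"b"}}, {"a", nil}, {"b", []string{"a"}}}

		pending := map[string]int{}       // unresolved dependency count per assignment
		blocking := map[string][]string{} // dependency -> assignments it blocks
		for _, d := range decls {
			pending[d.name] = len(d.deps)
			for _, dep := range d.deps {
				blocking[dep] = append(blocking[dep], d.name)
			}
		}

		var out []string
		for {
			// One flushReady-style pass: emit everything ready, in
			// declaration order, unblocking dependents as we go.
			emitted := false
			for _, d := range decls {
				if pending[d.name] != 0 {
					continue
				}
				pending[d.name] = -1 // mark done
				out = append(out, d.name)
				for _, blocked := range blocking[d.name] {
					pending[blocked]--
				}
				emitted = true
			}
			if !emitted {
				break // anything still pending is part of a cycle
			}
		}
		fmt.Println(out) // [a b c]
	}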

(file diff suppressed because it is too large)


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/internal/objabi"
"cmd/internal/src"
@ -25,78 +26,51 @@ func isQuoted(s string) bool {
return len(s) >= 2 && s[0] == '"' && s[len(s)-1] == '"'
}
type PragmaFlag int16
const (
// Func pragmas.
Nointerface PragmaFlag = 1 << iota
Noescape // func parameters don't escape
Norace // func must not have race detector annotations
Nosplit // func should not execute on separate stack
Noinline // func should not be inlined
NoCheckPtr // func should not be instrumented by checkptr
CgoUnsafeArgs // treat a pointer to one arg as a pointer to them all
UintptrEscapes // pointers converted to uintptr escape
FuncPragmas = ir.Nointerface |
ir.Noescape |
ir.Norace |
ir.Nosplit |
ir.Noinline |
ir.NoCheckPtr |
ir.CgoUnsafeArgs |
ir.UintptrEscapes |
ir.Systemstack |
ir.Nowritebarrier |
ir.Nowritebarrierrec |
ir.Yeswritebarrierrec
// Runtime-only func pragmas.
// See ../../../../runtime/README.md for detailed descriptions.
Systemstack // func must run on system stack
Nowritebarrier // emit compiler error instead of write barrier
Nowritebarrierrec // error on write barrier in this or recursive callees
Yeswritebarrierrec // cancels Nowritebarrierrec in this function and callees
// Runtime and cgo type pragmas
NotInHeap // values of this type must not be heap allocated
// Go command pragmas
GoBuildPragma
TypePragmas = ir.NotInHeap
)
const (
FuncPragmas = Nointerface |
Noescape |
Norace |
Nosplit |
Noinline |
NoCheckPtr |
CgoUnsafeArgs |
UintptrEscapes |
Systemstack |
Nowritebarrier |
Nowritebarrierrec |
Yeswritebarrierrec
TypePragmas = NotInHeap
)
func pragmaFlag(verb string) PragmaFlag {
func pragmaFlag(verb string) ir.PragmaFlag {
switch verb {
case "go:build":
return GoBuildPragma
return ir.GoBuildPragma
case "go:nointerface":
if objabi.Fieldtrack_enabled != 0 {
return Nointerface
return ir.Nointerface
}
case "go:noescape":
return Noescape
return ir.Noescape
case "go:norace":
return Norace
return ir.Norace
case "go:nosplit":
return Nosplit | NoCheckPtr // implies NoCheckPtr (see #34972)
return ir.Nosplit | ir.NoCheckPtr // implies NoCheckPtr (see #34972)
case "go:noinline":
return Noinline
return ir.Noinline
case "go:nocheckptr":
return NoCheckPtr
return ir.NoCheckPtr
case "go:systemstack":
return Systemstack
return ir.Systemstack
case "go:nowritebarrier":
return Nowritebarrier
return ir.Nowritebarrier
case "go:nowritebarrierrec":
return Nowritebarrierrec | Nowritebarrier // implies Nowritebarrier
return ir.Nowritebarrierrec | ir.Nowritebarrier // implies Nowritebarrier
case "go:yeswritebarrierrec":
return Yeswritebarrierrec
return ir.Yeswritebarrierrec
case "go:cgo_unsafe_args":
return CgoUnsafeArgs | NoCheckPtr // implies NoCheckPtr (see #34968)
return ir.CgoUnsafeArgs | ir.NoCheckPtr // implies NoCheckPtr (see #34968)
case "go:uintptrescapes":
// For the next function declared in the file
// any uintptr arguments may be pointer values
@ -109,9 +83,9 @@ func pragmaFlag(verb string) PragmaFlag {
// call. The conversion to uintptr must appear
// in the argument list.
// Used in syscall/dll_windows.go.
return UintptrEscapes
return ir.UintptrEscapes
case "go:notinheap":
return NotInHeap
return ir.NotInHeap
}
return 0
}
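
With the verbs mapped to ir.PragmaFlag bits, combining and filtering pragmas stays plain bit arithmetic, as FuncPragmas and TypePragmas above suggest. A toy illustration (invented constants, not the real ir values):

	package main

	import "fmt"

	type PragmaFlag int16

	// Invented values; the real constants now live in package ir.
	const (
		Nosplit PragmaFlag = 1 << iota
		NoCheckPtr
		NotInHeap
	)

	const FuncPragmas = Nosplit | NoCheckPtr // the subset legal on functions

	func main() {
		p := Nosplit | NoCheckPtr // what pragmaFlag("go:nosplit") models above
		fmt.Println(p&FuncPragmas != 0) // true: carries function pragmas
		fmt.Println(p&NotInHeap != 0)   // false: no type pragma set
	}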


@ -10,6 +10,7 @@ import (
"bufio"
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@ -73,17 +74,17 @@ func Main(archInit func(*Arch)) {
// See bugs 31188 and 21945 (CLs 170638, 98075, 72371).
base.Ctxt.UseBASEntries = base.Ctxt.Headtype != objabi.Hdarwin
localpkg = types.NewPkg("", "")
localpkg.Prefix = "\"\""
ir.LocalPkg = types.NewPkg("", "")
ir.LocalPkg.Prefix = "\"\""
// We won't know localpkg's height until after import
// processing. In the meantime, set to MaxPkgHeight to ensure
// height comparisons at least work until then.
localpkg.Height = types.MaxPkgHeight
ir.LocalPkg.Height = types.MaxPkgHeight
// pseudo-package, for scoping
builtinpkg = types.NewPkg("go.builtin", "") // TODO(gri) name this package go.builtin?
builtinpkg.Prefix = "go.builtin" // not go%2ebuiltin
ir.BuiltinPkg = types.NewPkg("go.builtin", "") // TODO(gri) name this package go.builtin?
ir.BuiltinPkg.Prefix = "go.builtin" // not go%2ebuiltin
// pseudo-package, accessed by import "unsafe"
unsafepkg = types.NewPkg("unsafe", "unsafe")
@ -209,29 +210,18 @@ func Main(archInit func(*Arch)) {
types.Widthptr = Widthptr
types.Dowidth = dowidth
types.Fatalf = base.Fatalf
types.Sconv = func(s *types.Sym, flag, mode int) string {
return sconv(s, FmtFlag(flag), fmtMode(mode))
}
types.Tconv = func(t *types.Type, flag, mode int) string {
return tconv(t, FmtFlag(flag), fmtMode(mode))
}
types.FormatSym = func(sym *types.Sym, s fmt.State, verb rune, mode int) {
symFormat(sym, s, verb, fmtMode(mode))
}
types.FormatType = func(t *types.Type, s fmt.State, verb rune, mode int) {
typeFormat(t, s, verb, fmtMode(mode))
}
ir.InstallTypeFormats()
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return typenamesym(t).Linksym()
}
types.FmtLeft = int(FmtLeft)
types.FmtUnsigned = int(FmtUnsigned)
types.FErr = int(FErr)
types.FmtLeft = int(ir.FmtLeft)
types.FmtUnsigned = int(ir.FmtUnsigned)
types.FErr = int(ir.FErr)
types.Ctxt = base.Ctxt
initUniverse()
dclcontext = PEXTERN
dclcontext = ir.PEXTERN
autogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
@ -263,7 +253,7 @@ func Main(archInit func(*Arch)) {
timings.Start("fe", "typecheck", "top1")
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if op := n.Op; op != ODCL && op != OAS && op != OAS2 && (op != ODCLTYPE || !n.Left.Name.Param.Alias()) {
if op := n.Op; op != ir.ODCL && op != ir.OAS && op != ir.OAS2 && (op != ir.ODCLTYPE || !n.Left.Name.Param.Alias()) {
xtop[i] = typecheck(n, ctxStmt)
}
}
@ -275,7 +265,7 @@ func Main(archInit func(*Arch)) {
timings.Start("fe", "typecheck", "top2")
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if op := n.Op; op == ODCL || op == OAS || op == OAS2 || op == ODCLTYPE && n.Left.Name.Param.Alias() {
if op := n.Op; op == ir.ODCL || op == ir.OAS || op == ir.OAS2 || op == ir.ODCLTYPE && n.Left.Name.Param.Alias() {
xtop[i] = typecheck(n, ctxStmt)
}
}
@ -286,7 +276,7 @@ func Main(archInit func(*Arch)) {
var fcount int64
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if n.Op == ODCLFUNC {
if n.Op == ir.ODCLFUNC {
Curfn = n
decldepth = 1
errorsBefore := base.Errors()
@ -316,7 +306,7 @@ func Main(archInit func(*Arch)) {
// because variables captured by value do not escape.
timings.Start("fe", "capturevars")
for _, n := range xtop {
if n.Op == ODCLFUNC && n.Func.OClosure != nil {
if n.Op == ir.ODCLFUNC && n.Func.OClosure != nil {
Curfn = n
capturevars(n)
}
@ -340,7 +330,7 @@ func Main(archInit func(*Arch)) {
if base.Flag.LowerL != 0 {
// Find functions that can be inlined and clone them before walk expands them.
visitBottomUp(xtop, func(list []*Node, recursive bool) {
visitBottomUp(xtop, func(list []*ir.Node, recursive bool) {
numfns := numNonClosures(list)
for _, n := range list {
if !recursive || numfns > 1 {
@ -350,7 +340,7 @@ func Main(archInit func(*Arch)) {
caninl(n)
} else {
if base.Flag.LowerM > 1 {
fmt.Printf("%v: cannot inline %v: recursive\n", n.Line(), n.Func.Nname)
fmt.Printf("%v: cannot inline %v: recursive\n", ir.Line(n), n.Func.Nname)
}
}
inlcalls(n)
@ -359,7 +349,7 @@ func Main(archInit func(*Arch)) {
}
for _, n := range xtop {
if n.Op == ODCLFUNC {
if n.Op == ir.ODCLFUNC {
devirtualize(n)
}
}
@ -389,7 +379,7 @@ func Main(archInit func(*Arch)) {
// before walk reaches a call of a closure.
timings.Start("fe", "xclosures")
for _, n := range xtop {
if n.Op == ODCLFUNC && n.Func.OClosure != nil {
if n.Op == ir.ODCLFUNC && n.Func.OClosure != nil {
Curfn = n
transformclosure(n)
}
@ -412,7 +402,7 @@ func Main(archInit func(*Arch)) {
fcount = 0
for i := 0; i < len(xtop); i++ {
n := xtop[i]
if n.Op == ODCLFUNC {
if n.Op == ir.ODCLFUNC {
funccompile(n)
fcount++
}
@ -440,7 +430,7 @@ func Main(archInit func(*Arch)) {
// Phase 9: Check external declarations.
timings.Start("be", "externaldcls")
for i, n := range externdcl {
if n.Op == ONAME {
if n.Op == ir.ONAME {
externdcl[i] = typecheck(externdcl[i], ctxExpr)
}
}
@ -491,7 +481,7 @@ func Main(archInit func(*Arch)) {
}
// numNonClosures returns the number of functions in list which are not closures.
func numNonClosures(list []*Node) int {
func numNonClosures(list []*ir.Node) int {
count := 0
for _, n := range list {
if n.Func.OClosure == nil {
@ -934,14 +924,14 @@ func pkgnotused(lineno src.XPos, path string, name string) {
}
func mkpackage(pkgname string) {
if localpkg.Name == "" {
if ir.LocalPkg.Name == "" {
if pkgname == "_" {
base.Errorf("invalid package name _")
}
localpkg.Name = pkgname
ir.LocalPkg.Name = pkgname
} else {
if pkgname != localpkg.Name {
base.Errorf("package %s; expected %s", pkgname, localpkg.Name)
if pkgname != ir.LocalPkg.Name {
base.Errorf("package %s; expected %s", pkgname, ir.LocalPkg.Name)
}
}
}
@ -954,12 +944,12 @@ func clearImports() {
}
var unused []importedPkg
for _, s := range localpkg.Syms {
n := asNode(s.Def)
for _, s := range ir.LocalPkg.Syms {
n := ir.AsNode(s.Def)
if n == nil {
continue
}
if n.Op == OPACK {
if n.Op == ir.OPACK {
// throw away top-level package name left over
// from previous file.
// leave s->block set to cause redeclaration
@ -990,7 +980,7 @@ func clearImports() {
}
func IsAlias(sym *types.Sym) bool {
return sym.Def != nil && asNode(sym.Def).Sym != sym
return sym.Def != nil && ir.AsNode(sym.Def).Sym != sym
}
// recordFlags records the specified command-line flags to be placed
@ -1057,7 +1047,7 @@ func recordPackageName() {
// together two package main archives. So allow dups.
s.Set(obj.AttrDuplicateOK, true)
base.Ctxt.Data = append(base.Ctxt.Data, s)
s.P = []byte(localpkg.Name)
s.P = []byte(ir.LocalPkg.Name)
}
// currentLang returns the current language version.
@ -1084,9 +1074,9 @@ var langWant lang
func langSupported(major, minor int, pkg *types.Pkg) bool {
if pkg == nil {
// TODO(mdempsky): Set Pkg for local types earlier.
pkg = localpkg
pkg = ir.LocalPkg
}
if pkg != localpkg {
if pkg != ir.LocalPkg {
// Assume imported packages passed type-checking.
return true
}
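
The IsAlias test earlier in this file turns on one subtlety: for an alias declaration the symbol's definition is some other type's node, so that node's Sym differs from the symbol itself. In source terms (hypothetical types):

	package p

	type B struct{}

	type A = B // alias: A's sym.Def resolves to B's node, so AsNode(sym.Def).Sym != sym
	type C B   // definition: C's sym.Def is C's own node, so IsAlias reports false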


@ -35,7 +35,10 @@ func main() {
fmt.Fprintln(&b)
fmt.Fprintln(&b, "package gc")
fmt.Fprintln(&b)
fmt.Fprintln(&b, `import "cmd/compile/internal/types"`)
fmt.Fprintln(&b, `import (`)
fmt.Fprintln(&b, ` "cmd/compile/internal/ir"`)
fmt.Fprintln(&b, ` "cmd/compile/internal/types"`)
fmt.Fprintln(&b, `)`)
mkbuiltin(&b, "runtime")
@ -144,12 +147,12 @@ func (i *typeInterner) mktype(t ast.Expr) string {
case "rune":
return "types.Runetype"
}
return fmt.Sprintf("types.Types[T%s]", strings.ToUpper(t.Name))
return fmt.Sprintf("types.Types[types.T%s]", strings.ToUpper(t.Name))
case *ast.SelectorExpr:
if t.X.(*ast.Ident).Name != "unsafe" || t.Sel.Name != "Pointer" {
log.Fatalf("unhandled type: %#v", t)
}
return "types.Types[TUNSAFEPTR]"
return "types.Types[types.TUNSAFEPTR]"
case *ast.ArrayType:
if t.Len == nil {
@ -171,7 +174,7 @@ func (i *typeInterner) mktype(t ast.Expr) string {
if len(t.Methods.List) != 0 {
log.Fatal("non-empty interfaces unsupported")
}
return "types.Types[TINTER]"
return "types.Types[types.TINTER]"
case *ast.MapType:
return fmt.Sprintf("types.NewMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
case *ast.StarExpr:
@ -204,7 +207,7 @@ func (i *typeInterner) fields(fl *ast.FieldList, keepNames bool) string {
}
}
}
return fmt.Sprintf("[]*Node{%s}", strings.Join(res, ", "))
return fmt.Sprintf("[]*ir.Node{%s}", strings.Join(res, ", "))
}
func intconst(e ast.Expr) int64 {

(file diff suppressed because it is too large)


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/obj"
@ -83,7 +84,7 @@ func printObjHeader(bout *bio.Writer) {
if base.Flag.BuildID != "" {
fmt.Fprintf(bout, "build id %q\n", base.Flag.BuildID)
}
if localpkg.Name == "main" {
if ir.LocalPkg.Name == "main" {
fmt.Fprintf(bout, "main\n")
}
fmt.Fprintf(bout, "\n") // header ends with blank line
@ -141,7 +142,7 @@ func dumpdata() {
for {
for i := xtops; i < len(xtop); i++ {
n := xtop[i]
if n.Op == ODCLFUNC {
if n.Op == ir.ODCLFUNC {
funccompile(n)
}
}
@ -199,16 +200,16 @@ func dumpLinkerObj(bout *bio.Writer) {
}
func addptabs() {
if !base.Ctxt.Flag_dynlink || localpkg.Name != "main" {
if !base.Ctxt.Flag_dynlink || ir.LocalPkg.Name != "main" {
return
}
for _, exportn := range exportlist {
s := exportn.Sym
n := asNode(s.Def)
n := ir.AsNode(s.Def)
if n == nil {
continue
}
if n.Op != ONAME {
if n.Op != ir.ONAME {
continue
}
if !types.IsExported(s.Name) {
@ -217,37 +218,37 @@ func addptabs() {
if s.Pkg.Name != "main" {
continue
}
if n.Type.Etype == TFUNC && n.Class() == PFUNC {
if n.Type.Etype == types.TFUNC && n.Class() == ir.PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: asNode(s.Def).Type})
ptabs = append(ptabs, ptabEntry{s: s, t: ir.AsNode(s.Def).Type})
} else {
// variable
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(asNode(s.Def).Type)})
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(ir.AsNode(s.Def).Type)})
}
}
}
func dumpGlobal(n *Node) {
func dumpGlobal(n *ir.Node) {
if n.Type == nil {
base.Fatalf("external %v nil type\n", n)
}
if n.Class() == PFUNC {
if n.Class() == ir.PFUNC {
return
}
if n.Sym.Pkg != localpkg {
if n.Sym.Pkg != ir.LocalPkg {
return
}
dowidth(n.Type)
ggloblnod(n)
}
func dumpGlobalConst(n *Node) {
func dumpGlobalConst(n *ir.Node) {
// only export typed constants
t := n.Type
if t == nil {
return
}
if n.Sym.Pkg != localpkg {
if n.Sym.Pkg != ir.LocalPkg {
return
}
// only export integer constants for now
@ -257,21 +258,21 @@ func dumpGlobalConst(n *Node) {
v := n.Val()
if t.IsUntyped() {
// Export untyped integers as int (if they fit).
t = types.Types[TINT]
t = types.Types[types.TINT]
if doesoverflow(v, t) {
return
}
}
base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym.Name, typesymname(t), int64Val(t, v))
base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym.Name, typesymname(t), ir.Int64Val(t, v))
}
func dumpglobls() {
// add globals
for _, n := range externdcl {
switch n.Op {
case ONAME:
case ir.ONAME:
dumpGlobal(n)
case OLITERAL:
case ir.OLITERAL:
dumpGlobalConst(n)
}
}
@ -474,12 +475,12 @@ func fileStringSym(pos src.XPos, file string, readonly bool, hash []byte) (*obj.
var slicedataGen int
func slicedata(pos src.XPos, s string) *Node {
func slicedata(pos src.XPos, s string) *ir.Node {
slicedataGen++
symname := fmt.Sprintf(".gobytes.%d", slicedataGen)
sym := localpkg.Lookup(symname)
symnode := newname(sym)
sym.Def = asTypesNode(symnode)
sym := ir.LocalPkg.Lookup(symname)
symnode := NewName(sym)
sym.Def = ir.AsTypesNode(symnode)
lsym := sym.Linksym()
off := dstringdata(lsym, 0, s, pos, "slice")
@ -488,8 +489,8 @@ func slicedata(pos src.XPos, s string) *Node {
return symnode
}
func slicebytes(nam *Node, s string) {
if nam.Op != ONAME {
func slicebytes(nam *ir.Node, s string) {
if nam.Op != ir.ONAME {
base.Fatalf("slicebytes %v", nam)
}
slicesym(nam, slicedata(nam.Pos, s), int64(len(s)))
@ -529,10 +530,10 @@ func dsymptrWeakOff(s *obj.LSym, off int, x *obj.LSym) int {
// slicesym writes a static slice symbol {&arr, lencap, lencap} to n.
// arr must be an ONAME. slicesym does not modify n.
func slicesym(n, arr *Node, lencap int64) {
func slicesym(n, arr *ir.Node, lencap int64) {
s := n.Sym.Linksym()
off := n.Xoffset
if arr.Op != ONAME {
if arr.Op != ir.ONAME {
base.Fatalf("slicesym non-name arr %v", arr)
}
s.WriteAddr(base.Ctxt, off, Widthptr, arr.Sym.Linksym(), arr.Xoffset)
@ -542,14 +543,14 @@ func slicesym(n, arr *Node, lencap int64) {
// addrsym writes the static address of a to n. a must be an ONAME.
// Neither n nor a is modified.
func addrsym(n, a *Node) {
if n.Op != ONAME {
func addrsym(n, a *ir.Node) {
if n.Op != ir.ONAME {
base.Fatalf("addrsym n op %v", n.Op)
}
if n.Sym == nil {
base.Fatalf("addrsym nil n sym")
}
if a.Op != ONAME {
if a.Op != ir.ONAME {
base.Fatalf("addrsym a op %v", a.Op)
}
s := n.Sym.Linksym()
@ -558,14 +559,14 @@ func addrsym(n, a *Node) {
// pfuncsym writes the static address of f to n. f must be a global function.
// Neither n nor f is modified.
func pfuncsym(n, f *Node) {
if n.Op != ONAME {
func pfuncsym(n, f *ir.Node) {
if n.Op != ir.ONAME {
base.Fatalf("pfuncsym n op %v", n.Op)
}
if n.Sym == nil {
base.Fatalf("pfuncsym nil n sym")
}
if f.Class() != PFUNC {
if f.Class() != ir.PFUNC {
base.Fatalf("pfuncsym class not PFUNC %d", f.Class())
}
s := n.Sym.Linksym()
@ -574,8 +575,8 @@ func pfuncsym(n, f *Node) {
// litsym writes the static literal c to n.
// Neither n nor c is modified.
func litsym(n, c *Node, wid int) {
if n.Op != ONAME {
func litsym(n, c *ir.Node, wid int) {
if n.Op != ir.ONAME {
base.Fatalf("litsym n op %v", n.Op)
}
if n.Sym == nil {
@ -584,10 +585,10 @@ func litsym(n, c *Node, wid int) {
if !types.Identical(n.Type, c.Type) {
base.Fatalf("litsym: type mismatch: %v has type %v, but %v has type %v", n, n.Type, c, c.Type)
}
if c.Op == ONIL {
if c.Op == ir.ONIL {
return
}
if c.Op != OLITERAL {
if c.Op != ir.OLITERAL {
base.Fatalf("litsym c op %v", c.Op)
}
s := n.Sym.Linksym()
@ -597,14 +598,14 @@ func litsym(n, c *Node, wid int) {
s.WriteInt(base.Ctxt, n.Xoffset, wid, i)
case constant.Int:
s.WriteInt(base.Ctxt, n.Xoffset, wid, int64Val(n.Type, u))
s.WriteInt(base.Ctxt, n.Xoffset, wid, ir.Int64Val(n.Type, u))
case constant.Float:
f, _ := constant.Float64Val(u)
switch n.Type.Etype {
case TFLOAT32:
case types.TFLOAT32:
s.WriteFloat32(base.Ctxt, n.Xoffset, float32(f))
case TFLOAT64:
case types.TFLOAT64:
s.WriteFloat64(base.Ctxt, n.Xoffset, f)
}
@ -612,10 +613,10 @@ func litsym(n, c *Node, wid int) {
re, _ := constant.Float64Val(constant.Real(u))
im, _ := constant.Float64Val(constant.Imag(u))
switch n.Type.Etype {
case TCOMPLEX64:
case types.TCOMPLEX64:
s.WriteFloat32(base.Ctxt, n.Xoffset, float32(re))
s.WriteFloat32(base.Ctxt, n.Xoffset+4, float32(im))
case TCOMPLEX128:
case types.TCOMPLEX128:
s.WriteFloat64(base.Ctxt, n.Xoffset, re)
s.WriteFloat64(base.Ctxt, n.Xoffset+8, im)
}
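
The complex cases at the end of litsym write the real part at the value's offset and the imaginary part 4 or 8 bytes past it, matching how gc lays out complex64 and complex128 in memory. A quick standalone check of that layout (relies on gc's representation; illustrative only):

	package main

	import (
		"fmt"
		"unsafe"
	)

	func main() {
		c := complex(1.5, 2.5) // complex128
		// gc stores complex128 as the real float64 followed by the
		// imaginary one, hence litsym's WriteFloat64(..., Xoffset+8, im).
		parts := *(*[2]float64)(unsafe.Pointer(&c))
		fmt.Println(parts[0], parts[1]) // 1.5 2.5
	}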

(file diff suppressed because it is too large)


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/dwarf"
@ -23,14 +24,14 @@ import (
// "Portable" code generation.
var (
compilequeue []*Node // functions waiting to be compiled
compilequeue []*ir.Node // functions waiting to be compiled
)
func emitptrargsmap(fn *Node) {
if fn.funcname() == "_" || fn.Func.Nname.Sym.Linkname != "" {
func emitptrargsmap(fn *ir.Node) {
if ir.FuncName(fn) == "_" || fn.Func.Nname.Sym.Linkname != "" {
return
}
lsym := base.Ctxt.Lookup(fn.Func.lsym.Name + ".args_stackmap")
lsym := base.Ctxt.Lookup(fn.Func.LSym.Name + ".args_stackmap")
nptr := int(fn.Type.ArgWidth() / int64(Widthptr))
bv := bvalloc(int32(nptr) * 2)
@ -41,7 +42,7 @@ func emitptrargsmap(fn *Node) {
off := duint32(lsym, 0, uint32(nbitmap))
off = duint32(lsym, off, uint32(bv.n))
if fn.IsMethod() {
if ir.IsMethod(fn) {
onebitwalktype1(fn.Type.Recvs(), 0, bv)
}
if fn.Type.NumParams() > 0 {
@ -67,12 +68,12 @@ func emitptrargsmap(fn *Node) {
// really means, in memory, things with pointers needing zeroing at
// the top of the stack and increasing in size.
// Non-autos sort on offset.
func cmpstackvarlt(a, b *Node) bool {
if (a.Class() == PAUTO) != (b.Class() == PAUTO) {
return b.Class() == PAUTO
func cmpstackvarlt(a, b *ir.Node) bool {
if (a.Class() == ir.PAUTO) != (b.Class() == ir.PAUTO) {
return b.Class() == ir.PAUTO
}
if a.Class() != PAUTO {
if a.Class() != ir.PAUTO {
return a.Xoffset < b.Xoffset
}
@ -100,7 +101,7 @@ func cmpstackvarlt(a, b *Node) bool {
}
// byStackvar implements sort.Interface for []*Node using cmpstackvarlt.
type byStackVar []*Node
type byStackVar []*ir.Node
func (s byStackVar) Len() int { return len(s) }
func (s byStackVar) Less(i, j int) bool { return cmpstackvarlt(s[i], s[j]) }
@ -113,28 +114,28 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Mark the PAUTO's unused.
for _, ln := range fn.Dcl {
if ln.Class() == PAUTO {
if ln.Class() == ir.PAUTO {
ln.Name.SetUsed(false)
}
}
for _, l := range f.RegAlloc {
if ls, ok := l.(ssa.LocalSlot); ok {
ls.N.(*Node).Name.SetUsed(true)
ls.N.(*ir.Node).Name.SetUsed(true)
}
}
scratchUsed := false
for _, b := range f.Blocks {
for _, v := range b.Values {
if n, ok := v.Aux.(*Node); ok {
if n, ok := v.Aux.(*ir.Node); ok {
switch n.Class() {
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
// Don't modify nodfp; it is a global.
if n != nodfp {
n.Name.SetUsed(true)
}
case PAUTO:
case ir.PAUTO:
n.Name.SetUsed(true)
}
}
@ -146,7 +147,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
}
if f.Config.NeedsFpScratch && scratchUsed {
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[TUINT64])
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[types.TUINT64])
}
sort.Sort(byStackVar(fn.Dcl))
@ -154,7 +155,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
// Reassign stack offsets of the locals that are used.
lastHasPtr := false
for i, n := range fn.Dcl {
if n.Op != ONAME || n.Class() != PAUTO {
if n.Op != ir.ONAME || n.Class() != ir.PAUTO {
continue
}
if !n.Name.Used() {
@ -192,7 +193,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
s.stkptrsize = Rnd(s.stkptrsize, int64(Widthreg))
}
func funccompile(fn *Node) {
func funccompile(fn *ir.Node) {
if Curfn != nil {
base.Fatalf("funccompile %v inside %v", fn.Func.Nname.Sym, Curfn.Func.Nname.Sym)
}
@ -209,21 +210,21 @@ func funccompile(fn *Node) {
if fn.Nbody.Len() == 0 {
// Initialize ABI wrappers if necessary.
fn.Func.initLSym(false)
initLSym(fn.Func, false)
emitptrargsmap(fn)
return
}
dclcontext = PAUTO
dclcontext = ir.PAUTO
Curfn = fn
compile(fn)
Curfn = nil
dclcontext = PEXTERN
dclcontext = ir.PEXTERN
}
func compile(fn *Node) {
func compile(fn *ir.Node) {
errorsBefore := base.Errors()
order(fn)
if base.Errors() > errorsBefore {
@ -233,7 +234,7 @@ func compile(fn *Node) {
// Set up the function's LSym early to avoid data races with the assemblers.
// Do this before walk, as walk needs the LSym to set attributes/relocations
// (e.g. in markTypeUsedInInterface).
fn.Func.initLSym(true)
initLSym(fn.Func, true)
walk(fn)
if base.Errors() > errorsBefore {
@ -246,7 +247,7 @@ func compile(fn *Node) {
// From this point, there should be no uses of Curfn. Enforce that.
Curfn = nil
if fn.funcname() == "_" {
if ir.FuncName(fn) == "_" {
// We don't need to generate code for this function, just report errors in its body.
// At this point we've generated any errors needed.
// (Beyond here we generate only non-spec errors, like "stack frame too large".)
@ -260,13 +261,13 @@ func compile(fn *Node) {
// phase of the compiler.
for _, n := range fn.Func.Dcl {
switch n.Class() {
case PPARAM, PPARAMOUT, PAUTO:
case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
if livenessShouldTrack(n) && n.Name.Addrtaken() {
dtypesym(n.Type)
// Also make sure we allocate a linker symbol
// for the stack object data, for the same reason.
if fn.Func.lsym.Func().StackObjects == nil {
fn.Func.lsym.Func().StackObjects = base.Ctxt.Lookup(fn.Func.lsym.Name + ".stkobj")
if fn.Func.LSym.Func().StackObjects == nil {
fn.Func.LSym.Func().StackObjects = base.Ctxt.Lookup(fn.Func.LSym.Name + ".stkobj")
}
}
}
@ -283,13 +284,13 @@ func compile(fn *Node) {
// If functions are not compiled immediately,
// they are enqueued in compilequeue,
// which is drained by compileFunctions.
func compilenow(fn *Node) bool {
func compilenow(fn *ir.Node) bool {
// Issue 38068: if this function is a method AND an inline
// candidate AND was not inlined (yet), put it onto the compile
// queue instead of compiling it immediately. This is in case we
// wind up inlining it into a method wrapper that is generated by
// compiling a function later on in the xtop list.
if fn.IsMethod() && isInlinableButNotInlined(fn) {
if ir.IsMethod(fn) && isInlinableButNotInlined(fn) {
return false
}
return base.Flag.LowerC == 1 && base.Debug.CompileLater == 0
@ -298,7 +299,7 @@ func compilenow(fn *Node) bool {
// isInlinableButNotInlined returns true if 'fn' was marked as an
// inline candidate but then never inlined (presumably because we
// found no call sites).
func isInlinableButNotInlined(fn *Node) bool {
func isInlinableButNotInlined(fn *ir.Node) bool {
if fn.Func.Nname.Func.Inl == nil {
return false
}
@ -314,7 +315,7 @@ const maxStackSize = 1 << 30
// uses it to generate a plist,
// and flushes that plist to machine code.
// worker indicates which of the backend workers is doing the processing.
func compileSSA(fn *Node, worker int) {
func compileSSA(fn *ir.Node, worker int) {
f := buildssa(fn, worker)
// Note: check arg size to fix issue 25507.
if f.Frontend().(*ssafn).stksize >= maxStackSize || fn.Type.ArgWidth() >= maxStackSize {
@ -359,7 +360,7 @@ func compileFunctions() {
sizeCalculationDisabled = true // not safe to calculate sizes concurrently
if race.Enabled {
// Randomize compilation order to try to shake out races.
tmp := make([]*Node, len(compilequeue))
tmp := make([]*ir.Node, len(compilequeue))
perm := rand.Perm(len(compilequeue))
for i, v := range perm {
tmp[v] = compilequeue[i]
@ -375,7 +376,7 @@ func compileFunctions() {
}
var wg sync.WaitGroup
base.Ctxt.InParallel = true
c := make(chan *Node, base.Flag.LowerC)
c := make(chan *ir.Node, base.Flag.LowerC)
for i := 0; i < base.Flag.LowerC; i++ {
wg.Add(1)
go func(worker int) {
@ -397,7 +398,7 @@ func compileFunctions() {
}
func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.Scope, dwarf.InlCalls) {
fn := curfn.(*Node)
fn := curfn.(*ir.Node)
if fn.Func.Nname != nil {
if expect := fn.Func.Nname.Sym.Linksym(); fnsym != expect {
base.Fatalf("unexpected fnsym: %v != %v", fnsym, expect)
@ -429,17 +430,17 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
//
// These two adjustments keep toolstash -cmp working for now.
// Deciding the right answer is, as they say, future work.
isODCLFUNC := fn.Op == ODCLFUNC
isODCLFUNC := fn.Op == ir.ODCLFUNC
var apdecls []*Node
var apdecls []*ir.Node
// Populate decls for fn.
if isODCLFUNC {
for _, n := range fn.Func.Dcl {
if n.Op != ONAME { // might be OTYPE or OLITERAL
if n.Op != ir.ONAME { // might be OTYPE or OLITERAL
continue
}
switch n.Class() {
case PAUTO:
case ir.PAUTO:
if !n.Name.Used() {
// Text == nil -> generating abstract function
if fnsym.Func().Text != nil {
@ -447,7 +448,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
}
continue
}
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
default:
continue
}
@ -474,7 +475,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
}
fnsym.Func().Autot = nil
var varScopes []ScopeID
var varScopes []ir.ScopeID
for _, decl := range decls {
pos := declPos(decl)
varScopes = append(varScopes, findScope(fn.Func.Marks, pos))
@ -488,7 +489,7 @@ func debuginfo(fnsym *obj.LSym, infosym *obj.LSym, curfn interface{}) ([]dwarf.S
return scopes, inlcalls
}
func declPos(decl *Node) src.XPos {
func declPos(decl *ir.Node) src.XPos {
if decl.Name.Defn != nil && (decl.Name.Captured() || decl.Name.Byval()) {
// It's not clear which position is correct for captured variables here:
// * decl.Pos is the wrong position for captured variables, in the inner
@ -511,10 +512,10 @@ func declPos(decl *Node) src.XPos {
// createSimpleVars creates a DWARF entry for every variable declared in the
// function, claiming that they are permanently on the stack.
func createSimpleVars(fnsym *obj.LSym, apDecls []*Node) ([]*Node, []*dwarf.Var, map[*Node]bool) {
func createSimpleVars(fnsym *obj.LSym, apDecls []*ir.Node) ([]*ir.Node, []*dwarf.Var, map[*ir.Node]bool) {
var vars []*dwarf.Var
var decls []*Node
selected := make(map[*Node]bool)
var decls []*ir.Node
selected := make(map[*ir.Node]bool)
for _, n := range apDecls {
if n.IsAutoTmp() {
continue
@ -527,12 +528,12 @@ func createSimpleVars(fnsym *obj.LSym, apDecls []*Node) ([]*Node, []*dwarf.Var,
return decls, vars, selected
}
func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
func createSimpleVar(fnsym *obj.LSym, n *ir.Node) *dwarf.Var {
var abbrev int
offs := n.Xoffset
switch n.Class() {
case PAUTO:
case ir.PAUTO:
abbrev = dwarf.DW_ABRV_AUTO
if base.Ctxt.FixedFrameSize() == 0 {
offs -= int64(Widthptr)
@ -542,7 +543,7 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
offs -= int64(Widthptr)
}
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
abbrev = dwarf.DW_ABRV_PARAM
offs += base.Ctxt.FixedFrameSize()
default:
@ -563,7 +564,7 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
declpos := base.Ctxt.InnermostPos(declPos(n))
return &dwarf.Var{
Name: n.Sym.Name,
IsReturnValue: n.Class() == PPARAMOUT,
IsReturnValue: n.Class() == ir.PPARAMOUT,
IsInlFormal: n.Name.InlFormal(),
Abbrev: abbrev,
StackOffset: int32(offs),
@ -578,19 +579,19 @@ func createSimpleVar(fnsym *obj.LSym, n *Node) *dwarf.Var {
// createComplexVars creates recomposed DWARF vars with location lists,
// suitable for describing optimized code.
func createComplexVars(fnsym *obj.LSym, fn *Func) ([]*Node, []*dwarf.Var, map[*Node]bool) {
func createComplexVars(fnsym *obj.LSym, fn *ir.Func) ([]*ir.Node, []*dwarf.Var, map[*ir.Node]bool) {
debugInfo := fn.DebugInfo
// Produce a DWARF variable entry for each user variable.
var decls []*Node
var decls []*ir.Node
var vars []*dwarf.Var
ssaVars := make(map[*Node]bool)
ssaVars := make(map[*ir.Node]bool)
for varID, dvar := range debugInfo.Vars {
n := dvar.(*Node)
n := dvar.(*ir.Node)
ssaVars[n] = true
for _, slot := range debugInfo.VarSlots[varID] {
ssaVars[debugInfo.Slots[slot].N.(*Node)] = true
ssaVars[debugInfo.Slots[slot].N.(*ir.Node)] = true
}
if dvar := createComplexVar(fnsym, fn, ssa.VarID(varID)); dvar != nil {
@ -604,11 +605,11 @@ func createComplexVars(fnsym *obj.LSym, fn *Func) ([]*Node, []*dwarf.Var, map[*N
// createDwarfVars processes fn, returning a list of DWARF variables and the
// Nodes they represent.
func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node) ([]*Node, []*dwarf.Var) {
func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *ir.Func, apDecls []*ir.Node) ([]*ir.Node, []*dwarf.Var) {
// Collect a raw list of DWARF vars.
var vars []*dwarf.Var
var decls []*Node
var selected map[*Node]bool
var decls []*ir.Node
var selected map[*ir.Node]bool
if base.Ctxt.Flag_locationlists && base.Ctxt.Flag_optimize && fn.DebugInfo != nil && complexOK {
decls, vars, selected = createComplexVars(fnsym, fn)
} else {
@ -640,7 +641,7 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node)
if c == '.' || n.Type.IsUntyped() {
continue
}
if n.Class() == PPARAM && !canSSAType(n.Type) {
if n.Class() == ir.PPARAM && !canSSAType(n.Type) {
// SSA-able args get location lists, and may move in and
// out of registers, so those are handled elsewhere.
// Autos and named output params seem to get handled
@ -655,10 +656,10 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node)
typename := dwarf.InfoPrefix + typesymname(n.Type)
decls = append(decls, n)
abbrev := dwarf.DW_ABRV_AUTO_LOCLIST
isReturnValue := (n.Class() == PPARAMOUT)
if n.Class() == PPARAM || n.Class() == PPARAMOUT {
isReturnValue := (n.Class() == ir.PPARAMOUT)
if n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
} else if n.Class() == PAUTOHEAP {
} else if n.Class() == ir.PAUTOHEAP {
// If dcl in question has been promoted to heap, do a bit
// of extra work to recover original class (auto or param);
// see issue 30908. This ensures that we get the proper
@ -667,9 +668,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node)
// and not stack).
// TODO(thanm): generate a better location expression
stackcopy := n.Name.Param.Stackcopy
if stackcopy != nil && (stackcopy.Class() == PPARAM || stackcopy.Class() == PPARAMOUT) {
if stackcopy != nil && (stackcopy.Class() == ir.PPARAM || stackcopy.Class() == ir.PPARAMOUT) {
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
isReturnValue = (stackcopy.Class() == PPARAMOUT)
isReturnValue = (stackcopy.Class() == ir.PPARAMOUT)
}
}
inlIndex := 0
@ -707,9 +708,9 @@ func createDwarfVars(fnsym *obj.LSym, complexOK bool, fn *Func, apDecls []*Node)
// function that is not local to the package being compiled, then the
// names of the variables may have been "versioned" to avoid conflicts
// with local vars; disregard this versioning when sorting.
func preInliningDcls(fnsym *obj.LSym) []*Node {
fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*Node)
var rdcl []*Node
func preInliningDcls(fnsym *obj.LSym) []*ir.Node {
fn := base.Ctxt.DwFixups.GetPrecursorFunc(fnsym).(*ir.Node)
var rdcl []*ir.Node
for _, n := range fn.Func.Inl.Dcl {
c := n.Sym.Name[0]
// Avoid reporting "_" parameters, since if there are more than
@ -726,10 +727,10 @@ func preInliningDcls(fnsym *obj.LSym) []*Node {
// stack pointer, suitable for use in a DWARF location entry. This has nothing
// to do with its offset in the user variable.
func stackOffset(slot ssa.LocalSlot) int32 {
n := slot.N.(*Node)
n := slot.N.(*ir.Node)
var off int64
switch n.Class() {
case PAUTO:
case ir.PAUTO:
if base.Ctxt.FixedFrameSize() == 0 {
off -= int64(Widthptr)
}
@ -737,22 +738,22 @@ func stackOffset(slot ssa.LocalSlot) int32 {
// There is a word space for FP on ARM64 even if the frame pointer is disabled
off -= int64(Widthptr)
}
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
off += base.Ctxt.FixedFrameSize()
}
return int32(off + n.Xoffset + slot.Off)
}
// createComplexVar builds a single DWARF variable entry and location list.
func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var {
func createComplexVar(fnsym *obj.LSym, fn *ir.Func, varID ssa.VarID) *dwarf.Var {
debug := fn.DebugInfo
n := debug.Vars[varID].(*Node)
n := debug.Vars[varID].(*ir.Node)
var abbrev int
switch n.Class() {
case PAUTO:
case ir.PAUTO:
abbrev = dwarf.DW_ABRV_AUTO_LOCLIST
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
abbrev = dwarf.DW_ABRV_PARAM_LOCLIST
default:
return nil
@ -773,7 +774,7 @@ func createComplexVar(fnsym *obj.LSym, fn *Func, varID ssa.VarID) *dwarf.Var {
declpos := base.Ctxt.InnermostPos(n.Pos)
dvar := &dwarf.Var{
Name: n.Sym.Name,
IsReturnValue: n.Class() == PPARAMOUT,
IsReturnValue: n.Class() == ir.PPARAMOUT,
IsInlFormal: n.Name.InlFormal(),
Abbrev: abbrev,
Type: base.Ctxt.Lookup(typename),
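
Before the tests below, the ordering cmpstackvarlt imposes can be restated compactly. A freestanding comparator using only the keys visible in this diff and its tests (simplified fields; key order as the tests suggest):

	package main

	import (
		"fmt"
		"sort"
	)

	// Simplified stand-in for the frame-layout sort in pgen.go.
	type stackVar struct {
		auto     bool  // PAUTO, i.e. a local; everything else sorts first
		offset   int64 // used only for non-autos
		used     bool
		hasPtrs  bool
		needZero bool
		width    int64
		name     string
	}

	func less(a, b stackVar) bool {
		if a.auto != b.auto {
			return b.auto // non-autos first
		}
		if !a.auto {
			return a.offset < b.offset
		}
		if a.used != b.used {
			return a.used // used before unused
		}
		if a.hasPtrs != b.hasPtrs {
			return a.hasPtrs // pointer-bearing before pointer-free
		}
		if a.needZero != b.needZero {
			return a.needZero // needs-zeroing before not
		}
		if a.width != b.width {
			return a.width > b.width // wider before narrower
		}
		return a.name < b.name // finally, by name
	}

	func main() {
		vars := []stackVar{
			{auto: true, used: true, width: 8, name: "b"},
			{auto: false, offset: 16},
			{auto: true, used: true, hasPtrs: true, width: 8, name: "a"},
		}
		sort.Slice(vars, func(i, j int) bool { return less(vars[i], vars[j]) })
		fmt.Println(vars[0].auto, vars[1].name, vars[2].name) // false a b
	}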


@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"reflect"
"sort"
@ -12,133 +13,133 @@ import (
)
func typeWithoutPointers() *types.Type {
t := types.New(TSTRUCT)
f := &types.Field{Type: types.New(TINT)}
t := types.New(types.TSTRUCT)
f := &types.Field{Type: types.New(types.TINT)}
t.SetFields([]*types.Field{f})
return t
}
func typeWithPointers() *types.Type {
t := types.New(TSTRUCT)
f := &types.Field{Type: types.NewPtr(types.New(TINT))}
t := types.New(types.TSTRUCT)
f := &types.Field{Type: types.NewPtr(types.New(types.TINT))}
t.SetFields([]*types.Field{f})
return t
}
func markUsed(n *Node) *Node {
func markUsed(n *ir.Node) *ir.Node {
n.Name.SetUsed(true)
return n
}
func markNeedZero(n *Node) *Node {
func markNeedZero(n *ir.Node) *ir.Node {
n.Name.SetNeedzero(true)
return n
}
// Test all code paths for cmpstackvarlt.
func TestCmpstackvar(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl Class) *Node {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Node {
if s == nil {
s = &types.Sym{Name: "."}
}
n := newname(s)
n := NewName(s)
n.Type = t
n.Xoffset = xoffset
n.SetClass(cl)
return n
}
testdata := []struct {
a, b *Node
a, b *ir.Node
lt bool
}{
{
nod(0, nil, nil, PAUTO),
nod(0, nil, nil, PFUNC),
nod(0, nil, nil, ir.PAUTO),
nod(0, nil, nil, ir.PFUNC),
false,
},
{
nod(0, nil, nil, PFUNC),
nod(0, nil, nil, PAUTO),
nod(0, nil, nil, ir.PFUNC),
nod(0, nil, nil, ir.PAUTO),
true,
},
{
nod(0, nil, nil, PFUNC),
nod(10, nil, nil, PFUNC),
nod(0, nil, nil, ir.PFUNC),
nod(10, nil, nil, ir.PFUNC),
true,
},
{
nod(20, nil, nil, PFUNC),
nod(10, nil, nil, PFUNC),
nod(20, nil, nil, ir.PFUNC),
nod(10, nil, nil, ir.PFUNC),
false,
},
{
nod(10, nil, nil, PFUNC),
nod(10, nil, nil, PFUNC),
nod(10, nil, nil, ir.PFUNC),
nod(10, nil, nil, ir.PFUNC),
false,
},
{
nod(10, nil, nil, PPARAM),
nod(20, nil, nil, PPARAMOUT),
nod(10, nil, nil, ir.PPARAM),
nod(20, nil, nil, ir.PPARAMOUT),
true,
},
{
nod(10, nil, nil, PPARAMOUT),
nod(20, nil, nil, PPARAM),
nod(10, nil, nil, ir.PPARAMOUT),
nod(20, nil, nil, ir.PPARAM),
true,
},
{
markUsed(nod(0, nil, nil, PAUTO)),
nod(0, nil, nil, PAUTO),
markUsed(nod(0, nil, nil, ir.PAUTO)),
nod(0, nil, nil, ir.PAUTO),
true,
},
{
nod(0, nil, nil, PAUTO),
markUsed(nod(0, nil, nil, PAUTO)),
nod(0, nil, nil, ir.PAUTO),
markUsed(nod(0, nil, nil, ir.PAUTO)),
false,
},
{
nod(0, typeWithoutPointers(), nil, PAUTO),
nod(0, typeWithPointers(), nil, PAUTO),
nod(0, typeWithoutPointers(), nil, ir.PAUTO),
nod(0, typeWithPointers(), nil, ir.PAUTO),
false,
},
{
nod(0, typeWithPointers(), nil, PAUTO),
nod(0, typeWithoutPointers(), nil, PAUTO),
nod(0, typeWithPointers(), nil, ir.PAUTO),
nod(0, typeWithoutPointers(), nil, ir.PAUTO),
true,
},
{
markNeedZero(nod(0, &types.Type{}, nil, PAUTO)),
nod(0, &types.Type{}, nil, PAUTO),
markNeedZero(nod(0, &types.Type{}, nil, ir.PAUTO)),
nod(0, &types.Type{}, nil, ir.PAUTO),
true,
},
{
nod(0, &types.Type{}, nil, PAUTO),
markNeedZero(nod(0, &types.Type{}, nil, PAUTO)),
nod(0, &types.Type{}, nil, ir.PAUTO),
markNeedZero(nod(0, &types.Type{}, nil, ir.PAUTO)),
false,
},
{
nod(0, &types.Type{Width: 1}, nil, PAUTO),
nod(0, &types.Type{Width: 2}, nil, PAUTO),
nod(0, &types.Type{Width: 1}, nil, ir.PAUTO),
nod(0, &types.Type{Width: 2}, nil, ir.PAUTO),
false,
},
{
nod(0, &types.Type{Width: 2}, nil, PAUTO),
nod(0, &types.Type{Width: 1}, nil, PAUTO),
nod(0, &types.Type{Width: 2}, nil, ir.PAUTO),
nod(0, &types.Type{Width: 1}, nil, ir.PAUTO),
true,
},
{
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
true,
},
{
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
false,
},
{
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
false,
},
}
@ -155,42 +156,42 @@ func TestCmpstackvar(t *testing.T) {
}
func TestStackvarSort(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl Class) *Node {
n := newname(s)
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Node {
n := NewName(s)
n.Type = t
n.Xoffset = xoffset
n.SetClass(cl)
return n
}
inp := []*Node{
nod(0, &types.Type{}, &types.Sym{}, PFUNC),
nod(0, &types.Type{}, &types.Sym{}, PAUTO),
nod(0, &types.Type{}, &types.Sym{}, PFUNC),
nod(10, &types.Type{}, &types.Sym{}, PFUNC),
nod(20, &types.Type{}, &types.Sym{}, PFUNC),
markUsed(nod(0, &types.Type{}, &types.Sym{}, PAUTO)),
nod(0, typeWithoutPointers(), &types.Sym{}, PAUTO),
nod(0, &types.Type{}, &types.Sym{}, PAUTO),
markNeedZero(nod(0, &types.Type{}, &types.Sym{}, PAUTO)),
nod(0, &types.Type{Width: 1}, &types.Sym{}, PAUTO),
nod(0, &types.Type{Width: 2}, &types.Sym{}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, PAUTO),
inp := []*ir.Node{
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(10, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(20, &types.Type{}, &types.Sym{}, ir.PFUNC),
markUsed(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
nod(0, typeWithoutPointers(), &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
markNeedZero(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
nod(0, &types.Type{Width: 1}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{Width: 2}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
}
want := []*Node{
nod(0, &types.Type{}, &types.Sym{}, PFUNC),
nod(0, &types.Type{}, &types.Sym{}, PFUNC),
nod(10, &types.Type{}, &types.Sym{}, PFUNC),
nod(20, &types.Type{}, &types.Sym{}, PFUNC),
markUsed(nod(0, &types.Type{}, &types.Sym{}, PAUTO)),
markNeedZero(nod(0, &types.Type{}, &types.Sym{}, PAUTO)),
nod(0, &types.Type{Width: 2}, &types.Sym{}, PAUTO),
nod(0, &types.Type{Width: 1}, &types.Sym{}, PAUTO),
nod(0, &types.Type{}, &types.Sym{}, PAUTO),
nod(0, &types.Type{}, &types.Sym{}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, PAUTO),
nod(0, typeWithoutPointers(), &types.Sym{}, PAUTO),
want := []*ir.Node{
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(0, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(10, &types.Type{}, &types.Sym{}, ir.PFUNC),
nod(20, &types.Type{}, &types.Sym{}, ir.PFUNC),
markUsed(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
markNeedZero(nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO)),
nod(0, &types.Type{Width: 2}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{Width: 1}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "abc"}, ir.PAUTO),
nod(0, &types.Type{}, &types.Sym{Name: "xyz"}, ir.PAUTO),
nod(0, typeWithoutPointers(), &types.Sym{}, ir.PAUTO),
}
sort.Sort(byStackVar(inp))
if !reflect.DeepEqual(want, inp) {


@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/src"
@ -40,11 +41,11 @@ func (s *state) insertPhis() {
}
type phiState struct {
s *state // SSA state
f *ssa.Func // function to work on
defvars []map[*Node]*ssa.Value // defined variables at end of each block
s *state // SSA state
f *ssa.Func // function to work on
defvars []map[*ir.Node]*ssa.Value // defined variables at end of each block
varnum map[*Node]int32 // variable numbering
varnum map[*ir.Node]int32 // variable numbering
// properties of the dominator tree
idom []*ssa.Block // dominator parents
@ -70,15 +71,15 @@ func (s *phiState) insertPhis() {
// Find all the variables for which we need to match up reads & writes.
// This step prunes any basic-block-only variables from consideration.
// Generate a numbering for these variables.
s.varnum = map[*Node]int32{}
var vars []*Node
s.varnum = map[*ir.Node]int32{}
var vars []*ir.Node
var vartypes []*types.Type
for _, b := range s.f.Blocks {
for _, v := range b.Values {
if v.Op != ssa.OpFwdRef {
continue
}
var_ := v.Aux.(*Node)
var_ := v.Aux.(*ir.Node)
// Optimization: look back 1 block for the definition.
if len(b.Preds) == 1 {
@ -183,7 +184,7 @@ levels:
}
}
func (s *phiState) insertVarPhis(n int, var_ *Node, defs []*ssa.Block, typ *types.Type) {
func (s *phiState) insertVarPhis(n int, var_ *ir.Node, defs []*ssa.Block, typ *types.Type) {
priq := &s.priq
q := s.q
queued := s.queued
@ -318,7 +319,7 @@ func (s *phiState) resolveFwdRefs() {
if v.Op != ssa.OpFwdRef {
continue
}
n := s.varnum[v.Aux.(*Node)]
n := s.varnum[v.Aux.(*ir.Node)]
v.Op = ssa.OpCopy
v.Aux = nil
v.AddArg(values[n])
@ -432,11 +433,11 @@ func (s *sparseSet) clear() {
// Variant to use for small functions.
type simplePhiState struct {
s *state // SSA state
f *ssa.Func // function to work on
fwdrefs []*ssa.Value // list of FwdRefs to be processed
defvars []map[*Node]*ssa.Value // defined variables at end of each block
reachable []bool // which blocks are reachable
s *state // SSA state
f *ssa.Func // function to work on
fwdrefs []*ssa.Value // list of FwdRefs to be processed
defvars []map[*ir.Node]*ssa.Value // defined variables at end of each block
reachable []bool // which blocks are reachable
}
func (s *simplePhiState) insertPhis() {
@ -449,7 +450,7 @@ func (s *simplePhiState) insertPhis() {
continue
}
s.fwdrefs = append(s.fwdrefs, v)
var_ := v.Aux.(*Node)
var_ := v.Aux.(*ir.Node)
if _, ok := s.defvars[b.ID][var_]; !ok {
s.defvars[b.ID][var_] = v // treat FwdDefs as definitions.
}
@ -463,7 +464,7 @@ loop:
v := s.fwdrefs[len(s.fwdrefs)-1]
s.fwdrefs = s.fwdrefs[:len(s.fwdrefs)-1]
b := v.Block
var_ := v.Aux.(*Node)
var_ := v.Aux.(*ir.Node)
if b == s.f.Entry {
// No variable should be live at entry.
s.s.Fatalf("Value live at entry. It shouldn't be. func %s, node %v, value %v", s.f.Name, var_, v)
@ -511,7 +512,7 @@ loop:
}
// lookupVarOutgoing finds the variable's value at the end of block b.
func (s *simplePhiState) lookupVarOutgoing(b *ssa.Block, t *types.Type, var_ *Node, line src.XPos) *ssa.Value {
func (s *simplePhiState) lookupVarOutgoing(b *ssa.Block, t *types.Type, var_ *ir.Node, line src.XPos) *ssa.Value {
for {
if v := s.defvars[b.ID][var_]; v != nil {
return v
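
For reference, the source shape that leaves a FwdRef needing a phi is a variable with two reaching definitions at a control-flow join. A minimal runnable sketch in ordinary Go (user code, not compiler internals):

        package main

        import "fmt"

        // x has two reaching definitions at the return: the zero from
        // the entry block and the assignment in the if body. The SSA
        // builder records a FwdRef at the join, and insertPhis later
        // replaces it with a phi merging the two values.
        func f(c bool) int {
                x := 0
                if c {
                        x = 1
                }
                return x // becomes Phi(x@entry, x@if) in SSA form
        }

        func main() {
                fmt.Println(f(true), f(false)) // 1 0
        }
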


@ -16,6 +16,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
@ -100,10 +101,10 @@ type BlockEffects struct {
// A collection of global state used by liveness analysis.
type Liveness struct {
fn *Node
fn *ir.Node
f *ssa.Func
vars []*Node
idx map[*Node]int32
vars []*ir.Node
idx map[*ir.Node]int32
stkptrsize int64
be []BlockEffects
@ -205,20 +206,20 @@ type progeffectscache struct {
// nor do we care about non-local variables,
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func livenessShouldTrack(n *Node) bool {
return n.Op == ONAME && (n.Class() == PAUTO || n.Class() == PPARAM || n.Class() == PPARAMOUT) && n.Type.HasPointers()
func livenessShouldTrack(n *ir.Node) bool {
return n.Op == ir.ONAME && (n.Class() == ir.PAUTO || n.Class() == ir.PPARAM || n.Class() == ir.PPARAMOUT) && n.Type.HasPointers()
}
// getvariables returns the list of on-stack variables that we need to track
// and a map for looking up indices by *Node.
func getvariables(fn *Node) ([]*Node, map[*Node]int32) {
var vars []*Node
func getvariables(fn *ir.Node) ([]*ir.Node, map[*ir.Node]int32) {
var vars []*ir.Node
for _, n := range fn.Func.Dcl {
if livenessShouldTrack(n) {
vars = append(vars, n)
}
}
idx := make(map[*Node]int32, len(vars))
idx := make(map[*ir.Node]int32, len(vars))
for i, n := range vars {
idx[n] = int32(i)
}
@ -234,7 +235,7 @@ func (lv *Liveness) initcache() {
for i, node := range lv.vars {
switch node.Class() {
case PPARAM:
case ir.PPARAM:
// A return instruction with a p.to is a tail return, which brings
// the stack pointer back up (if it ever went down) and then jumps
// to a new function entirely. That form of instruction must read
@ -243,7 +244,7 @@ func (lv *Liveness) initcache() {
// function runs.
lv.cache.tailuevar = append(lv.cache.tailuevar, int32(i))
case PPARAMOUT:
case ir.PPARAMOUT:
// All results are live at every return point.
// Note that this point is after escaping return values
// are copied back to the stack using their PAUTOHEAP references.
@ -271,7 +272,7 @@ const (
// If v does not affect any tracked variables, it returns -1, 0.
func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
n, e := affectedNode(v)
if e == 0 || n == nil || n.Op != ONAME { // cheapest checks first
if e == 0 || n == nil || n.Op != ir.ONAME { // cheapest checks first
return -1, 0
}
@ -311,7 +312,7 @@ func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
}
// affectedNode returns the *Node affected by v
func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
func affectedNode(v *ssa.Value) (*ir.Node, ssa.SymEffect) {
// Special cases.
switch v.Op {
case ssa.OpLoadReg:
@ -322,9 +323,9 @@ func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
return n, ssa.SymWrite
case ssa.OpVarLive:
return v.Aux.(*Node), ssa.SymRead
return v.Aux.(*ir.Node), ssa.SymRead
case ssa.OpVarDef, ssa.OpVarKill:
return v.Aux.(*Node), ssa.SymWrite
return v.Aux.(*ir.Node), ssa.SymWrite
case ssa.OpKeepAlive:
n, _ := AutoVar(v.Args[0])
return n, ssa.SymRead
@ -339,7 +340,7 @@ func affectedNode(v *ssa.Value) (*Node, ssa.SymEffect) {
case nil, *obj.LSym:
// ok, but no node
return nil, e
case *Node:
case *ir.Node:
return a, e
default:
base.Fatalf("weird aux: %s", v.LongString())
@ -355,7 +356,7 @@ type livenessFuncCache struct {
// Constructs a new liveness structure used to hold the global state of the
// liveness computation. The cfg argument is a slice of *BasicBlocks and the
// vars argument is a slice of *Nodes.
func newliveness(fn *Node, f *ssa.Func, vars []*Node, idx map[*Node]int32, stkptrsize int64) *Liveness {
func newliveness(fn *ir.Node, f *ssa.Func, vars []*ir.Node, idx map[*ir.Node]int32, stkptrsize int64) *Liveness {
lv := &Liveness{
fn: fn,
f: f,
@ -416,20 +417,20 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
}
switch t.Etype {
case TPTR, TUNSAFEPTR, TFUNC, TCHAN, TMAP:
case types.TPTR, types.TUNSAFEPTR, types.TFUNC, types.TCHAN, types.TMAP:
if off&int64(Widthptr-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(Widthptr))) // pointer
case TSTRING:
case types.TSTRING:
// struct { byte *str; intgo len; }
if off&int64(Widthptr-1) != 0 {
base.Fatalf("onebitwalktype1: invalid alignment, %v", t)
}
bv.Set(int32(off / int64(Widthptr))) //pointer in first slot
case TINTER:
case types.TINTER:
// struct { Itab *tab; void *data; }
// or, when isnilinter(t)==true:
// struct { Type *type; void *data; }
@ -450,14 +451,14 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
// well as scan itabs to update their itab._type fields).
bv.Set(int32(off/int64(Widthptr) + 1)) // pointer in second slot
case TSLICE:
case types.TSLICE:
// struct { byte *array; uintgo len; uintgo cap; }
if off&int64(Widthptr-1) != 0 {
base.Fatalf("onebitwalktype1: invalid TARRAY alignment, %v", t)
}
bv.Set(int32(off / int64(Widthptr))) // pointer in first slot (BitsPointer)
case TARRAY:
case types.TARRAY:
elt := t.Elem()
if elt.Width == 0 {
// Short-circuit for #20739.
@ -468,7 +469,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
off += elt.Width
}
case TSTRUCT:
case types.TSTRUCT:
for _, f := range t.Fields().Slice() {
onebitwalktype1(f.Type, off+f.Offset, bv)
}
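
To illustrate the bitmaps this builds — one bit per pointer-sized word, set when the word holds a pointer — here is a toy, runnable re-creation over a made-up field list (field and bitmap are hypothetical names; 64-bit layout assumed):

        package main

        import "fmt"

        const ptrSize = 8 // stands in for Widthptr on a 64-bit target

        // field is a toy struct field: its byte offset and whether the
        // word at that offset holds a pointer.
        type field struct {
                off   int64
                isPtr bool
        }

        // bitmap mirrors the shape of onebitwalktype1's output: bit i
        // describes the word at offset i*ptrSize.
        func bitmap(size int64, fields []field) []bool {
                bv := make([]bool, size/ptrSize)
                for _, f := range fields {
                        if f.isPtr {
                                bv[f.off/ptrSize] = true
                        }
                }
                return bv
        }

        func main() {
                // struct { p *int; n int64; q *int } -> [true false true]
                fmt.Println(bitmap(24, []field{{0, true}, {8, false}, {16, true}}))
        }
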
@ -481,7 +482,7 @@ func onebitwalktype1(t *types.Type, off int64, bv bvec) {
// Generates live pointer value maps for arguments and local variables. The
// this argument and the in arguments are always assumed live. The vars
// argument is a slice of *Nodes.
func (lv *Liveness) pointerMap(liveout bvec, vars []*Node, args, locals bvec) {
func (lv *Liveness) pointerMap(liveout bvec, vars []*ir.Node, args, locals bvec) {
for i := int32(0); ; i++ {
i = liveout.Next(i)
if i < 0 {
@ -489,10 +490,10 @@ func (lv *Liveness) pointerMap(liveout bvec, vars []*Node, args, locals bvec) {
}
node := vars[i]
switch node.Class() {
case PAUTO:
case ir.PAUTO:
onebitwalktype1(node.Type, node.Xoffset+lv.stkptrsize, locals)
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
onebitwalktype1(node.Type, node.Xoffset, args)
}
}
@ -789,7 +790,7 @@ func (lv *Liveness) epilogue() {
// don't need to keep the stack copy live?
if lv.fn.Func.HasDefer() {
for i, n := range lv.vars {
if n.Class() == PPARAMOUT {
if n.Class() == ir.PPARAMOUT {
if n.Name.IsOutputParamHeapAddr() {
// Just to be paranoid. Heap addresses are PAUTOs.
base.Fatalf("variable %v both output param and heap output param", n)
@ -887,7 +888,7 @@ func (lv *Liveness) epilogue() {
if !liveout.Get(int32(i)) {
continue
}
if n.Class() == PPARAM {
if n.Class() == ir.PPARAM {
continue // ok
}
base.Fatalf("bad live variable at entry of %v: %L", lv.fn.Func.Nname, n)
@ -920,7 +921,7 @@ func (lv *Liveness) epilogue() {
// the only things that can possibly be live are the
// input parameters.
for j, n := range lv.vars {
if n.Class() != PPARAM && lv.stackMaps[0].Get(int32(j)) {
if n.Class() != ir.PPARAM && lv.stackMaps[0].Get(int32(j)) {
lv.f.Fatalf("%v %L recorded as live on entry", lv.fn.Func.Nname, n)
}
}
@ -967,7 +968,7 @@ func (lv *Liveness) compact(b *ssa.Block) {
}
func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
if base.Flag.Live == 0 || lv.fn.funcname() == "init" || strings.HasPrefix(lv.fn.funcname(), ".") {
if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
return
}
if !(v == nil || v.Op.IsCall()) {
@ -986,7 +987,7 @@ func (lv *Liveness) showlive(v *ssa.Value, live bvec) {
s := "live at "
if v == nil {
s += fmt.Sprintf("entry to %s:", lv.fn.funcname())
s += fmt.Sprintf("entry to %s:", ir.FuncName(lv.fn))
} else if sym, ok := v.Aux.(*ssa.AuxCall); ok && sym.Fn != nil {
fn := sym.Fn.Name
if pos := strings.Index(fn, "."); pos >= 0 {
@ -1051,7 +1052,7 @@ func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bo
// This format synthesizes the information used during the multiple passes
// into a single presentation.
func (lv *Liveness) printDebug() {
fmt.Printf("liveness: %s\n", lv.fn.funcname())
fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))
for i, b := range lv.f.Blocks {
if i > 0 {
@ -1163,10 +1164,10 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
// Size args bitmaps to be just large enough to hold the largest pointer.
// First, find the largest Xoffset node we care about.
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
var maxArgNode *Node
var maxArgNode *ir.Node
for _, n := range lv.vars {
switch n.Class() {
case PPARAM, PPARAMOUT:
case ir.PPARAM, ir.PPARAMOUT:
if maxArgNode == nil || n.Xoffset > maxArgNode.Xoffset {
maxArgNode = n
}
@ -1265,7 +1266,7 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
}
// Emit the live pointer map data structures
ls := e.curfn.Func.lsym
ls := e.curfn.Func.LSym
fninfo := ls.Func()
fninfo.GCArgs, fninfo.GCLocals = lv.emit()
@ -1300,16 +1301,16 @@ func liveness(e *ssafn, f *ssa.Func, pp *Progs) LivenessMap {
func isfat(t *types.Type) bool {
if t != nil {
switch t.Etype {
case TSLICE, TSTRING,
TINTER: // maybe remove later
case types.TSLICE, types.TSTRING,
types.TINTER: // maybe remove later
return true
case TARRAY:
case types.TARRAY:
// Array of 1 element, check if element is fat
if t.NumElem() == 1 {
return isfat(t.Elem())
}
return true
case TSTRUCT:
case types.TSTRUCT:
// Struct with 1 field, check if field is fat
if t.NumFields() == 1 {
return isfat(t.Field(0).Type)


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
"cmd/internal/sys"
@ -59,8 +60,8 @@ func ispkgin(pkgs []string) bool {
return false
}
func instrument(fn *Node) {
if fn.Func.Pragma&Norace != 0 {
func instrument(fn *ir.Node) {
if fn.Func.Pragma&ir.Norace != 0 {
return
}
@ -82,8 +83,8 @@ func instrument(fn *Node) {
// This only works for amd64. This will not
// work on arm or others that might support
// race in the future.
nodpc := nodfp.copy()
nodpc.Type = types.Types[TUINTPTR]
nodpc := ir.Copy(nodfp)
nodpc.Type = types.Types[types.TUINTPTR]
nodpc.Xoffset = int64(-Widthptr)
fn.Func.Dcl = append(fn.Func.Dcl, nodpc)
fn.Func.Enter.Prepend(mkcall("racefuncenter", nil, nil, nodpc))


@ -6,13 +6,14 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/sys"
"unicode/utf8"
)
// range
func typecheckrange(n *Node) {
func typecheckrange(n *ir.Node) {
// Typechecking order is important here:
// 0. first typecheck range expression (slice/map/chan),
// it is evaluated only once and so logically it is not part of the loop.
@ -38,7 +39,7 @@ func typecheckrange(n *Node) {
decldepth--
}
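
The ordering comment above is load-bearing: the range expression is typechecked first because, per the spec, it is evaluated exactly once before the loop begins, so a call in range position must not be re-typed (or re-run) per iteration. Runnable illustration (nums is a hypothetical helper):

        package main

        import "fmt"

        var calls int

        func nums() []int {
                calls++ // runs once, no matter how many iterations follow
                return []int{10, 20, 30}
        }

        func main() {
                for i, v := range nums() {
                        fmt.Println(i, v)
                }
                fmt.Println("calls:", calls) // calls: 1
        }
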
func typecheckrangeExpr(n *Node) {
func typecheckrangeExpr(n *ir.Node) {
n.Right = typecheck(n.Right, ctxExpr)
t := n.Right.Type
@ -65,15 +66,15 @@ func typecheckrangeExpr(n *Node) {
base.ErrorfAt(n.Pos, "cannot range over %L", n.Right)
return
case TARRAY, TSLICE:
t1 = types.Types[TINT]
case types.TARRAY, types.TSLICE:
t1 = types.Types[types.TINT]
t2 = t.Elem()
case TMAP:
case types.TMAP:
t1 = t.Key()
t2 = t.Elem()
case TCHAN:
case types.TCHAN:
if !t.ChanDir().CanRecv() {
base.ErrorfAt(n.Pos, "invalid operation: range %v (receive from send-only type %v)", n.Right, n.Right.Type)
return
@ -85,8 +86,8 @@ func typecheckrangeExpr(n *Node) {
toomany = true
}
case TSTRING:
t1 = types.Types[TINT]
case types.TSTRING:
t1 = types.Types[types.TINT]
t2 = types.Runetype
}
@ -94,7 +95,7 @@ func typecheckrangeExpr(n *Node) {
base.ErrorfAt(n.Pos, "too many variables in range")
}
var v1, v2 *Node
var v1, v2 *ir.Node
if n.List.Len() != 0 {
v1 = n.List.First()
}
@ -106,7 +107,7 @@ func typecheckrangeExpr(n *Node) {
// "if the second iteration variable is the blank identifier, the range
// clause is equivalent to the same clause with only the first variable
// present."
if v2.isBlank() {
if ir.IsBlank(v2) {
if v1 != nil {
n.List.Set1(v1)
}
@ -117,7 +118,7 @@ func typecheckrangeExpr(n *Node) {
if v1.Name != nil && v1.Name.Defn == n {
v1.Type = t1
} else if v1.Type != nil {
if op, why := assignop(t1, v1.Type); op == OXXX {
if op, why := assignop(t1, v1.Type); op == ir.OXXX {
base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t1, v1, why)
}
}
@ -128,7 +129,7 @@ func typecheckrangeExpr(n *Node) {
if v2.Name != nil && v2.Name.Defn == n {
v2.Type = t2
} else if v2.Type != nil {
if op, why := assignop(t2, v2.Type); op == OXXX {
if op, why := assignop(t2, v2.Type); op == ir.OXXX {
base.ErrorfAt(n.Pos, "cannot assign type %v to %L in range%s", t2, v2, why)
}
}
@ -156,7 +157,7 @@ func cheapComputableIndex(width int64) bool {
// simpler forms. The result must be assigned back to n.
// Node n may also be modified in place, and may also be
// the returned node.
func walkrange(n *Node) *Node {
func walkrange(n *ir.Node) *ir.Node {
if isMapClear(n) {
m := n.Right
lno := setlineno(m)
@ -178,7 +179,7 @@ func walkrange(n *Node) *Node {
lno := setlineno(a)
n.Right = nil
var v1, v2 *Node
var v1, v2 *ir.Node
l := n.List.Len()
if l > 0 {
v1 = n.List.First()
@ -188,11 +189,11 @@ func walkrange(n *Node) *Node {
v2 = n.List.Second()
}
if v2.isBlank() {
if ir.IsBlank(v2) {
v2 = nil
}
if v1.isBlank() && v2 == nil {
if ir.IsBlank(v1) && v2 == nil {
v1 = nil
}
@ -204,17 +205,17 @@ func walkrange(n *Node) *Node {
// to avoid erroneous processing by racewalk.
n.List.Set(nil)
var ifGuard *Node
var ifGuard *ir.Node
translatedLoopOp := OFOR
translatedLoopOp := ir.OFOR
var body []*Node
var init []*Node
var body []*ir.Node
var init []*ir.Node
switch t.Etype {
default:
base.Fatalf("walkrange")
case TARRAY, TSLICE:
case types.TARRAY, types.TSLICE:
if arrayClear(n, v1, v2, a) {
base.Pos = lno
return n
@ -223,14 +224,14 @@ func walkrange(n *Node) *Node {
// order.stmt arranged for a copy of the array/slice variable if needed.
ha := a
hv1 := temp(types.Types[TINT])
hn := temp(types.Types[TINT])
hv1 := temp(types.Types[types.TINT])
hn := temp(types.Types[types.TINT])
init = append(init, nod(OAS, hv1, nil))
init = append(init, nod(OAS, hn, nod(OLEN, ha, nil)))
init = append(init, ir.Nod(ir.OAS, hv1, nil))
init = append(init, ir.Nod(ir.OAS, hn, ir.Nod(ir.OLEN, ha, nil)))
n.Left = nod(OLT, hv1, hn)
n.Right = nod(OAS, hv1, nod(OADD, hv1, nodintconst(1)))
n.Left = ir.Nod(ir.OLT, hv1, hn)
n.Right = ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1)))
// for range ha { body }
if v1 == nil {
@ -239,21 +240,21 @@ func walkrange(n *Node) *Node {
// for v1 := range ha { body }
if v2 == nil {
body = []*Node{nod(OAS, v1, hv1)}
body = []*ir.Node{ir.Nod(ir.OAS, v1, hv1)}
break
}
// for v1, v2 := range ha { body }
if cheapComputableIndex(n.Type.Elem().Width) {
// v1, v2 = hv1, ha[hv1]
tmp := nod(OINDEX, ha, hv1)
tmp := ir.Nod(ir.OINDEX, ha, hv1)
tmp.SetBounded(true)
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := nod(OAS2, nil, nil)
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(hv1, tmp)
body = []*Node{a}
body = []*ir.Node{a}
break
}
@ -269,20 +270,20 @@ func walkrange(n *Node) *Node {
// TODO(austin): OFORUNTIL inhibits bounds-check
// elimination on the index variable (see #20711).
// Enhance the prove pass to understand this.
ifGuard = nod(OIF, nil, nil)
ifGuard.Left = nod(OLT, hv1, hn)
translatedLoopOp = OFORUNTIL
ifGuard = ir.Nod(ir.OIF, nil, nil)
ifGuard.Left = ir.Nod(ir.OLT, hv1, hn)
translatedLoopOp = ir.OFORUNTIL
hp := temp(types.NewPtr(n.Type.Elem()))
tmp := nod(OINDEX, ha, nodintconst(0))
tmp := ir.Nod(ir.OINDEX, ha, nodintconst(0))
tmp.SetBounded(true)
init = append(init, nod(OAS, hp, nod(OADDR, tmp, nil)))
init = append(init, ir.Nod(ir.OAS, hp, ir.Nod(ir.OADDR, tmp, nil)))
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
a := nod(OAS2, nil, nil)
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(hv1, nod(ODEREF, hp, nil))
a.Rlist.Set2(hv1, ir.Nod(ir.ODEREF, hp, nil))
body = append(body, a)
// Advance pointer as part of the late increment.
@ -290,11 +291,11 @@ func walkrange(n *Node) *Node {
// This runs *after* the condition check, so we know
// advancing the pointer is safe and won't go past the
// end of the allocation.
a = nod(OAS, hp, addptr(hp, t.Elem().Width))
a = ir.Nod(ir.OAS, hp, addptr(hp, t.Elem().Width))
a = typecheck(a, ctxStmt)
n.List.Set1(a)
case TMAP:
case types.TMAP:
// order.stmt allocated the iterator for us.
// we only use a once, so no copy needed.
ha := a
@ -308,29 +309,29 @@ func walkrange(n *Node) *Node {
fn := syslook("mapiterinit")
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, nod(OADDR, hit, nil)))
n.Left = nod(ONE, nodSym(ODOT, hit, keysym), nodnil())
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, ir.Nod(ir.OADDR, hit, nil)))
n.Left = ir.Nod(ir.ONE, nodSym(ir.ODOT, hit, keysym), nodnil())
fn = syslook("mapiternext")
fn = substArgTypes(fn, th)
n.Right = mkcall1(fn, nil, nil, nod(OADDR, hit, nil))
n.Right = mkcall1(fn, nil, nil, ir.Nod(ir.OADDR, hit, nil))
key := nodSym(ODOT, hit, keysym)
key = nod(ODEREF, key, nil)
key := nodSym(ir.ODOT, hit, keysym)
key = ir.Nod(ir.ODEREF, key, nil)
if v1 == nil {
body = nil
} else if v2 == nil {
body = []*Node{nod(OAS, v1, key)}
body = []*ir.Node{ir.Nod(ir.OAS, v1, key)}
} else {
elem := nodSym(ODOT, hit, elemsym)
elem = nod(ODEREF, elem, nil)
a := nod(OAS2, nil, nil)
elem := nodSym(ir.ODOT, hit, elemsym)
elem = ir.Nod(ir.ODEREF, elem, nil)
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(key, elem)
body = []*Node{a}
body = []*ir.Node{a}
}
case TCHAN:
case types.TCHAN:
// order.stmt arranged for a copy of the channel variable.
ha := a
@ -339,27 +340,27 @@ func walkrange(n *Node) *Node {
hv1 := temp(t.Elem())
hv1.SetTypecheck(1)
if t.Elem().HasPointers() {
init = append(init, nod(OAS, hv1, nil))
init = append(init, ir.Nod(ir.OAS, hv1, nil))
}
hb := temp(types.Types[TBOOL])
hb := temp(types.Types[types.TBOOL])
n.Left = nod(ONE, hb, nodbool(false))
a := nod(OAS2RECV, nil, nil)
n.Left = ir.Nod(ir.ONE, hb, nodbool(false))
a := ir.Nod(ir.OAS2RECV, nil, nil)
a.SetTypecheck(1)
a.List.Set2(hv1, hb)
a.Right = nod(ORECV, ha, nil)
a.Right = ir.Nod(ir.ORECV, ha, nil)
n.Left.Ninit.Set1(a)
if v1 == nil {
body = nil
} else {
body = []*Node{nod(OAS, v1, hv1)}
body = []*ir.Node{ir.Nod(ir.OAS, v1, hv1)}
}
// Zero hv1. This prevents hv1 from being the sole, inaccessible
// reference to an otherwise GC-able value during the next channel receive.
// See issue 15281.
body = append(body, nod(OAS, hv1, nil))
body = append(body, ir.Nod(ir.OAS, hv1, nil))
case TSTRING:
case types.TSTRING:
// Transform string range statements like "for v1, v2 = range a" into
//
// ha := a
@ -378,35 +379,35 @@ func walkrange(n *Node) *Node {
// order.stmt arranged for a copy of the string variable.
ha := a
hv1 := temp(types.Types[TINT])
hv1t := temp(types.Types[TINT])
hv1 := temp(types.Types[types.TINT])
hv1t := temp(types.Types[types.TINT])
hv2 := temp(types.Runetype)
// hv1 := 0
init = append(init, nod(OAS, hv1, nil))
init = append(init, ir.Nod(ir.OAS, hv1, nil))
// hv1 < len(ha)
n.Left = nod(OLT, hv1, nod(OLEN, ha, nil))
n.Left = ir.Nod(ir.OLT, hv1, ir.Nod(ir.OLEN, ha, nil))
if v1 != nil {
// hv1t = hv1
body = append(body, nod(OAS, hv1t, hv1))
body = append(body, ir.Nod(ir.OAS, hv1t, hv1))
}
// hv2 := rune(ha[hv1])
nind := nod(OINDEX, ha, hv1)
nind := ir.Nod(ir.OINDEX, ha, hv1)
nind.SetBounded(true)
body = append(body, nod(OAS, hv2, conv(nind, types.Runetype)))
body = append(body, ir.Nod(ir.OAS, hv2, conv(nind, types.Runetype)))
// if hv2 < utf8.RuneSelf
nif := nod(OIF, nil, nil)
nif.Left = nod(OLT, hv2, nodintconst(utf8.RuneSelf))
nif := ir.Nod(ir.OIF, nil, nil)
nif.Left = ir.Nod(ir.OLT, hv2, nodintconst(utf8.RuneSelf))
// hv1++
nif.Nbody.Set1(nod(OAS, hv1, nod(OADD, hv1, nodintconst(1))))
nif.Nbody.Set1(ir.Nod(ir.OAS, hv1, ir.Nod(ir.OADD, hv1, nodintconst(1))))
// } else {
eif := nod(OAS2, nil, nil)
eif := ir.Nod(ir.OAS2, nil, nil)
nif.Rlist.Set1(eif)
// hv2, hv1 = decoderune(ha, hv1)
@ -419,13 +420,13 @@ func walkrange(n *Node) *Node {
if v1 != nil {
if v2 != nil {
// v1, v2 = hv1t, hv2
a := nod(OAS2, nil, nil)
a := ir.Nod(ir.OAS2, nil, nil)
a.List.Set2(v1, v2)
a.Rlist.Set2(hv1t, hv2)
body = append(body, a)
} else {
// v1 = hv1t
body = append(body, nod(OAS, v1, hv1t))
body = append(body, ir.Nod(ir.OAS, v1, hv1t))
}
}
}
@ -466,17 +467,17 @@ func walkrange(n *Node) *Node {
// }
//
// where == for keys of map m is reflexive.
func isMapClear(n *Node) bool {
func isMapClear(n *ir.Node) bool {
if base.Flag.N != 0 || instrumenting {
return false
}
if n.Op != ORANGE || n.Type.Etype != TMAP || n.List.Len() != 1 {
if n.Op != ir.ORANGE || n.Type.Etype != types.TMAP || n.List.Len() != 1 {
return false
}
k := n.List.First()
if k == nil || k.isBlank() {
if k == nil || ir.IsBlank(k) {
return false
}
@ -490,7 +491,7 @@ func isMapClear(n *Node) bool {
}
stmt := n.Nbody.First() // only stmt in body
if stmt == nil || stmt.Op != ODELETE {
if stmt == nil || stmt.Op != ir.ODELETE {
return false
}
@ -508,7 +509,7 @@ func isMapClear(n *Node) bool {
}
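
For reference, the user-code shape isMapClear matches (ordinary Go, not compiler code); the whole loop is lowered to a single runtime map-clear call:

        package main

        import "fmt"

        func main() {
                m := map[int]string{1: "a", 2: "b", 3: "c"}
                // Exactly this shape: range over m with a single
                // delete(m, k) statement in the body.
                for k := range m {
                        delete(m, k)
                }
                fmt.Println(len(m)) // 0
        }
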
// mapClear constructs a call to runtime.mapclear for the map m.
func mapClear(m *Node) *Node {
func mapClear(m *ir.Node) *ir.Node {
t := m.Type
// instantiate mapclear(typ *type, hmap map[any]any)
@ -533,7 +534,7 @@ func mapClear(m *Node) *Node {
// in which the evaluation of a is side-effect-free.
//
// Parameters are as in walkrange: "for v1, v2 = range a".
func arrayClear(n, v1, v2, a *Node) bool {
func arrayClear(n, v1, v2, a *ir.Node) bool {
if base.Flag.N != 0 || instrumenting {
return false
}
@ -547,7 +548,7 @@ func arrayClear(n, v1, v2, a *Node) bool {
}
stmt := n.Nbody.First() // only stmt in body
if stmt.Op != OAS || stmt.Left.Op != OINDEX {
if stmt.Op != ir.OAS || stmt.Left.Op != ir.OINDEX {
return false
}
@ -567,32 +568,32 @@ func arrayClear(n, v1, v2, a *Node) bool {
// memclr{NoHeap,Has}Pointers(hp, hn)
// i = len(a) - 1
// }
n.Op = OIF
n.Op = ir.OIF
n.Nbody.Set(nil)
n.Left = nod(ONE, nod(OLEN, a, nil), nodintconst(0))
n.Left = ir.Nod(ir.ONE, ir.Nod(ir.OLEN, a, nil), nodintconst(0))
// hp = &a[0]
hp := temp(types.Types[TUNSAFEPTR])
hp := temp(types.Types[types.TUNSAFEPTR])
tmp := nod(OINDEX, a, nodintconst(0))
tmp := ir.Nod(ir.OINDEX, a, nodintconst(0))
tmp.SetBounded(true)
tmp = nod(OADDR, tmp, nil)
tmp = convnop(tmp, types.Types[TUNSAFEPTR])
n.Nbody.Append(nod(OAS, hp, tmp))
tmp = ir.Nod(ir.OADDR, tmp, nil)
tmp = convnop(tmp, types.Types[types.TUNSAFEPTR])
n.Nbody.Append(ir.Nod(ir.OAS, hp, tmp))
// hn = len(a) * sizeof(elem(a))
hn := temp(types.Types[TUINTPTR])
hn := temp(types.Types[types.TUINTPTR])
tmp = nod(OLEN, a, nil)
tmp = nod(OMUL, tmp, nodintconst(elemsize))
tmp = conv(tmp, types.Types[TUINTPTR])
n.Nbody.Append(nod(OAS, hn, tmp))
tmp = ir.Nod(ir.OLEN, a, nil)
tmp = ir.Nod(ir.OMUL, tmp, nodintconst(elemsize))
tmp = conv(tmp, types.Types[types.TUINTPTR])
n.Nbody.Append(ir.Nod(ir.OAS, hn, tmp))
var fn *Node
var fn *ir.Node
if a.Type.Elem().HasPointers() {
// memclrHasPointers(hp, hn)
Curfn.Func.setWBPos(stmt.Pos)
Curfn.Func.SetWBPos(stmt.Pos)
fn = mkcall("memclrHasPointers", nil, nil, hp, hn)
} else {
// memclrNoHeapPointers(hp, hn)
@ -602,7 +603,7 @@ func arrayClear(n, v1, v2, a *Node) bool {
n.Nbody.Append(fn)
// i = len(a) - 1
v1 = nod(OAS, v1, nod(OSUB, nod(OLEN, a, nil), nodintconst(1)))
v1 = ir.Nod(ir.OAS, v1, ir.Nod(ir.OSUB, ir.Nod(ir.OLEN, a, nil), nodintconst(1)))
n.Nbody.Append(v1)
@ -614,15 +615,15 @@ func arrayClear(n, v1, v2, a *Node) bool {
}
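
And the shape arrayClear matches — a range loop whose only statement zeroes a[i] — which the rewrite above turns into a guarded memclr call plus a final assignment to the index variable:

        package main

        import "fmt"

        func main() {
                a := []int{1, 2, 3, 4}
                // Single-statement body zeroing the indexed element:
                // recognized and lowered to one memclr call.
                for i := range a {
                        a[i] = 0
                }
                fmt.Println(a) // [0 0 0 0]
        }
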
// addptr returns (*T)(uintptr(p) + n).
func addptr(p *Node, n int64) *Node {
func addptr(p *ir.Node, n int64) *ir.Node {
t := p.Type
p = nod(OCONVNOP, p, nil)
p.Type = types.Types[TUINTPTR]
p = ir.Nod(ir.OCONVNOP, p, nil)
p.Type = types.Types[types.TUINTPTR]
p = nod(OADD, p, nodintconst(n))
p = ir.Nod(ir.OADD, p, nodintconst(n))
p = nod(OCONVNOP, p, nil)
p = ir.Nod(ir.OCONVNOP, p, nil)
p.Type = t
return p
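
addptr's effect, written as ordinary unsafe Go for reference; the compiler builds this same shape out of OCONVNOP/OADD nodes precisely so the pointer advance looks like integer arithmetic and triggers no write barrier:

        package main

        import (
                "fmt"
                "unsafe"
        )

        // addptr returns (*int64)(uintptr(p) + n). The result stays
        // inside the same allocation, keeping the conversion legal.
        func addptr(p *int64, n int64) *int64 {
                return (*int64)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + uintptr(n)))
        }

        func main() {
                a := [2]int64{10, 20}
                fmt.Println(*addptr(&a[0], 8)) // 20 (8 = sizeof int64)
        }
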


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/gcprog"
"cmd/internal/obj"
@ -84,7 +85,7 @@ func bmap(t *types.Type) *types.Type {
return t.MapType().Bucket
}
bucket := types.New(TSTRUCT)
bucket := types.New(types.TSTRUCT)
keytype := t.Key()
elemtype := t.Elem()
dowidth(keytype)
@ -99,7 +100,7 @@ func bmap(t *types.Type) *types.Type {
field := make([]*types.Field, 0, 5)
// The first field is: uint8 topbits[BUCKETSIZE].
arr := types.NewArray(types.Types[TUINT8], BUCKETSIZE)
arr := types.NewArray(types.Types[types.TUINT8], BUCKETSIZE)
field = append(field, makefield("topbits", arr))
arr = types.NewArray(keytype, BUCKETSIZE)
@ -120,7 +121,7 @@ func bmap(t *types.Type) *types.Type {
// See comment on hmap.overflow in runtime/map.go.
otyp := types.NewPtr(bucket)
if !elemtype.HasPointers() && !keytype.HasPointers() {
otyp = types.Types[TUINTPTR]
otyp = types.Types[types.TUINTPTR]
}
overflow := makefield("overflow", otyp)
field = append(field, overflow)
@ -209,18 +210,18 @@ func hmap(t *types.Type) *types.Type {
// }
// must match runtime/map.go:hmap.
fields := []*types.Field{
makefield("count", types.Types[TINT]),
makefield("flags", types.Types[TUINT8]),
makefield("B", types.Types[TUINT8]),
makefield("noverflow", types.Types[TUINT16]),
makefield("hash0", types.Types[TUINT32]), // Used in walk.go for OMAKEMAP.
makefield("buckets", types.NewPtr(bmap)), // Used in walk.go for OMAKEMAP.
makefield("count", types.Types[types.TINT]),
makefield("flags", types.Types[types.TUINT8]),
makefield("B", types.Types[types.TUINT8]),
makefield("noverflow", types.Types[types.TUINT16]),
makefield("hash0", types.Types[types.TUINT32]), // Used in walk.go for OMAKEMAP.
makefield("buckets", types.NewPtr(bmap)), // Used in walk.go for OMAKEMAP.
makefield("oldbuckets", types.NewPtr(bmap)),
makefield("nevacuate", types.Types[TUINTPTR]),
makefield("extra", types.Types[TUNSAFEPTR]),
makefield("nevacuate", types.Types[types.TUINTPTR]),
makefield("extra", types.Types[types.TUNSAFEPTR]),
}
hmap := types.New(TSTRUCT)
hmap := types.New(types.TSTRUCT)
hmap.SetNoalg(true)
hmap.SetFields(fields)
dowidth(hmap)
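
For reference, a user-level mirror of the header assembled above; the compiler and runtime lay this struct out independently, so field order and widths must agree with runtime/map.go:hmap (hmapMirror is a hypothetical name; the size in the comment assumes a 64-bit target):

        package main

        import (
                "fmt"
                "unsafe"
        )

        // hmapMirror repeats the field list built above, using
        // unsafe.Pointer where the compiler uses typed bucket
        // pointers it wants the GC to handle specially.
        type hmapMirror struct {
                count      int
                flags      uint8
                B          uint8
                noverflow  uint16
                hash0      uint32
                buckets    unsafe.Pointer
                oldbuckets unsafe.Pointer
                nevacuate  uintptr
                extra      unsafe.Pointer
        }

        func main() {
                fmt.Println(unsafe.Sizeof(hmapMirror{})) // 48 on 64-bit targets
        }
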
@ -268,23 +269,23 @@ func hiter(t *types.Type) *types.Type {
fields := []*types.Field{
makefield("key", types.NewPtr(t.Key())), // Used in range.go for TMAP.
makefield("elem", types.NewPtr(t.Elem())), // Used in range.go for TMAP.
makefield("t", types.Types[TUNSAFEPTR]),
makefield("t", types.Types[types.TUNSAFEPTR]),
makefield("h", types.NewPtr(hmap)),
makefield("buckets", types.NewPtr(bmap)),
makefield("bptr", types.NewPtr(bmap)),
makefield("overflow", types.Types[TUNSAFEPTR]),
makefield("oldoverflow", types.Types[TUNSAFEPTR]),
makefield("startBucket", types.Types[TUINTPTR]),
makefield("offset", types.Types[TUINT8]),
makefield("wrapped", types.Types[TBOOL]),
makefield("B", types.Types[TUINT8]),
makefield("i", types.Types[TUINT8]),
makefield("bucket", types.Types[TUINTPTR]),
makefield("checkBucket", types.Types[TUINTPTR]),
makefield("overflow", types.Types[types.TUNSAFEPTR]),
makefield("oldoverflow", types.Types[types.TUNSAFEPTR]),
makefield("startBucket", types.Types[types.TUINTPTR]),
makefield("offset", types.Types[types.TUINT8]),
makefield("wrapped", types.Types[types.TBOOL]),
makefield("B", types.Types[types.TUINT8]),
makefield("i", types.Types[types.TUINT8]),
makefield("bucket", types.Types[types.TUINTPTR]),
makefield("checkBucket", types.Types[types.TUINTPTR]),
}
// build iterator struct holding the above fields
hiter := types.New(TSTRUCT)
hiter := types.New(types.TSTRUCT)
hiter.SetNoalg(true)
hiter.SetFields(fields)
dowidth(hiter)
@ -303,35 +304,35 @@ func deferstruct(stksize int64) *types.Type {
// Unlike the global makefield function, this one needs to set Pkg
// because these types might be compared (in SSA CSE sorting).
// TODO: unify this makefield and the global one above.
sym := &types.Sym{Name: name, Pkg: localpkg}
sym := &types.Sym{Name: name, Pkg: ir.LocalPkg}
return types.NewField(src.NoXPos, sym, typ)
}
argtype := types.NewArray(types.Types[TUINT8], stksize)
argtype := types.NewArray(types.Types[types.TUINT8], stksize)
argtype.Width = stksize
argtype.Align = 1
// These fields must match the ones in runtime/runtime2.go:_defer and
// cmd/compile/internal/gc/ssa.go:(*state).call.
fields := []*types.Field{
makefield("siz", types.Types[TUINT32]),
makefield("started", types.Types[TBOOL]),
makefield("heap", types.Types[TBOOL]),
makefield("openDefer", types.Types[TBOOL]),
makefield("sp", types.Types[TUINTPTR]),
makefield("pc", types.Types[TUINTPTR]),
makefield("siz", types.Types[types.TUINT32]),
makefield("started", types.Types[types.TBOOL]),
makefield("heap", types.Types[types.TBOOL]),
makefield("openDefer", types.Types[types.TBOOL]),
makefield("sp", types.Types[types.TUINTPTR]),
makefield("pc", types.Types[types.TUINTPTR]),
// Note: the types here don't really matter. Defer structures
// are always scanned explicitly during stack copying and GC,
// so we make them uintptr type even though they are real pointers.
makefield("fn", types.Types[TUINTPTR]),
makefield("_panic", types.Types[TUINTPTR]),
makefield("link", types.Types[TUINTPTR]),
makefield("framepc", types.Types[TUINTPTR]),
makefield("varp", types.Types[TUINTPTR]),
makefield("fd", types.Types[TUINTPTR]),
makefield("fn", types.Types[types.TUINTPTR]),
makefield("_panic", types.Types[types.TUINTPTR]),
makefield("link", types.Types[types.TUINTPTR]),
makefield("framepc", types.Types[types.TUINTPTR]),
makefield("varp", types.Types[types.TUINTPTR]),
makefield("fd", types.Types[types.TUINTPTR]),
makefield("args", argtype),
}
// build struct holding the above fields
s := types.New(TSTRUCT)
s := types.New(types.TSTRUCT)
s.SetNoalg(true)
s.SetFields(fields)
s.Width = widstruct(s, s, 0, 1)
@ -346,7 +347,7 @@ func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
if receiver != nil {
inLen++
}
in := make([]*Node, 0, inLen)
in := make([]*ir.Node, 0, inLen)
if receiver != nil {
d := anonfield(receiver)
@ -360,7 +361,7 @@ func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
}
outLen := f.Results().Fields().Len()
out := make([]*Node, 0, outLen)
out := make([]*ir.Node, 0, outLen)
for _, t := range f.Results().Fields().Slice() {
d := anonfield(t.Type)
out = append(out, d)
@ -447,7 +448,7 @@ func methods(t *types.Type) []*Sig {
func imethods(t *types.Type) []*Sig {
var methods []*Sig
for _, f := range t.Fields().Slice() {
if f.Type.Etype != TFUNC || f.Sym == nil {
if f.Type.Etype != types.TFUNC || f.Sym == nil {
continue
}
if f.Sym.IsBlank() {
@ -494,7 +495,7 @@ func dimportpath(p *types.Pkg) {
}
str := p.Path
if p == localpkg {
if p == ir.LocalPkg {
// Note: myimportpath != "", or else dgopkgpath won't call dimportpath.
str = base.Ctxt.Pkgpath
}
@ -511,7 +512,7 @@ func dgopkgpath(s *obj.LSym, ot int, pkg *types.Pkg) int {
return duintptr(s, ot, 0)
}
if pkg == localpkg && base.Ctxt.Pkgpath == "" {
if pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "" {
// If we don't know the full import path of the package being compiled
// (i.e. -p was not passed on the compiler command line), emit a reference to
// type..importpath.""., which the linker will rewrite using the correct import path.
@ -530,7 +531,7 @@ func dgopkgpathOff(s *obj.LSym, ot int, pkg *types.Pkg) int {
if pkg == nil {
return duint32(s, ot, 0)
}
if pkg == localpkg && base.Ctxt.Pkgpath == "" {
if pkg == ir.LocalPkg && base.Ctxt.Pkgpath == "" {
// If we don't know the full import path of the package being compiled
// (i.e. -p was not passed on the compiler command line), emit a reference to
// type..importpath.""., which the linker will rewrite using the correct import path.
@ -674,7 +675,7 @@ func typePkg(t *types.Type) *types.Pkg {
tsym := t.Sym
if tsym == nil {
switch t.Etype {
case TARRAY, TSLICE, TPTR, TCHAN:
case types.TARRAY, types.TSLICE, types.TPTR, types.TCHAN:
if t.Elem() != nil {
tsym = t.Elem().Sym
}
@ -717,32 +718,32 @@ func dmethodptrOff(s *obj.LSym, ot int, x *obj.LSym) int {
}
var kinds = []int{
TINT: objabi.KindInt,
TUINT: objabi.KindUint,
TINT8: objabi.KindInt8,
TUINT8: objabi.KindUint8,
TINT16: objabi.KindInt16,
TUINT16: objabi.KindUint16,
TINT32: objabi.KindInt32,
TUINT32: objabi.KindUint32,
TINT64: objabi.KindInt64,
TUINT64: objabi.KindUint64,
TUINTPTR: objabi.KindUintptr,
TFLOAT32: objabi.KindFloat32,
TFLOAT64: objabi.KindFloat64,
TBOOL: objabi.KindBool,
TSTRING: objabi.KindString,
TPTR: objabi.KindPtr,
TSTRUCT: objabi.KindStruct,
TINTER: objabi.KindInterface,
TCHAN: objabi.KindChan,
TMAP: objabi.KindMap,
TARRAY: objabi.KindArray,
TSLICE: objabi.KindSlice,
TFUNC: objabi.KindFunc,
TCOMPLEX64: objabi.KindComplex64,
TCOMPLEX128: objabi.KindComplex128,
TUNSAFEPTR: objabi.KindUnsafePointer,
types.TINT: objabi.KindInt,
types.TUINT: objabi.KindUint,
types.TINT8: objabi.KindInt8,
types.TUINT8: objabi.KindUint8,
types.TINT16: objabi.KindInt16,
types.TUINT16: objabi.KindUint16,
types.TINT32: objabi.KindInt32,
types.TUINT32: objabi.KindUint32,
types.TINT64: objabi.KindInt64,
types.TUINT64: objabi.KindUint64,
types.TUINTPTR: objabi.KindUintptr,
types.TFLOAT32: objabi.KindFloat32,
types.TFLOAT64: objabi.KindFloat64,
types.TBOOL: objabi.KindBool,
types.TSTRING: objabi.KindString,
types.TPTR: objabi.KindPtr,
types.TSTRUCT: objabi.KindStruct,
types.TINTER: objabi.KindInterface,
types.TCHAN: objabi.KindChan,
types.TMAP: objabi.KindMap,
types.TARRAY: objabi.KindArray,
types.TSLICE: objabi.KindSlice,
types.TFUNC: objabi.KindFunc,
types.TCOMPLEX64: objabi.KindComplex64,
types.TCOMPLEX128: objabi.KindComplex128,
types.TUNSAFEPTR: objabi.KindUnsafePointer,
}
// typeptrdata returns the length in bytes of the prefix of t
@ -753,32 +754,32 @@ func typeptrdata(t *types.Type) int64 {
}
switch t.Etype {
case TPTR,
TUNSAFEPTR,
TFUNC,
TCHAN,
TMAP:
case types.TPTR,
types.TUNSAFEPTR,
types.TFUNC,
types.TCHAN,
types.TMAP:
return int64(Widthptr)
case TSTRING:
case types.TSTRING:
// struct { byte *str; intgo len; }
return int64(Widthptr)
case TINTER:
case types.TINTER:
// struct { Itab *tab; void *data; } or
// struct { Type *type; void *data; }
// Note: see comment in plive.go:onebitwalktype1.
return 2 * int64(Widthptr)
case TSLICE:
case types.TSLICE:
// struct { byte *array; uintgo len; uintgo cap; }
return int64(Widthptr)
case TARRAY:
case types.TARRAY:
// haspointers already eliminated t.NumElem() == 0.
return (t.NumElem()-1)*t.Elem().Width + typeptrdata(t.Elem())
case TSTRUCT:
case types.TSTRUCT:
// Find the last field that has pointers.
var lastPtrField *types.Field
for _, t1 := range t.Fields().Slice() {
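
A worked instance of the struct rule above — ptrdata is the offset of the last pointer-containing field plus that field's own ptrdata — as a runnable toy (tfield and structPtrdata are hypothetical names; 64-bit layout assumed):

        package main

        import "fmt"

        // tfield is a toy field: byte offset plus the field's own
        // pointer-prefix length (0 for pointer-free fields).
        type tfield struct {
                off, ptrdata int64
        }

        // structPtrdata mirrors the TSTRUCT case: find the last field
        // with pointers and end the GC-scanned prefix right after it.
        func structPtrdata(fields []tfield) int64 {
                var last tfield
                for _, f := range fields {
                        if f.ptrdata > 0 {
                                last = f
                        }
                }
                return last.off + last.ptrdata
        }

        func main() {
                // struct { p *int; pad [4]int64; q *int }:
                // q sits at offset 40 and holds 8 pointer bytes -> 48.
                fmt.Println(structPtrdata([]tfield{{0, 8}, {8, 0}, {40, 8}}))
        }
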
@ -989,38 +990,38 @@ func typenamesym(t *types.Type) *types.Sym {
return s
}
func typename(t *types.Type) *Node {
func typename(t *types.Type) *ir.Node {
s := typenamesym(t)
if s.Def == nil {
n := newnamel(src.NoXPos, s)
n.Type = types.Types[TUINT8]
n.SetClass(PEXTERN)
n := ir.NewNameAt(src.NoXPos, s)
n.Type = types.Types[types.TUINT8]
n.SetClass(ir.PEXTERN)
n.SetTypecheck(1)
s.Def = asTypesNode(n)
s.Def = ir.AsTypesNode(n)
}
n := nod(OADDR, asNode(s.Def), nil)
n.Type = types.NewPtr(asNode(s.Def).Type)
n := ir.Nod(ir.OADDR, ir.AsNode(s.Def), nil)
n.Type = types.NewPtr(ir.AsNode(s.Def).Type)
n.SetTypecheck(1)
return n
}
func itabname(t, itype *types.Type) *Node {
func itabname(t, itype *types.Type) *ir.Node {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
base.Fatalf("itabname(%v, %v)", t, itype)
}
s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
n := newname(s)
n.Type = types.Types[TUINT8]
n.SetClass(PEXTERN)
n := NewName(s)
n.Type = types.Types[types.TUINT8]
n.SetClass(ir.PEXTERN)
n.SetTypecheck(1)
s.Def = asTypesNode(n)
s.Def = ir.AsTypesNode(n)
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
}
n := nod(OADDR, asNode(s.Def), nil)
n.Type = types.NewPtr(asNode(s.Def).Type)
n := ir.Nod(ir.OADDR, ir.AsNode(s.Def), nil)
n.Type = types.NewPtr(ir.AsNode(s.Def).Type)
n.SetTypecheck(1)
return n
}
@ -1029,35 +1030,35 @@ func itabname(t, itype *types.Type) *Node {
// That is, if x==x for all x of type t.
func isreflexive(t *types.Type) bool {
switch t.Etype {
case TBOOL,
TINT,
TUINT,
TINT8,
TUINT8,
TINT16,
TUINT16,
TINT32,
TUINT32,
TINT64,
TUINT64,
TUINTPTR,
TPTR,
TUNSAFEPTR,
TSTRING,
TCHAN:
case types.TBOOL,
types.TINT,
types.TUINT,
types.TINT8,
types.TUINT8,
types.TINT16,
types.TUINT16,
types.TINT32,
types.TUINT32,
types.TINT64,
types.TUINT64,
types.TUINTPTR,
types.TPTR,
types.TUNSAFEPTR,
types.TSTRING,
types.TCHAN:
return true
case TFLOAT32,
TFLOAT64,
TCOMPLEX64,
TCOMPLEX128,
TINTER:
case types.TFLOAT32,
types.TFLOAT64,
types.TCOMPLEX64,
types.TCOMPLEX128,
types.TINTER:
return false
case TARRAY:
case types.TARRAY:
return isreflexive(t.Elem())
case TSTRUCT:
case types.TSTRUCT:
for _, t1 := range t.Fields().Slice() {
if !isreflexive(t1.Type) {
return false
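
The float and complex cases above exist because == is not reflexive for NaN, so the map runtime cannot assume x == x for such keys:

        package main

        import (
                "fmt"
                "math"
        )

        func main() {
                n := math.NaN()
                fmt.Println(n == n) // false: floats are not reflexive under ==
        }
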
@ -1075,19 +1076,19 @@ func isreflexive(t *types.Type) bool {
// need the key to be updated.
func needkeyupdate(t *types.Type) bool {
switch t.Etype {
case TBOOL, TINT, TUINT, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32,
TINT64, TUINT64, TUINTPTR, TPTR, TUNSAFEPTR, TCHAN:
case types.TBOOL, types.TINT, types.TUINT, types.TINT8, types.TUINT8, types.TINT16, types.TUINT16, types.TINT32, types.TUINT32,
types.TINT64, types.TUINT64, types.TUINTPTR, types.TPTR, types.TUNSAFEPTR, types.TCHAN:
return false
case TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, // floats and complex can be +0/-0
TINTER,
TSTRING: // strings might have smaller backing stores
case types.TFLOAT32, types.TFLOAT64, types.TCOMPLEX64, types.TCOMPLEX128, // floats and complex can be +0/-0
types.TINTER,
types.TSTRING: // strings might have smaller backing stores
return true
case TARRAY:
case types.TARRAY:
return needkeyupdate(t.Elem())
case TSTRUCT:
case types.TSTRUCT:
for _, t1 := range t.Fields().Slice() {
if needkeyupdate(t1.Type) {
return true
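
Why floats need the stored key updated on overwrite: +0.0 and -0.0 compare equal but are distinct values, so a later assignment must replace the key itself, not just the element. Observable from user code:

        package main

        import (
                "fmt"
                "math"
        )

        func main() {
                m := map[float64]string{}
                m[math.Copysign(0, -1)] = "neg" // key -0.0
                m[0.0] = "pos"                  // same slot; key updated to +0.0
                for k := range m {
                        fmt.Println(math.Signbit(k)) // false: stored key is +0.0
                }
        }
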
@ -1104,13 +1105,13 @@ func needkeyupdate(t *types.Type) bool {
// hashMightPanic reports whether the hash of a map key of type t might panic.
func hashMightPanic(t *types.Type) bool {
switch t.Etype {
case TINTER:
case types.TINTER:
return true
case TARRAY:
case types.TARRAY:
return hashMightPanic(t.Elem())
case TSTRUCT:
case types.TSTRUCT:
for _, t1 := range t.Fields().Slice() {
if hashMightPanic(t1.Type) {
return true
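
And the panic hashMightPanic is tracking: hashing an interface key whose dynamic type is not comparable fails at run time, so maps with such key types need the hash call guarded accordingly:

        package main

        import "fmt"

        func main() {
                defer func() {
                        // runtime error: hash of unhashable type []int
                        fmt.Println("recovered:", recover())
                }()
                m := map[interface{}]int{}
                m[[]int{1, 2}] = 1 // dynamic type []int is not comparable
        }
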
@ -1161,7 +1162,7 @@ func dtypesym(t *types.Type) *obj.LSym {
if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Etype] && tbase != types.Bytetype && tbase != types.Runetype && tbase != types.Errortype) { // int, float, etc
// named types from other files are defined only by those files
if tbase.Sym != nil && tbase.Sym.Pkg != localpkg {
if tbase.Sym != nil && tbase.Sym.Pkg != ir.LocalPkg {
if i, ok := typeSymIdx[tbase]; ok {
lsym.Pkg = tbase.Sym.Pkg.Prefix
if t != tbase {
@ -1174,7 +1175,7 @@ func dtypesym(t *types.Type) *obj.LSym {
return lsym
}
// TODO(mdempsky): Investigate whether this can happen.
if tbase.Etype == TFORW {
if tbase.Etype == types.TFORW {
return lsym
}
}
@ -1185,7 +1186,7 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = dcommontype(lsym, t)
ot = dextratype(lsym, ot, t, 0)
case TARRAY:
case types.TARRAY:
// ../../../../runtime/type.go:/arrayType
s1 := dtypesym(t.Elem())
t2 := types.NewSlice(t.Elem())
@ -1196,14 +1197,14 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = duintptr(lsym, ot, uint64(t.NumElem()))
ot = dextratype(lsym, ot, t, 0)
case TSLICE:
case types.TSLICE:
// ../../../../runtime/type.go:/sliceType
s1 := dtypesym(t.Elem())
ot = dcommontype(lsym, t)
ot = dsymptr(lsym, ot, s1, 0)
ot = dextratype(lsym, ot, t, 0)
case TCHAN:
case types.TCHAN:
// ../../../../runtime/type.go:/chanType
s1 := dtypesym(t.Elem())
ot = dcommontype(lsym, t)
@ -1211,7 +1212,7 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = duintptr(lsym, ot, uint64(t.ChanDir()))
ot = dextratype(lsym, ot, t, 0)
case TFUNC:
case types.TFUNC:
for _, t1 := range t.Recvs().Fields().Slice() {
dtypesym(t1.Type)
}
@ -1250,7 +1251,7 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = dsymptr(lsym, ot, dtypesym(t1.Type), 0)
}
case TINTER:
case types.TINTER:
m := imethods(t)
n := len(m)
for _, a := range m {
@ -1286,7 +1287,7 @@ func dtypesym(t *types.Type) *obj.LSym {
}
// ../../../../runtime/type.go:/mapType
case TMAP:
case types.TMAP:
s1 := dtypesym(t.Key())
s2 := dtypesym(t.Elem())
s3 := dtypesym(bmap(t))
@ -1326,8 +1327,8 @@ func dtypesym(t *types.Type) *obj.LSym {
ot = duint32(lsym, ot, flags)
ot = dextratype(lsym, ot, t, 0)
case TPTR:
if t.Elem().Etype == TANY {
case types.TPTR:
if t.Elem().Etype == types.TANY {
// ../../../../runtime/type.go:/UnsafePointerType
ot = dcommontype(lsym, t)
ot = dextratype(lsym, ot, t, 0)
@ -1344,7 +1345,7 @@ func dtypesym(t *types.Type) *obj.LSym {
// ../../../../runtime/type.go:/structType
// for security, only the exported fields.
case TSTRUCT:
case types.TSTRUCT:
fields := t.Fields().Slice()
for _, t1 := range fields {
dtypesym(t1.Type)
@ -1403,7 +1404,7 @@ func dtypesym(t *types.Type) *obj.LSym {
// functions must return the existing type structure rather
// than creating a new one.
switch t.Etype {
case TPTR, TARRAY, TCHAN, TFUNC, TMAP, TSLICE, TSTRUCT:
case types.TPTR, types.TARRAY, types.TCHAN, types.TFUNC, types.TMAP, types.TSLICE, types.TSTRUCT:
keep = true
}
}
@ -1515,10 +1516,10 @@ func addsignat(t *types.Type) {
}
}
func addsignats(dcls []*Node) {
func addsignats(dcls []*ir.Node) {
// copy types from dcl list to signatset
for _, n := range dcls {
if n.Op == OTYPE {
if n.Op == ir.OTYPE {
addsignat(n.Type)
}
}
@ -1571,7 +1572,7 @@ func dumptabs() {
}
// process ptabs
if localpkg.Name == "main" && len(ptabs) > 0 {
if ir.LocalPkg.Name == "main" && len(ptabs) > 0 {
ot := 0
s := base.Ctxt.Lookup("go.plugin.tabs")
for _, p := range ptabs {
@ -1615,17 +1616,17 @@ func dumpbasictypes() {
// another possible choice would be package main,
// but using runtime means fewer copies in object files.
if base.Ctxt.Pkgpath == "runtime" {
for i := types.EType(1); i <= TBOOL; i++ {
for i := types.EType(1); i <= types.TBOOL; i++ {
dtypesym(types.NewPtr(types.Types[i]))
}
dtypesym(types.NewPtr(types.Types[TSTRING]))
dtypesym(types.NewPtr(types.Types[TUNSAFEPTR]))
dtypesym(types.NewPtr(types.Types[types.TSTRING]))
dtypesym(types.NewPtr(types.Types[types.TUNSAFEPTR]))
// emit type structs for error and func(error) string.
// The latter is the type of an auto-generated wrapper.
dtypesym(types.NewPtr(types.Errortype))
dtypesym(functype(nil, []*Node{anonfield(types.Errortype)}, []*Node{anonfield(types.Types[TSTRING])}))
dtypesym(functype(nil, []*ir.Node{anonfield(types.Errortype)}, []*ir.Node{anonfield(types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(Runtimepkg)
@ -1767,7 +1768,7 @@ func fillptrmask(t *types.Type, ptrmask []byte) {
// For non-trivial arrays, the program describes the full t.Width size.
func dgcprog(t *types.Type) (*obj.LSym, int64) {
dowidth(t)
if t.Width == BADWIDTH {
if t.Width == types.BADWIDTH {
base.Fatalf("dgcprog: %v badwidth", t)
}
lsym := typesymprefix(".gcprog", t).Linksym()
@ -1824,17 +1825,17 @@ func (p *GCProg) emit(t *types.Type, offset int64) {
default:
base.Fatalf("GCProg.emit: unexpected type %v", t)
case TSTRING:
case types.TSTRING:
p.w.Ptr(offset / int64(Widthptr))
case TINTER:
case types.TINTER:
// Note: the first word isn't a pointer. See comment in plive.go:onebitwalktype1.
p.w.Ptr(offset/int64(Widthptr) + 1)
case TSLICE:
case types.TSLICE:
p.w.Ptr(offset / int64(Widthptr))
case TARRAY:
case types.TARRAY:
if t.NumElem() == 0 {
// should have been handled by haspointers check above
base.Fatalf("GCProg.emit: empty array")
@ -1859,7 +1860,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) {
p.w.ZeroUntil((offset + elem.Width) / int64(Widthptr))
p.w.Repeat(elem.Width/int64(Widthptr), count-1)
case TSTRUCT:
case types.TSTRUCT:
for _, t1 := range t.Fields().Slice() {
p.emit(t1.Type, offset+t1.Offset)
}
@ -1868,7 +1869,7 @@ func (p *GCProg) emit(t *types.Type, offset int64) {
// zeroaddr returns the address of a symbol with at least
// size bytes of zeros.
func zeroaddr(size int64) *Node {
func zeroaddr(size int64) *ir.Node {
if size >= 1<<31 {
base.Fatalf("map elem too big %d", size)
}
@ -1877,14 +1878,14 @@ func zeroaddr(size int64) *Node {
}
s := mappkg.Lookup("zero")
if s.Def == nil {
x := newname(s)
x.Type = types.Types[TUINT8]
x.SetClass(PEXTERN)
x := NewName(s)
x.Type = types.Types[types.TUINT8]
x.SetClass(ir.PEXTERN)
x.SetTypecheck(1)
s.Def = asTypesNode(x)
s.Def = ir.AsTypesNode(x)
}
z := nod(OADDR, asNode(s.Def), nil)
z.Type = types.NewPtr(types.Types[TUINT8])
z := ir.Nod(ir.OADDR, ir.AsNode(s.Def), nil)
z.Type = types.NewPtr(types.Types[types.TUINT8])
z.SetTypecheck(1)
return z
}


@ -4,6 +4,8 @@
package gc
import "cmd/compile/internal/ir"
// Strongly connected components.
//
// Run analysis on minimal sets of mutually recursive functions
@ -30,10 +32,10 @@ package gc
// when analyzing a set of mutually recursive functions.
type bottomUpVisitor struct {
analyze func([]*Node, bool)
analyze func([]*ir.Node, bool)
visitgen uint32
nodeID map[*Node]uint32
stack []*Node
nodeID map[*ir.Node]uint32
stack []*ir.Node
}
// visitBottomUp invokes analyze on the ODCLFUNC nodes listed in list.
@ -49,18 +51,18 @@ type bottomUpVisitor struct {
// If recursive is false, the list consists of only a single function and its closures.
// If recursive is true, the list may still contain only a single function,
// if that function is itself recursive.
func visitBottomUp(list []*Node, analyze func(list []*Node, recursive bool)) {
func visitBottomUp(list []*ir.Node, analyze func(list []*ir.Node, recursive bool)) {
var v bottomUpVisitor
v.analyze = analyze
v.nodeID = make(map[*Node]uint32)
v.nodeID = make(map[*ir.Node]uint32)
for _, n := range list {
if n.Op == ODCLFUNC && !n.Func.IsHiddenClosure() {
if n.Op == ir.ODCLFUNC && !n.Func.IsHiddenClosure() {
v.visit(n)
}
}
}
func (v *bottomUpVisitor) visit(n *Node) uint32 {
func (v *bottomUpVisitor) visit(n *ir.Node) uint32 {
if id := v.nodeID[n]; id > 0 {
// already visited
return id
@ -73,38 +75,38 @@ func (v *bottomUpVisitor) visit(n *Node) uint32 {
min := v.visitgen
v.stack = append(v.stack, n)
inspectList(n.Nbody, func(n *Node) bool {
ir.InspectList(n.Nbody, func(n *ir.Node) bool {
switch n.Op {
case ONAME:
if n.Class() == PFUNC {
case ir.ONAME:
if n.Class() == ir.PFUNC {
if n != nil && n.Name.Defn != nil {
if m := v.visit(n.Name.Defn); m < min {
min = m
}
}
}
case OMETHEXPR:
fn := n.MethodName()
case ir.OMETHEXPR:
fn := methodExprName(n)
if fn != nil && fn.Name.Defn != nil {
if m := v.visit(fn.Name.Defn); m < min {
min = m
}
}
case ODOTMETH:
fn := n.MethodName()
if fn != nil && fn.Op == ONAME && fn.Class() == PFUNC && fn.Name.Defn != nil {
case ir.ODOTMETH:
fn := methodExprName(n)
if fn != nil && fn.Op == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name.Defn != nil {
if m := v.visit(fn.Name.Defn); m < min {
min = m
}
}
case OCALLPART:
fn := asNode(callpartMethod(n).Nname)
if fn != nil && fn.Op == ONAME && fn.Class() == PFUNC && fn.Name.Defn != nil {
case ir.OCALLPART:
fn := ir.AsNode(callpartMethod(n).Nname)
if fn != nil && fn.Op == ir.ONAME && fn.Class() == ir.PFUNC && fn.Name.Defn != nil {
if m := v.visit(fn.Name.Defn); m < min {
min = m
}
}
case OCLOSURE:
case ir.OCLOSURE:
if m := v.visit(n.Func.Decl); m < min {
min = m
}
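
For reference, visit is a variant of Tarjan's strongly-connected-components
algorithm. A self-contained sketch of the same walk over a toy call graph,
with strings standing in for *ir.Node (the visitor fields mirror
bottomUpVisitor):

	package main

	import "fmt"

	type visitor struct {
		analyze  func(block []string, recursive bool)
		visitgen uint32
		nodeID   map[string]uint32
		stack    []string
		calls    map[string][]string // toy static call graph
	}

	func (v *visitor) visit(n string) uint32 {
		if id := v.nodeID[n]; id > 0 {
			return id // already visited
		}
		v.visitgen++
		id := v.visitgen
		v.nodeID[n] = id
		v.visitgen++
		min := v.visitgen
		v.stack = append(v.stack, n)

		for _, callee := range v.calls[n] {
			if m := v.visit(callee); m < min {
				min = m
			}
		}

		// If min never escaped below id, n is the root of its
		// component: pop the component and analyze it.
		if min == id || min == id+1 {
			recursive := min == id
			i := len(v.stack) - 1
			for v.stack[i] != n {
				v.nodeID[v.stack[i]] = ^uint32(0)
				i--
			}
			v.nodeID[n] = ^uint32(0) // later visits return a huge id
			block := v.stack[i:]
			v.stack = v.stack[:i]
			v.analyze(block, recursive)
		}
		return min
	}

	func main() {
		v := &visitor{
			nodeID: map[string]uint32{},
			calls:  map[string][]string{"a": {"b"}, "b": {"a", "c"}, "c": nil},
			analyze: func(block []string, recursive bool) {
				fmt.Println(block, "recursive:", recursive)
			},
		}
		v.visit("a") // [c] recursive: false, then [a b] recursive: true
	}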


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/internal/dwarf"
"cmd/internal/obj"
"cmd/internal/src"
@ -17,7 +18,7 @@ func xposBefore(p, q src.XPos) bool {
return base.Ctxt.PosTable.Pos(p).Before(base.Ctxt.PosTable.Pos(q))
}
func findScope(marks []Mark, pos src.XPos) ScopeID {
func findScope(marks []ir.Mark, pos src.XPos) ir.ScopeID {
i := sort.Search(len(marks), func(i int) bool {
return xposBefore(pos, marks[i].Pos)
})
@ -27,7 +28,7 @@ func findScope(marks []Mark, pos src.XPos) ScopeID {
return marks[i-1].Scope
}
func assembleScopes(fnsym *obj.LSym, fn *Node, dwarfVars []*dwarf.Var, varScopes []ScopeID) []dwarf.Scope {
func assembleScopes(fnsym *obj.LSym, fn *ir.Node, dwarfVars []*dwarf.Var, varScopes []ir.ScopeID) []dwarf.Scope {
// Initialize the DWARF scope tree based on lexical scopes.
dwarfScopes := make([]dwarf.Scope, 1+len(fn.Func.Parents))
for i, parent := range fn.Func.Parents {
@ -40,7 +41,7 @@ func assembleScopes(fnsym *obj.LSym, fn *Node, dwarfVars []*dwarf.Var, varScopes
}
// scopeVariables assigns DWARF variable records to their scopes.
func scopeVariables(dwarfVars []*dwarf.Var, varScopes []ScopeID, dwarfScopes []dwarf.Scope) {
func scopeVariables(dwarfVars []*dwarf.Var, varScopes []ir.ScopeID, dwarfScopes []dwarf.Scope) {
sort.Stable(varsByScopeAndOffset{dwarfVars, varScopes})
i0 := 0
@ -57,7 +58,7 @@ func scopeVariables(dwarfVars []*dwarf.Var, varScopes []ScopeID, dwarfScopes []d
}
// scopePCs assigns PC ranges to their scopes.
func scopePCs(fnsym *obj.LSym, marks []Mark, dwarfScopes []dwarf.Scope) {
func scopePCs(fnsym *obj.LSym, marks []ir.Mark, dwarfScopes []dwarf.Scope) {
// If there aren't any child scopes (in particular, when scope
// tracking is disabled), we can skip a whole lot of work.
if len(marks) == 0 {
@ -90,7 +91,7 @@ func compactScopes(dwarfScopes []dwarf.Scope) []dwarf.Scope {
type varsByScopeAndOffset struct {
vars []*dwarf.Var
scopes []ScopeID
scopes []ir.ScopeID
}
func (v varsByScopeAndOffset) Len() int {
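
findScope is a plain sort.Search over the scope marks. A minimal sketch
with ints standing in for src.XPos and ir.ScopeID:

	package main

	import (
		"fmt"
		"sort"
	)

	// mark mirrors ir.Mark: the scope that becomes active at pos.
	type mark struct {
		pos   int
		scope int
	}

	// findScope returns the scope of the last mark at or before pos,
	// or 0 (the root scope) if there is none.
	func findScope(marks []mark, pos int) int {
		i := sort.Search(len(marks), func(i int) bool {
			return pos < marks[i].pos
		})
		if i == 0 {
			return 0
		}
		return marks[i-1].scope
	}

	func main() {
		marks := []mark{{10, 1}, {20, 2}, {30, 1}}
		fmt.Println(findScope(marks, 5))  // 0
		fmt.Println(findScope(marks, 25)) // 2
	}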


@ -6,16 +6,17 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
// select
func typecheckselect(sel *Node) {
var def *Node
func typecheckselect(sel *ir.Node) {
var def *ir.Node
lno := setlineno(sel)
typecheckslice(sel.Ninit.Slice(), ctxStmt)
for _, ncase := range sel.List.Slice() {
if ncase.Op != OCASE {
if ncase.Op != ir.OCASE {
setlineno(ncase)
base.Fatalf("typecheckselect %v", ncase.Op)
}
@ -23,7 +24,7 @@ func typecheckselect(sel *Node) {
if ncase.List.Len() == 0 {
// default
if def != nil {
base.ErrorfAt(ncase.Pos, "multiple defaults in select (first at %v)", def.Line())
base.ErrorfAt(ncase.Pos, "multiple defaults in select (first at %v)", ir.Line(def))
} else {
def = ncase
}
@ -37,7 +38,7 @@ func typecheckselect(sel *Node) {
switch n.Op {
default:
pos := n.Pos
if n.Op == ONAME {
if n.Op == ir.ONAME {
// We don't have the right position for ONAME nodes (see #15459 and
// others). Using ncase.Pos for now as it will provide the correct
// line number (assuming the expression follows the "case" keyword
@ -49,37 +50,37 @@ func typecheckselect(sel *Node) {
// convert x = <-c into OSELRECV(x, <-c).
// remove implicit conversions; the eventual assignment
// will reintroduce them.
case OAS:
if (n.Right.Op == OCONVNOP || n.Right.Op == OCONVIFACE) && n.Right.Implicit() {
case ir.OAS:
if (n.Right.Op == ir.OCONVNOP || n.Right.Op == ir.OCONVIFACE) && n.Right.Implicit() {
n.Right = n.Right.Left
}
if n.Right.Op != ORECV {
if n.Right.Op != ir.ORECV {
base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side")
break
}
n.Op = OSELRECV
n.Op = ir.OSELRECV
// convert x, ok = <-c into OSELRECV2(x, <-c) with ntest=ok
case OAS2RECV:
if n.Right.Op != ORECV {
case ir.OAS2RECV:
if n.Right.Op != ir.ORECV {
base.ErrorfAt(n.Pos, "select assignment must have receive on right hand side")
break
}
n.Op = OSELRECV2
n.Op = ir.OSELRECV2
n.Left = n.List.First()
n.List.Set1(n.List.Second())
// convert <-c into OSELRECV(N, <-c)
case ORECV:
n = nodl(n.Pos, OSELRECV, nil, n)
case ir.ORECV:
n = ir.NodAt(n.Pos, ir.OSELRECV, nil, n)
n.SetTypecheck(1)
ncase.Left = n
case OSEND:
case ir.OSEND:
break
}
}
@ -90,7 +91,7 @@ func typecheckselect(sel *Node) {
base.Pos = lno
}
func walkselect(sel *Node) {
func walkselect(sel *ir.Node) {
lno := setlineno(sel)
if sel.Nbody.Len() != 0 {
base.Fatalf("double walkselect")
@ -108,13 +109,13 @@ func walkselect(sel *Node) {
base.Pos = lno
}
func walkselectcases(cases *Nodes) []*Node {
func walkselectcases(cases *ir.Nodes) []*ir.Node {
ncas := cases.Len()
sellineno := base.Pos
// optimization: zero-case select
if ncas == 0 {
return []*Node{mkcall("block", nil, nil)}
return []*ir.Node{mkcall("block", nil, nil)}
}
// optimization: one-case select: single op.
@ -130,25 +131,25 @@ func walkselectcases(cases *Nodes) []*Node {
default:
base.Fatalf("select %v", n.Op)
case OSEND:
case ir.OSEND:
// already ok
case OSELRECV, OSELRECV2:
if n.Op == OSELRECV || n.List.Len() == 0 {
case ir.OSELRECV, ir.OSELRECV2:
if n.Op == ir.OSELRECV || n.List.Len() == 0 {
if n.Left == nil {
n = n.Right
} else {
n.Op = OAS
n.Op = ir.OAS
}
break
}
if n.Left == nil {
nblank = typecheck(nblank, ctxExpr|ctxAssign)
n.Left = nblank
ir.BlankNode = typecheck(ir.BlankNode, ctxExpr|ctxAssign)
n.Left = ir.BlankNode
}
n.Op = OAS2
n.Op = ir.OAS2
n.List.Prepend(n.Left)
n.Rlist.Set1(n.Right)
n.Right = nil
@ -161,13 +162,13 @@ func walkselectcases(cases *Nodes) []*Node {
}
l = append(l, cas.Nbody.Slice()...)
l = append(l, nod(OBREAK, nil, nil))
l = append(l, ir.Nod(ir.OBREAK, nil, nil))
return l
}
// convert case value arguments to addresses.
// this rewrite is used by both the general code and the next optimization.
var dflt *Node
var dflt *ir.Node
for _, cas := range cases.Slice() {
setlineno(cas)
n := cas.Left
@ -176,17 +177,17 @@ func walkselectcases(cases *Nodes) []*Node {
continue
}
switch n.Op {
case OSEND:
n.Right = nod(OADDR, n.Right, nil)
case ir.OSEND:
n.Right = ir.Nod(ir.OADDR, n.Right, nil)
n.Right = typecheck(n.Right, ctxExpr)
case OSELRECV, OSELRECV2:
if n.Op == OSELRECV2 && n.List.Len() == 0 {
n.Op = OSELRECV
case ir.OSELRECV, ir.OSELRECV2:
if n.Op == ir.OSELRECV2 && n.List.Len() == 0 {
n.Op = ir.OSELRECV
}
if n.Left != nil {
n.Left = nod(OADDR, n.Left, nil)
n.Left = ir.Nod(ir.OADDR, n.Left, nil)
n.Left = typecheck(n.Left, ctxExpr)
}
}
@ -201,66 +202,66 @@ func walkselectcases(cases *Nodes) []*Node {
n := cas.Left
setlineno(n)
r := nod(OIF, nil, nil)
r := ir.Nod(ir.OIF, nil, nil)
r.Ninit.Set(cas.Ninit.Slice())
switch n.Op {
default:
base.Fatalf("select %v", n.Op)
case OSEND:
case ir.OSEND:
// if selectnbsend(c, v) { body } else { default body }
ch := n.Left
r.Left = mkcall1(chanfn("selectnbsend", 2, ch.Type), types.Types[TBOOL], &r.Ninit, ch, n.Right)
r.Left = mkcall1(chanfn("selectnbsend", 2, ch.Type), types.Types[types.TBOOL], &r.Ninit, ch, n.Right)
case OSELRECV:
case ir.OSELRECV:
// if selectnbrecv(&v, c) { body } else { default body }
ch := n.Right.Left
elem := n.Left
if elem == nil {
elem = nodnil()
}
r.Left = mkcall1(chanfn("selectnbrecv", 2, ch.Type), types.Types[TBOOL], &r.Ninit, elem, ch)
r.Left = mkcall1(chanfn("selectnbrecv", 2, ch.Type), types.Types[types.TBOOL], &r.Ninit, elem, ch)
case OSELRECV2:
case ir.OSELRECV2:
// if selectnbrecv2(&v, &received, c) { body } else { default body }
ch := n.Right.Left
elem := n.Left
if elem == nil {
elem = nodnil()
}
receivedp := nod(OADDR, n.List.First(), nil)
receivedp := ir.Nod(ir.OADDR, n.List.First(), nil)
receivedp = typecheck(receivedp, ctxExpr)
r.Left = mkcall1(chanfn("selectnbrecv2", 2, ch.Type), types.Types[TBOOL], &r.Ninit, elem, receivedp, ch)
r.Left = mkcall1(chanfn("selectnbrecv2", 2, ch.Type), types.Types[types.TBOOL], &r.Ninit, elem, receivedp, ch)
}
r.Left = typecheck(r.Left, ctxExpr)
r.Nbody.Set(cas.Nbody.Slice())
r.Rlist.Set(append(dflt.Ninit.Slice(), dflt.Nbody.Slice()...))
return []*Node{r, nod(OBREAK, nil, nil)}
return []*ir.Node{r, ir.Nod(ir.OBREAK, nil, nil)}
}
if dflt != nil {
ncas--
}
casorder := make([]*Node, ncas)
casorder := make([]*ir.Node, ncas)
nsends, nrecvs := 0, 0
var init []*Node
var init []*ir.Node
// generate sel-struct
base.Pos = sellineno
selv := temp(types.NewArray(scasetype(), int64(ncas)))
r := nod(OAS, selv, nil)
r := ir.Nod(ir.OAS, selv, nil)
r = typecheck(r, ctxStmt)
init = append(init, r)
// No initialization for order; runtime.selectgo is responsible for that.
order := temp(types.NewArray(types.Types[TUINT16], 2*int64(ncas)))
order := temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
var pc0, pcs *Node
var pc0, pcs *ir.Node
if base.Flag.Race {
pcs = temp(types.NewArray(types.Types[TUINTPTR], int64(ncas)))
pc0 = typecheck(nod(OADDR, nod(OINDEX, pcs, nodintconst(0)), nil), ctxExpr)
pcs = temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
pc0 = typecheck(ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, pcs, nodintconst(0)), nil), ctxExpr)
} else {
pc0 = nodnil()
}
@ -278,16 +279,16 @@ func walkselectcases(cases *Nodes) []*Node {
}
var i int
var c, elem *Node
var c, elem *ir.Node
switch n.Op {
default:
base.Fatalf("select %v", n.Op)
case OSEND:
case ir.OSEND:
i = nsends
nsends++
c = n.Left
elem = n.Right
case OSELRECV, OSELRECV2:
case ir.OSELRECV, ir.OSELRECV2:
nrecvs++
i = ncas - nrecvs
c = n.Right.Left
@ -296,23 +297,23 @@ func walkselectcases(cases *Nodes) []*Node {
casorder[i] = cas
setField := func(f string, val *Node) {
r := nod(OAS, nodSym(ODOT, nod(OINDEX, selv, nodintconst(int64(i))), lookup(f)), val)
setField := func(f string, val *ir.Node) {
r := ir.Nod(ir.OAS, nodSym(ir.ODOT, ir.Nod(ir.OINDEX, selv, nodintconst(int64(i))), lookup(f)), val)
r = typecheck(r, ctxStmt)
init = append(init, r)
}
c = convnop(c, types.Types[TUNSAFEPTR])
c = convnop(c, types.Types[types.TUNSAFEPTR])
setField("c", c)
if elem != nil {
elem = convnop(elem, types.Types[TUNSAFEPTR])
elem = convnop(elem, types.Types[types.TUNSAFEPTR])
setField("elem", elem)
}
// TODO(mdempsky): There should be a cleaner way to
// handle this.
if base.Flag.Race {
r = mkcall("selectsetpc", nil, nil, nod(OADDR, nod(OINDEX, pcs, nodintconst(int64(i))), nil))
r = mkcall("selectsetpc", nil, nil, ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, pcs, nodintconst(int64(i))), nil))
init = append(init, r)
}
}
@ -322,9 +323,9 @@ func walkselectcases(cases *Nodes) []*Node {
// run the select
base.Pos = sellineno
chosen := temp(types.Types[TINT])
recvOK := temp(types.Types[TBOOL])
r = nod(OAS2, nil, nil)
chosen := temp(types.Types[types.TINT])
recvOK := temp(types.Types[types.TBOOL])
r = ir.Nod(ir.OAS2, nil, nil)
r.List.Set2(chosen, recvOK)
fn := syslook("selectgo")
r.Rlist.Set1(mkcall1(fn, fn.Type.Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, nodintconst(int64(nsends)), nodintconst(int64(nrecvs)), nodbool(dflt == nil)))
@ -332,46 +333,46 @@ func walkselectcases(cases *Nodes) []*Node {
init = append(init, r)
// selv and order are no longer alive after selectgo.
init = append(init, nod(OVARKILL, selv, nil))
init = append(init, nod(OVARKILL, order, nil))
init = append(init, ir.Nod(ir.OVARKILL, selv, nil))
init = append(init, ir.Nod(ir.OVARKILL, order, nil))
if base.Flag.Race {
init = append(init, nod(OVARKILL, pcs, nil))
init = append(init, ir.Nod(ir.OVARKILL, pcs, nil))
}
// dispatch cases
dispatch := func(cond, cas *Node) {
dispatch := func(cond, cas *ir.Node) {
cond = typecheck(cond, ctxExpr)
cond = defaultlit(cond, nil)
r := nod(OIF, cond, nil)
r := ir.Nod(ir.OIF, cond, nil)
if n := cas.Left; n != nil && n.Op == OSELRECV2 {
x := nod(OAS, n.List.First(), recvOK)
if n := cas.Left; n != nil && n.Op == ir.OSELRECV2 {
x := ir.Nod(ir.OAS, n.List.First(), recvOK)
x = typecheck(x, ctxStmt)
r.Nbody.Append(x)
}
r.Nbody.AppendNodes(&cas.Nbody)
r.Nbody.Append(nod(OBREAK, nil, nil))
r.Nbody.Append(ir.Nod(ir.OBREAK, nil, nil))
init = append(init, r)
}
if dflt != nil {
setlineno(dflt)
dispatch(nod(OLT, chosen, nodintconst(0)), dflt)
dispatch(ir.Nod(ir.OLT, chosen, nodintconst(0)), dflt)
}
for i, cas := range casorder {
setlineno(cas)
dispatch(nod(OEQ, chosen, nodintconst(int64(i))), cas)
dispatch(ir.Nod(ir.OEQ, chosen, nodintconst(int64(i))), cas)
}
return init
}
// bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
func bytePtrToIndex(n *Node, i int64) *Node {
s := nod(OADDR, nod(OINDEX, n, nodintconst(i)), nil)
t := types.NewPtr(types.Types[TUINT8])
func bytePtrToIndex(n *ir.Node, i int64) *ir.Node {
s := ir.Nod(ir.OADDR, ir.Nod(ir.OINDEX, n, nodintconst(i)), nil)
t := types.NewPtr(types.Types[types.TUINT8])
return convnop(s, t)
}
@ -380,9 +381,9 @@ var scase *types.Type
// Keep in sync with src/runtime/select.go.
func scasetype() *types.Type {
if scase == nil {
scase = tostruct([]*Node{
namedfield("c", types.Types[TUNSAFEPTR]),
namedfield("elem", types.Types[TUNSAFEPTR]),
scase = tostruct([]*ir.Node{
namedfield("c", types.Types[types.TUNSAFEPTR]),
namedfield("elem", types.Types[types.TUNSAFEPTR]),
})
scase.SetNoalg(true)
}
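
The special cases in walkselectcases line up with distinct source forms; a
runnable demo of which form takes which path (semantics only; the actual
output of the rewrite is compiler IR, not Go source):

	package main

	import "fmt"

	func main() {
		c := make(chan int, 1)

		// One-case select: lowered to the plain channel operation
		// plus the body; no select machinery at all.
		select {
		case c <- 1:
			fmt.Println("sent")
		}

		// Two cases, one of them default: lowered to a single
		// non-blocking probe (the selectnbsend/selectnbrecv calls
		// above), i.e. an if/else.
		select {
		case v := <-c:
			fmt.Println("received", v)
		default:
			fmt.Println("empty")
		}

		// Two or more communicating cases and no default: the
		// general path, which fills the scase array and calls
		// runtime.selectgo. (A zero-case "select {}" simply
		// becomes a call into the runtime to block.)
		d := make(chan int, 1)
		d <- 2
		select {
		case v := <-c:
			fmt.Println("c:", v)
		case v := <-d:
			fmt.Println("d:", v)
		}
	}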

(File diff suppressed because it is too large.)

(File diff suppressed because it is too large.)

(File diff suppressed because it is too large.)


@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
"go/constant"
@ -14,16 +15,16 @@ import (
)
// typecheckswitch typechecks a switch statement.
func typecheckswitch(n *Node) {
func typecheckswitch(n *ir.Node) {
typecheckslice(n.Ninit.Slice(), ctxStmt)
if n.Left != nil && n.Left.Op == OTYPESW {
if n.Left != nil && n.Left.Op == ir.OTYPESW {
typecheckTypeSwitch(n)
} else {
typecheckExprSwitch(n)
}
}
func typecheckTypeSwitch(n *Node) {
func typecheckTypeSwitch(n *ir.Node) {
n.Left.Right = typecheck(n.Left.Right, ctxExpr)
t := n.Left.Right.Type
if t != nil && !t.IsInterface() {
@ -34,17 +35,17 @@ func typecheckTypeSwitch(n *Node) {
// We don't actually declare the type switch's guarded
// declaration itself. So if there are no cases, we won't
// notice that it went unused.
if v := n.Left.Left; v != nil && !v.isBlank() && n.List.Len() == 0 {
if v := n.Left.Left; v != nil && !ir.IsBlank(v) && n.List.Len() == 0 {
base.ErrorfAt(v.Pos, "%v declared but not used", v.Sym)
}
var defCase, nilCase *Node
var defCase, nilCase *ir.Node
var ts typeSet
for _, ncase := range n.List.Slice() {
ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", ir.Line(defCase))
} else {
defCase = ncase
}
@ -60,13 +61,13 @@ func typecheckTypeSwitch(n *Node) {
var missing, have *types.Field
var ptr int
switch {
case n1.isNil(): // case nil:
case ir.IsNil(n1): // case nil:
if nilCase != nil {
base.ErrorfAt(ncase.Pos, "multiple nil cases in type switch (first at %v)", nilCase.Line())
base.ErrorfAt(ncase.Pos, "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
} else {
nilCase = ncase
}
case n1.Op != OTYPE:
case n1.Op != ir.OTYPE:
base.ErrorfAt(ncase.Pos, "%L is not a type", n1)
case !n1.Type.IsInterface() && !implements(n1.Type, t, &missing, &have, &ptr) && !missing.Broke():
if have != nil && !have.Broke() {
@ -81,7 +82,7 @@ func typecheckTypeSwitch(n *Node) {
}
}
if n1.Op == OTYPE {
if n1.Op == ir.OTYPE {
ts.add(ncase.Pos, n1.Type)
}
}
@ -90,9 +91,9 @@ func typecheckTypeSwitch(n *Node) {
// Assign the clause variable's type.
vt := t
if len(ls) == 1 {
if ls[0].Op == OTYPE {
if ls[0].Op == ir.OTYPE {
vt = ls[0].Type
} else if !ls[0].isNil() {
} else if !ir.IsNil(ls[0]) {
// Invalid single-type case;
// mark variable as broken.
vt = nil
@ -143,8 +144,8 @@ func (s *typeSet) add(pos src.XPos, typ *types.Type) {
s.m[ls] = append(prevs, typeSetEntry{pos, typ})
}
func typecheckExprSwitch(n *Node) {
t := types.Types[TBOOL]
func typecheckExprSwitch(n *ir.Node) {
t := types.Types[types.TBOOL]
if n.Left != nil {
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, nil)
@ -156,7 +157,7 @@ func typecheckExprSwitch(n *Node) {
switch {
case t.IsMap():
nilonly = "map"
case t.Etype == TFUNC:
case t.Etype == types.TFUNC:
nilonly = "func"
case t.IsSlice():
nilonly = "slice"
@ -171,13 +172,13 @@ func typecheckExprSwitch(n *Node) {
}
}
var defCase *Node
var defCase *ir.Node
var cs constSet
for _, ncase := range n.List.Slice() {
ls := ncase.List.Slice()
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", defCase.Line())
base.ErrorfAt(ncase.Pos, "multiple defaults in switch (first at %v)", ir.Line(defCase))
} else {
defCase = ncase
}
@ -192,14 +193,14 @@ func typecheckExprSwitch(n *Node) {
continue
}
if nilonly != "" && !n1.isNil() {
if nilonly != "" && !ir.IsNil(n1) {
base.ErrorfAt(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
} else if t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type) {
base.ErrorfAt(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
} else {
op1, _ := assignop(n1.Type, t)
op2, _ := assignop(t, n1.Type)
if op1 == OXXX && op2 == OXXX {
if op1 == ir.OXXX && op2 == ir.OXXX {
if n.Left != nil {
base.ErrorfAt(ncase.Pos, "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Left, n1.Type, t)
} else {
@ -224,13 +225,13 @@ func typecheckExprSwitch(n *Node) {
}
// walkswitch walks a switch statement.
func walkswitch(sw *Node) {
func walkswitch(sw *ir.Node) {
// Guard against double walk, see #25776.
if sw.List.Len() == 0 && sw.Nbody.Len() > 0 {
return // Was fatal, but eliminating every possible source of double-walking is hard
}
if sw.Left != nil && sw.Left.Op == OTYPESW {
if sw.Left != nil && sw.Left.Op == ir.OTYPESW {
walkTypeSwitch(sw)
} else {
walkExprSwitch(sw)
@ -239,7 +240,7 @@ func walkswitch(sw *Node) {
// walkExprSwitch generates an AST implementing sw. sw is an
// expression switch.
func walkExprSwitch(sw *Node) {
func walkExprSwitch(sw *ir.Node) {
lno := setlineno(sw)
cond := sw.Left
@ -259,12 +260,12 @@ func walkExprSwitch(sw *Node) {
// because walkexpr will lower the string
// conversion into a runtime call.
// See issue 24937 for more discussion.
if cond.Op == OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
cond.Op = OBYTES2STRTMP
if cond.Op == ir.OBYTES2STR && allCaseExprsAreSideEffectFree(sw) {
cond.Op = ir.OBYTES2STRTMP
}
cond = walkexpr(cond, &sw.Ninit)
if cond.Op != OLITERAL && cond.Op != ONIL {
if cond.Op != ir.OLITERAL && cond.Op != ir.ONIL {
cond = copyexpr(cond, cond.Type, &sw.Nbody)
}
@ -274,11 +275,11 @@ func walkExprSwitch(sw *Node) {
exprname: cond,
}
var defaultGoto *Node
var body Nodes
var defaultGoto *ir.Node
var body ir.Nodes
for _, ncase := range sw.List.Slice() {
label := autolabel(".s")
jmp := npos(ncase.Pos, nodSym(OGOTO, nil, label))
jmp := npos(ncase.Pos, nodSym(ir.OGOTO, nil, label))
// Process case dispatch.
if ncase.List.Len() == 0 {
@ -293,10 +294,10 @@ func walkExprSwitch(sw *Node) {
}
// Process body.
body.Append(npos(ncase.Pos, nodSym(OLABEL, nil, label)))
body.Append(npos(ncase.Pos, nodSym(ir.OLABEL, nil, label)))
body.Append(ncase.Nbody.Slice()...)
if fall, pos := hasFall(ncase.Nbody.Slice()); !fall {
br := nod(OBREAK, nil, nil)
br := ir.Nod(ir.OBREAK, nil, nil)
br.Pos = pos
body.Append(br)
}
@ -304,7 +305,7 @@ func walkExprSwitch(sw *Node) {
sw.List.Set(nil)
if defaultGoto == nil {
br := nod(OBREAK, nil, nil)
br := ir.Nod(ir.OBREAK, nil, nil)
br.Pos = br.Pos.WithNotStmt()
defaultGoto = br
}
@ -317,21 +318,21 @@ func walkExprSwitch(sw *Node) {
// An exprSwitch walks an expression switch.
type exprSwitch struct {
exprname *Node // value being switched on
exprname *ir.Node // value being switched on
done Nodes
done ir.Nodes
clauses []exprClause
}
type exprClause struct {
pos src.XPos
lo, hi *Node
jmp *Node
lo, hi *ir.Node
jmp *ir.Node
}
func (s *exprSwitch) Add(pos src.XPos, expr, jmp *Node) {
func (s *exprSwitch) Add(pos src.XPos, expr, jmp *ir.Node) {
c := exprClause{pos: pos, lo: expr, hi: expr, jmp: jmp}
if okforcmp[s.exprname.Type.Etype] && expr.Op == OLITERAL {
if okforcmp[s.exprname.Type.Etype] && expr.Op == ir.OLITERAL {
s.clauses = append(s.clauses, c)
return
}
@ -341,7 +342,7 @@ func (s *exprSwitch) Add(pos src.XPos, expr, jmp *Node) {
s.flush()
}
func (s *exprSwitch) Emit(out *Nodes) {
func (s *exprSwitch) Emit(out *ir.Nodes) {
s.flush()
out.AppendNodes(&s.done)
}
@ -389,12 +390,12 @@ func (s *exprSwitch) flush() {
// Perform two-level binary search.
binarySearch(len(runs), &s.done,
func(i int) *Node {
return nod(OLE, nod(OLEN, s.exprname, nil), nodintconst(runLen(runs[i-1])))
func(i int) *ir.Node {
return ir.Nod(ir.OLE, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(runs[i-1])))
},
func(i int, nif *Node) {
func(i int, nif *ir.Node) {
run := runs[i]
nif.Left = nod(OEQ, nod(OLEN, s.exprname, nil), nodintconst(runLen(run)))
nif.Left = ir.Nod(ir.OEQ, ir.Nod(ir.OLEN, s.exprname, nil), nodintconst(runLen(run)))
s.search(run, &nif.Nbody)
},
)
@ -422,12 +423,12 @@ func (s *exprSwitch) flush() {
s.search(cc, &s.done)
}
func (s *exprSwitch) search(cc []exprClause, out *Nodes) {
func (s *exprSwitch) search(cc []exprClause, out *ir.Nodes) {
binarySearch(len(cc), out,
func(i int) *Node {
return nod(OLE, s.exprname, cc[i-1].hi)
func(i int) *ir.Node {
return ir.Nod(ir.OLE, s.exprname, cc[i-1].hi)
},
func(i int, nif *Node) {
func(i int, nif *ir.Node) {
c := &cc[i]
nif.Left = c.test(s.exprname)
nif.Nbody.Set1(c.jmp)
@ -435,27 +436,27 @@ func (s *exprSwitch) search(cc []exprClause, out *Nodes) {
)
}
func (c *exprClause) test(exprname *Node) *Node {
func (c *exprClause) test(exprname *ir.Node) *ir.Node {
// Integer range.
if c.hi != c.lo {
low := nodl(c.pos, OGE, exprname, c.lo)
high := nodl(c.pos, OLE, exprname, c.hi)
return nodl(c.pos, OANDAND, low, high)
low := ir.NodAt(c.pos, ir.OGE, exprname, c.lo)
high := ir.NodAt(c.pos, ir.OLE, exprname, c.hi)
return ir.NodAt(c.pos, ir.OANDAND, low, high)
}
// Optimize "switch true { ...}" and "switch false { ... }".
if Isconst(exprname, constant.Bool) && !c.lo.Type.IsInterface() {
if ir.IsConst(exprname, constant.Bool) && !c.lo.Type.IsInterface() {
if exprname.BoolVal() {
return c.lo
} else {
return nodl(c.pos, ONOT, c.lo, nil)
return ir.NodAt(c.pos, ir.ONOT, c.lo, nil)
}
}
return nodl(c.pos, OEQ, exprname, c.lo)
return ir.NodAt(c.pos, ir.OEQ, exprname, c.lo)
}
func allCaseExprsAreSideEffectFree(sw *Node) bool {
func allCaseExprsAreSideEffectFree(sw *ir.Node) bool {
// In theory, we could be more aggressive, allowing any
// side-effect-free expressions in cases, but it's a bit
// tricky because some of that information is unavailable due
@ -464,11 +465,11 @@ func allCaseExprsAreSideEffectFree(sw *Node) bool {
// enough.
for _, ncase := range sw.List.Slice() {
if ncase.Op != OCASE {
if ncase.Op != ir.OCASE {
base.Fatalf("switch string(byteslice) bad op: %v", ncase.Op)
}
for _, v := range ncase.List.Slice() {
if v.Op != OLITERAL {
if v.Op != ir.OLITERAL {
return false
}
}
@ -477,7 +478,7 @@ func allCaseExprsAreSideEffectFree(sw *Node) bool {
}
// hasFall reports whether stmts ends with a "fallthrough" statement.
func hasFall(stmts []*Node) (bool, src.XPos) {
func hasFall(stmts []*ir.Node) (bool, src.XPos) {
// Search backwards for the index of the fallthrough
// statement. Do not assume it'll be in the last
// position, since in some cases (e.g. when the statement
@ -485,30 +486,30 @@ func hasFall(stmts []*Node) (bool, src.XPos) {
// nodes will be at the end of the list.
i := len(stmts) - 1
for i >= 0 && stmts[i].Op == OVARKILL {
for i >= 0 && stmts[i].Op == ir.OVARKILL {
i--
}
if i < 0 {
return false, src.NoXPos
}
return stmts[i].Op == OFALL, stmts[i].Pos
return stmts[i].Op == ir.OFALL, stmts[i].Pos
}
// walkTypeSwitch generates an AST that implements sw, where sw is a
// type switch.
func walkTypeSwitch(sw *Node) {
func walkTypeSwitch(sw *ir.Node) {
var s typeSwitch
s.facename = sw.Left.Right
sw.Left = nil
s.facename = walkexpr(s.facename, &sw.Ninit)
s.facename = copyexpr(s.facename, s.facename.Type, &sw.Nbody)
s.okname = temp(types.Types[TBOOL])
s.okname = temp(types.Types[types.TBOOL])
// Get interface descriptor word.
// For empty interfaces this will be the type.
// For non-empty interfaces this will be the itab.
itab := nod(OITAB, s.facename, nil)
itab := ir.Nod(ir.OITAB, s.facename, nil)
// For empty interfaces, do:
// if e._type == nil {
@ -516,8 +517,8 @@ func walkTypeSwitch(sw *Node) {
// }
// h := e._type.hash
// Use a similar strategy for non-empty interfaces.
ifNil := nod(OIF, nil, nil)
ifNil.Left = nod(OEQ, itab, nodnil())
ifNil := ir.Nod(ir.OIF, nil, nil)
ifNil.Left = ir.Nod(ir.OEQ, itab, nodnil())
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
ifNil.Left = typecheck(ifNil.Left, ctxExpr)
ifNil.Left = defaultlit(ifNil.Left, nil)
@ -525,8 +526,8 @@ func walkTypeSwitch(sw *Node) {
sw.Nbody.Append(ifNil)
// Load hash from type or itab.
dotHash := nodSym(ODOTPTR, itab, nil)
dotHash.Type = types.Types[TUINT32]
dotHash := nodSym(ir.ODOTPTR, itab, nil)
dotHash.Type = types.Types[types.TUINT32]
dotHash.SetTypecheck(1)
if s.facename.Type.IsEmptyInterface() {
dotHash.Xoffset = int64(2 * Widthptr) // offset of hash in runtime._type
@ -536,11 +537,11 @@ func walkTypeSwitch(sw *Node) {
dotHash.SetBounded(true) // guaranteed not to fault
s.hashname = copyexpr(dotHash, dotHash.Type, &sw.Nbody)
br := nod(OBREAK, nil, nil)
var defaultGoto, nilGoto *Node
var body Nodes
br := ir.Nod(ir.OBREAK, nil, nil)
var defaultGoto, nilGoto *ir.Node
var body ir.Nodes
for _, ncase := range sw.List.Slice() {
var caseVar *Node
var caseVar *ir.Node
if ncase.Rlist.Len() != 0 {
caseVar = ncase.Rlist.First()
}
@ -549,13 +550,13 @@ func walkTypeSwitch(sw *Node) {
// we initialize the case variable as part of the type assertion.
// In other cases, we initialize it in the body.
var singleType *types.Type
if ncase.List.Len() == 1 && ncase.List.First().Op == OTYPE {
if ncase.List.Len() == 1 && ncase.List.First().Op == ir.OTYPE {
singleType = ncase.List.First().Type
}
caseVarInitialized := false
label := autolabel(".s")
jmp := npos(ncase.Pos, nodSym(OGOTO, nil, label))
jmp := npos(ncase.Pos, nodSym(ir.OGOTO, nil, label))
if ncase.List.Len() == 0 { // default:
if defaultGoto != nil {
@ -565,7 +566,7 @@ func walkTypeSwitch(sw *Node) {
}
for _, n1 := range ncase.List.Slice() {
if n1.isNil() { // case nil:
if ir.IsNil(n1) { // case nil:
if nilGoto != nil {
base.Fatalf("duplicate nil case not detected during typechecking")
}
@ -581,7 +582,7 @@ func walkTypeSwitch(sw *Node) {
}
}
body.Append(npos(ncase.Pos, nodSym(OLABEL, nil, label)))
body.Append(npos(ncase.Pos, nodSym(ir.OLABEL, nil, label)))
if caseVar != nil && !caseVarInitialized {
val := s.facename
if singleType != nil {
@ -591,9 +592,9 @@ func walkTypeSwitch(sw *Node) {
}
val = ifaceData(ncase.Pos, s.facename, singleType)
}
l := []*Node{
nodl(ncase.Pos, ODCL, caseVar, nil),
nodl(ncase.Pos, OAS, caseVar, val),
l := []*ir.Node{
ir.NodAt(ncase.Pos, ir.ODCL, caseVar, nil),
ir.NodAt(ncase.Pos, ir.OAS, caseVar, val),
}
typecheckslice(l, ctxStmt)
body.Append(l...)
@ -621,36 +622,36 @@ func walkTypeSwitch(sw *Node) {
// A typeSwitch walks a type switch.
type typeSwitch struct {
// Temporary variables (i.e., ONAMEs) used by type switch dispatch logic:
facename *Node // value being type-switched on
hashname *Node // type hash of the value being type-switched on
okname *Node // boolean used for comma-ok type assertions
facename *ir.Node // value being type-switched on
hashname *ir.Node // type hash of the value being type-switched on
okname *ir.Node // boolean used for comma-ok type assertions
done Nodes
done ir.Nodes
clauses []typeClause
}
type typeClause struct {
hash uint32
body Nodes
body ir.Nodes
}
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *Node) {
var body Nodes
func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *ir.Node) {
var body ir.Nodes
if caseVar != nil {
l := []*Node{
nodl(pos, ODCL, caseVar, nil),
nodl(pos, OAS, caseVar, nil),
l := []*ir.Node{
ir.NodAt(pos, ir.ODCL, caseVar, nil),
ir.NodAt(pos, ir.OAS, caseVar, nil),
}
typecheckslice(l, ctxStmt)
body.Append(l...)
} else {
caseVar = nblank
caseVar = ir.BlankNode
}
// cv, ok = iface.(type)
as := nodl(pos, OAS2, nil, nil)
as := ir.NodAt(pos, ir.OAS2, nil, nil)
as.List.Set2(caseVar, s.okname) // cv, ok =
dot := nodl(pos, ODOTTYPE, s.facename, nil)
dot := ir.NodAt(pos, ir.ODOTTYPE, s.facename, nil)
dot.Type = typ // iface.(type)
as.Rlist.Set1(dot)
as = typecheck(as, ctxStmt)
@ -658,7 +659,7 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *Node) {
body.Append(as)
// if ok { goto label }
nif := nodl(pos, OIF, nil, nil)
nif := ir.NodAt(pos, ir.OIF, nil, nil)
nif.Left = s.okname
nif.Nbody.Set1(jmp)
body.Append(nif)
@ -675,7 +676,7 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp *Node) {
s.done.AppendNodes(&body)
}
func (s *typeSwitch) Emit(out *Nodes) {
func (s *typeSwitch) Emit(out *ir.Nodes) {
s.flush()
out.AppendNodes(&s.done)
}
@ -702,14 +703,14 @@ func (s *typeSwitch) flush() {
cc = merged
binarySearch(len(cc), &s.done,
func(i int) *Node {
return nod(OLE, s.hashname, nodintconst(int64(cc[i-1].hash)))
func(i int) *ir.Node {
return ir.Nod(ir.OLE, s.hashname, nodintconst(int64(cc[i-1].hash)))
},
func(i int, nif *Node) {
func(i int, nif *ir.Node) {
// TODO(mdempsky): Omit hash equality check if
// there's only one type.
c := cc[i]
nif.Left = nod(OEQ, s.hashname, nodintconst(int64(c.hash)))
nif.Left = ir.Nod(ir.OEQ, s.hashname, nodintconst(int64(c.hash)))
nif.Nbody.AppendNodes(&c.body)
},
)
@ -724,15 +725,15 @@ func (s *typeSwitch) flush() {
//
// leaf(i, nif) should setup nif (an OIF node) to test case i. In
// particular, it should set nif.Left and nif.Nbody.
func binarySearch(n int, out *Nodes, less func(i int) *Node, leaf func(i int, nif *Node)) {
func binarySearch(n int, out *ir.Nodes, less func(i int) *ir.Node, leaf func(i int, nif *ir.Node)) {
const binarySearchMin = 4 // minimum number of cases for binary search
var do func(lo, hi int, out *Nodes)
do = func(lo, hi int, out *Nodes) {
var do func(lo, hi int, out *ir.Nodes)
do = func(lo, hi int, out *ir.Nodes) {
n := hi - lo
if n < binarySearchMin {
for i := lo; i < hi; i++ {
nif := nod(OIF, nil, nil)
nif := ir.Nod(ir.OIF, nil, nil)
leaf(i, nif)
base.Pos = base.Pos.WithNotStmt()
nif.Left = typecheck(nif.Left, ctxExpr)
@ -744,7 +745,7 @@ func binarySearch(n int, out *Nodes, less func(i int) *Node, leaf func(i int, ni
}
half := lo + n/2
nif := nod(OIF, nil, nil)
nif := ir.Nod(ir.OIF, nil, nil)
nif.Left = less(half)
base.Pos = base.Pos.WithNotStmt()
nif.Left = typecheck(nif.Left, ctxExpr)
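
binarySearch is generic over how the conditions are built: less(i) makes
the probe that splits the range and leaf fills in the final equality
tests. A toy version that prints the tree it would generate instead of
building OIF nodes:

	package main

	import "fmt"

	func binarySearch(lo, hi int, less func(i int) string, leaf func(i int)) {
		const binarySearchMin = 4
		if hi-lo < binarySearchMin {
			for i := lo; i < hi; i++ {
				leaf(i)
			}
			return
		}
		half := lo + (hi-lo)/2
		fmt.Printf("if %s {\n", less(half))
		binarySearch(lo, half, less, leaf)
		fmt.Println("} else {")
		binarySearch(half, hi, less, leaf)
		fmt.Println("}")
	}

	func main() {
		cases := []int{2, 3, 5, 7, 11, 13, 17, 19}
		binarySearch(0, len(cases),
			func(i int) string { return fmt.Sprintf("x <= %d", cases[i-1]) },
			func(i int) { fmt.Printf("if x == %d { goto case%d }\n", cases[i], i) },
		)
	}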

(File diff suppressed because it is too large.)


@ -3,56 +3,3 @@
// license that can be found in the LICENSE file.
package gc
import (
"cmd/compile/internal/types"
)
// convenience constants
const (
Txxx = types.Txxx
TINT8 = types.TINT8
TUINT8 = types.TUINT8
TINT16 = types.TINT16
TUINT16 = types.TUINT16
TINT32 = types.TINT32
TUINT32 = types.TUINT32
TINT64 = types.TINT64
TUINT64 = types.TUINT64
TINT = types.TINT
TUINT = types.TUINT
TUINTPTR = types.TUINTPTR
TCOMPLEX64 = types.TCOMPLEX64
TCOMPLEX128 = types.TCOMPLEX128
TFLOAT32 = types.TFLOAT32
TFLOAT64 = types.TFLOAT64
TBOOL = types.TBOOL
TPTR = types.TPTR
TFUNC = types.TFUNC
TSLICE = types.TSLICE
TARRAY = types.TARRAY
TSTRUCT = types.TSTRUCT
TCHAN = types.TCHAN
TMAP = types.TMAP
TINTER = types.TINTER
TFORW = types.TFORW
TANY = types.TANY
TSTRING = types.TSTRING
TUNSAFEPTR = types.TUNSAFEPTR
// pseudo-types for literals
TIDEAL = types.TIDEAL
TNIL = types.TNIL
TBLANK = types.TBLANK
// pseudo-types for frame layout
TFUNCARGS = types.TFUNCARGS
TCHANARGS = types.TCHANARGS
NTYPE = types.NTYPE
)


@ -6,11 +6,3 @@
// TODO(gri) try to eliminate these soon
package gc
import (
"cmd/compile/internal/types"
"unsafe"
)
func asNode(n *types.Node) *Node { return (*Node)(unsafe.Pointer(n)) }
func asTypesNode(n *Node) *types.Node { return (*types.Node)(unsafe.Pointer(n)) }


@ -8,31 +8,29 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
// builtinpkg is a fake package that declares the universe block.
var builtinpkg *types.Pkg
var basicTypes = [...]struct {
name string
etype types.EType
}{
{"int8", TINT8},
{"int16", TINT16},
{"int32", TINT32},
{"int64", TINT64},
{"uint8", TUINT8},
{"uint16", TUINT16},
{"uint32", TUINT32},
{"uint64", TUINT64},
{"float32", TFLOAT32},
{"float64", TFLOAT64},
{"complex64", TCOMPLEX64},
{"complex128", TCOMPLEX128},
{"bool", TBOOL},
{"string", TSTRING},
{"int8", types.TINT8},
{"int16", types.TINT16},
{"int32", types.TINT32},
{"int64", types.TINT64},
{"uint8", types.TUINT8},
{"uint16", types.TUINT16},
{"uint32", types.TUINT32},
{"uint64", types.TUINT64},
{"float32", types.TFLOAT32},
{"float64", types.TFLOAT64},
{"complex64", types.TCOMPLEX64},
{"complex128", types.TCOMPLEX128},
{"bool", types.TBOOL},
{"string", types.TSTRING},
}
var typedefs = [...]struct {
@ -41,30 +39,30 @@ var typedefs = [...]struct {
sameas32 types.EType
sameas64 types.EType
}{
{"int", TINT, TINT32, TINT64},
{"uint", TUINT, TUINT32, TUINT64},
{"uintptr", TUINTPTR, TUINT32, TUINT64},
{"int", types.TINT, types.TINT32, types.TINT64},
{"uint", types.TUINT, types.TUINT32, types.TUINT64},
{"uintptr", types.TUINTPTR, types.TUINT32, types.TUINT64},
}
var builtinFuncs = [...]struct {
name string
op Op
op ir.Op
}{
{"append", OAPPEND},
{"cap", OCAP},
{"close", OCLOSE},
{"complex", OCOMPLEX},
{"copy", OCOPY},
{"delete", ODELETE},
{"imag", OIMAG},
{"len", OLEN},
{"make", OMAKE},
{"new", ONEW},
{"panic", OPANIC},
{"print", OPRINT},
{"println", OPRINTN},
{"real", OREAL},
{"recover", ORECOVER},
{"append", ir.OAPPEND},
{"cap", ir.OCAP},
{"close", ir.OCLOSE},
{"complex", ir.OCOMPLEX},
{"copy", ir.OCOPY},
{"delete", ir.ODELETE},
{"imag", ir.OIMAG},
{"len", ir.OLEN},
{"make", ir.OMAKE},
{"new", ir.ONEW},
{"panic", ir.OPANIC},
{"print", ir.OPRINT},
{"println", ir.OPRINTN},
{"real", ir.OREAL},
{"recover", ir.ORECOVER},
}
// isBuiltinFuncName reports whether name matches a builtin function
@ -80,11 +78,11 @@ func isBuiltinFuncName(name string) bool {
var unsafeFuncs = [...]struct {
name string
op Op
op ir.Op
}{
{"Alignof", OALIGNOF},
{"Offsetof", OOFFSETOF},
{"Sizeof", OSIZEOF},
{"Alignof", ir.OALIGNOF},
{"Offsetof", ir.OOFFSETOF},
{"Sizeof", ir.OSIZEOF},
}
// initUniverse initializes the universe block.
@ -101,71 +99,71 @@ func lexinit() {
if int(etype) >= len(types.Types) {
base.Fatalf("lexinit: %s bad etype", s.name)
}
s2 := builtinpkg.Lookup(s.name)
s2 := ir.BuiltinPkg.Lookup(s.name)
t := types.Types[etype]
if t == nil {
t = types.New(etype)
t.Sym = s2
if etype != TANY && etype != TSTRING {
if etype != types.TANY && etype != types.TSTRING {
dowidth(t)
}
types.Types[etype] = t
}
s2.Def = asTypesNode(typenod(t))
asNode(s2.Def).Name = new(Name)
s2.Def = ir.AsTypesNode(typenod(t))
ir.AsNode(s2.Def).Name = new(ir.Name)
}
for _, s := range &builtinFuncs {
s2 := builtinpkg.Lookup(s.name)
s2.Def = asTypesNode(newname(s2))
asNode(s2.Def).SetSubOp(s.op)
s2 := ir.BuiltinPkg.Lookup(s.name)
s2.Def = ir.AsTypesNode(NewName(s2))
ir.AsNode(s2.Def).SetSubOp(s.op)
}
for _, s := range &unsafeFuncs {
s2 := unsafepkg.Lookup(s.name)
s2.Def = asTypesNode(newname(s2))
asNode(s2.Def).SetSubOp(s.op)
s2.Def = ir.AsTypesNode(NewName(s2))
ir.AsNode(s2.Def).SetSubOp(s.op)
}
types.UntypedString = types.New(TSTRING)
types.UntypedBool = types.New(TBOOL)
types.Types[TANY] = types.New(TANY)
types.UntypedString = types.New(types.TSTRING)
types.UntypedBool = types.New(types.TBOOL)
types.Types[types.TANY] = types.New(types.TANY)
s := builtinpkg.Lookup("true")
s.Def = asTypesNode(nodbool(true))
asNode(s.Def).Sym = lookup("true")
asNode(s.Def).Name = new(Name)
asNode(s.Def).Type = types.UntypedBool
s := ir.BuiltinPkg.Lookup("true")
s.Def = ir.AsTypesNode(nodbool(true))
ir.AsNode(s.Def).Sym = lookup("true")
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).Type = types.UntypedBool
s = builtinpkg.Lookup("false")
s.Def = asTypesNode(nodbool(false))
asNode(s.Def).Sym = lookup("false")
asNode(s.Def).Name = new(Name)
asNode(s.Def).Type = types.UntypedBool
s = ir.BuiltinPkg.Lookup("false")
s.Def = ir.AsTypesNode(nodbool(false))
ir.AsNode(s.Def).Sym = lookup("false")
ir.AsNode(s.Def).Name = new(ir.Name)
ir.AsNode(s.Def).Type = types.UntypedBool
s = lookup("_")
s.Block = -100
s.Def = asTypesNode(newname(s))
types.Types[TBLANK] = types.New(TBLANK)
asNode(s.Def).Type = types.Types[TBLANK]
nblank = asNode(s.Def)
s.Def = ir.AsTypesNode(NewName(s))
types.Types[types.TBLANK] = types.New(types.TBLANK)
ir.AsNode(s.Def).Type = types.Types[types.TBLANK]
ir.BlankNode = ir.AsNode(s.Def)
s = builtinpkg.Lookup("_")
s = ir.BuiltinPkg.Lookup("_")
s.Block = -100
s.Def = asTypesNode(newname(s))
types.Types[TBLANK] = types.New(TBLANK)
asNode(s.Def).Type = types.Types[TBLANK]
s.Def = ir.AsTypesNode(NewName(s))
types.Types[types.TBLANK] = types.New(types.TBLANK)
ir.AsNode(s.Def).Type = types.Types[types.TBLANK]
types.Types[TNIL] = types.New(TNIL)
s = builtinpkg.Lookup("nil")
s.Def = asTypesNode(nodnil())
asNode(s.Def).Sym = s
asNode(s.Def).Name = new(Name)
types.Types[types.TNIL] = types.New(types.TNIL)
s = ir.BuiltinPkg.Lookup("nil")
s.Def = ir.AsTypesNode(nodnil())
ir.AsNode(s.Def).Sym = s
ir.AsNode(s.Def).Name = new(ir.Name)
s = builtinpkg.Lookup("iota")
s.Def = asTypesNode(nod(OIOTA, nil, nil))
asNode(s.Def).Sym = s
asNode(s.Def).Name = new(Name)
s = ir.BuiltinPkg.Lookup("iota")
s.Def = ir.AsTypesNode(ir.Nod(ir.OIOTA, nil, nil))
ir.AsNode(s.Def).Sym = s
ir.AsNode(s.Def).Name = new(ir.Name)
}
func typeinit() {
@ -173,42 +171,42 @@ func typeinit() {
base.Fatalf("typeinit before betypeinit")
}
for et := types.EType(0); et < NTYPE; et++ {
for et := types.EType(0); et < types.NTYPE; et++ {
simtype[et] = et
}
types.Types[TPTR] = types.New(TPTR)
dowidth(types.Types[TPTR])
types.Types[types.TPTR] = types.New(types.TPTR)
dowidth(types.Types[types.TPTR])
t := types.New(TUNSAFEPTR)
types.Types[TUNSAFEPTR] = t
t := types.New(types.TUNSAFEPTR)
types.Types[types.TUNSAFEPTR] = t
t.Sym = unsafepkg.Lookup("Pointer")
t.Sym.Def = asTypesNode(typenod(t))
asNode(t.Sym.Def).Name = new(Name)
dowidth(types.Types[TUNSAFEPTR])
t.Sym.Def = ir.AsTypesNode(typenod(t))
ir.AsNode(t.Sym.Def).Name = new(ir.Name)
dowidth(types.Types[types.TUNSAFEPTR])
for et := TINT8; et <= TUINT64; et++ {
for et := types.TINT8; et <= types.TUINT64; et++ {
isInt[et] = true
}
isInt[TINT] = true
isInt[TUINT] = true
isInt[TUINTPTR] = true
isInt[types.TINT] = true
isInt[types.TUINT] = true
isInt[types.TUINTPTR] = true
isFloat[TFLOAT32] = true
isFloat[TFLOAT64] = true
isFloat[types.TFLOAT32] = true
isFloat[types.TFLOAT64] = true
isComplex[TCOMPLEX64] = true
isComplex[TCOMPLEX128] = true
isComplex[types.TCOMPLEX64] = true
isComplex[types.TCOMPLEX128] = true
// initialize okfor
for et := types.EType(0); et < NTYPE; et++ {
if isInt[et] || et == TIDEAL {
for et := types.EType(0); et < types.NTYPE; et++ {
if isInt[et] || et == types.TIDEAL {
okforeq[et] = true
okforcmp[et] = true
okforarith[et] = true
okforadd[et] = true
okforand[et] = true
okforconst[et] = true
ir.OKForConst[et] = true
issimple[et] = true
}
@ -217,7 +215,7 @@ func typeinit() {
okforcmp[et] = true
okforadd[et] = true
okforarith[et] = true
okforconst[et] = true
ir.OKForConst[et] = true
issimple[et] = true
}
@ -225,43 +223,43 @@ func typeinit() {
okforeq[et] = true
okforadd[et] = true
okforarith[et] = true
okforconst[et] = true
ir.OKForConst[et] = true
issimple[et] = true
}
}
issimple[TBOOL] = true
issimple[types.TBOOL] = true
okforadd[TSTRING] = true
okforadd[types.TSTRING] = true
okforbool[TBOOL] = true
okforbool[types.TBOOL] = true
okforcap[TARRAY] = true
okforcap[TCHAN] = true
okforcap[TSLICE] = true
okforcap[types.TARRAY] = true
okforcap[types.TCHAN] = true
okforcap[types.TSLICE] = true
okforconst[TBOOL] = true
okforconst[TSTRING] = true
ir.OKForConst[types.TBOOL] = true
ir.OKForConst[types.TSTRING] = true
okforlen[TARRAY] = true
okforlen[TCHAN] = true
okforlen[TMAP] = true
okforlen[TSLICE] = true
okforlen[TSTRING] = true
okforlen[types.TARRAY] = true
okforlen[types.TCHAN] = true
okforlen[types.TMAP] = true
okforlen[types.TSLICE] = true
okforlen[types.TSTRING] = true
okforeq[TPTR] = true
okforeq[TUNSAFEPTR] = true
okforeq[TINTER] = true
okforeq[TCHAN] = true
okforeq[TSTRING] = true
okforeq[TBOOL] = true
okforeq[TMAP] = true // nil only; refined in typecheck
okforeq[TFUNC] = true // nil only; refined in typecheck
okforeq[TSLICE] = true // nil only; refined in typecheck
okforeq[TARRAY] = true // only if element type is comparable; refined in typecheck
okforeq[TSTRUCT] = true // only if all struct fields are comparable; refined in typecheck
okforeq[types.TPTR] = true
okforeq[types.TUNSAFEPTR] = true
okforeq[types.TINTER] = true
okforeq[types.TCHAN] = true
okforeq[types.TSTRING] = true
okforeq[types.TBOOL] = true
okforeq[types.TMAP] = true // nil only; refined in typecheck
okforeq[types.TFUNC] = true // nil only; refined in typecheck
okforeq[types.TSLICE] = true // nil only; refined in typecheck
okforeq[types.TARRAY] = true // only if element type is comparable; refined in typecheck
okforeq[types.TSTRUCT] = true // only if all struct fields are comparable; refined in typecheck
okforcmp[TSTRING] = true
okforcmp[types.TSTRING] = true
var i int
for i = 0; i < len(okfor); i++ {
@ -269,51 +267,51 @@ func typeinit() {
}
// binary
okfor[OADD] = okforadd[:]
okfor[OAND] = okforand[:]
okfor[OANDAND] = okforbool[:]
okfor[OANDNOT] = okforand[:]
okfor[ODIV] = okforarith[:]
okfor[OEQ] = okforeq[:]
okfor[OGE] = okforcmp[:]
okfor[OGT] = okforcmp[:]
okfor[OLE] = okforcmp[:]
okfor[OLT] = okforcmp[:]
okfor[OMOD] = okforand[:]
okfor[OMUL] = okforarith[:]
okfor[ONE] = okforeq[:]
okfor[OOR] = okforand[:]
okfor[OOROR] = okforbool[:]
okfor[OSUB] = okforarith[:]
okfor[OXOR] = okforand[:]
okfor[OLSH] = okforand[:]
okfor[ORSH] = okforand[:]
okfor[ir.OADD] = okforadd[:]
okfor[ir.OAND] = okforand[:]
okfor[ir.OANDAND] = okforbool[:]
okfor[ir.OANDNOT] = okforand[:]
okfor[ir.ODIV] = okforarith[:]
okfor[ir.OEQ] = okforeq[:]
okfor[ir.OGE] = okforcmp[:]
okfor[ir.OGT] = okforcmp[:]
okfor[ir.OLE] = okforcmp[:]
okfor[ir.OLT] = okforcmp[:]
okfor[ir.OMOD] = okforand[:]
okfor[ir.OMUL] = okforarith[:]
okfor[ir.ONE] = okforeq[:]
okfor[ir.OOR] = okforand[:]
okfor[ir.OOROR] = okforbool[:]
okfor[ir.OSUB] = okforarith[:]
okfor[ir.OXOR] = okforand[:]
okfor[ir.OLSH] = okforand[:]
okfor[ir.ORSH] = okforand[:]
// unary
okfor[OBITNOT] = okforand[:]
okfor[ONEG] = okforarith[:]
okfor[ONOT] = okforbool[:]
okfor[OPLUS] = okforarith[:]
okfor[ir.OBITNOT] = okforand[:]
okfor[ir.ONEG] = okforarith[:]
okfor[ir.ONOT] = okforbool[:]
okfor[ir.OPLUS] = okforarith[:]
// special
okfor[OCAP] = okforcap[:]
okfor[OLEN] = okforlen[:]
okfor[ir.OCAP] = okforcap[:]
okfor[ir.OLEN] = okforlen[:]
// comparison
iscmp[OLT] = true
iscmp[OGT] = true
iscmp[OGE] = true
iscmp[OLE] = true
iscmp[OEQ] = true
iscmp[ONE] = true
iscmp[ir.OLT] = true
iscmp[ir.OGT] = true
iscmp[ir.OGE] = true
iscmp[ir.OLE] = true
iscmp[ir.OEQ] = true
iscmp[ir.ONE] = true
types.Types[TINTER] = types.New(TINTER) // empty interface
types.Types[types.TINTER] = types.New(types.TINTER) // empty interface
// simple aliases
simtype[TMAP] = TPTR
simtype[TCHAN] = TPTR
simtype[TFUNC] = TPTR
simtype[TUNSAFEPTR] = TPTR
simtype[types.TMAP] = types.TPTR
simtype[types.TCHAN] = types.TPTR
simtype[types.TFUNC] = types.TPTR
simtype[types.TUNSAFEPTR] = types.TPTR
slicePtrOffset = 0
sliceLenOffset = Rnd(slicePtrOffset+int64(Widthptr), int64(Widthptr))
@ -323,29 +321,29 @@ func typeinit() {
// string is same as slice wo the cap
sizeofString = Rnd(sliceLenOffset+int64(Widthptr), int64(Widthptr))
dowidth(types.Types[TSTRING])
dowidth(types.Types[types.TSTRING])
dowidth(types.UntypedString)
}
func makeErrorInterface() *types.Type {
sig := functypefield(fakeRecvField(), nil, []*types.Field{
types.NewField(src.NoXPos, nil, types.Types[TSTRING]),
types.NewField(src.NoXPos, nil, types.Types[types.TSTRING]),
})
method := types.NewField(src.NoXPos, lookup("Error"), sig)
t := types.New(TINTER)
t := types.New(types.TINTER)
t.SetInterface([]*types.Field{method})
return t
}
func lexinit1() {
// error type
s := builtinpkg.Lookup("error")
s := ir.BuiltinPkg.Lookup("error")
types.Errortype = makeErrorInterface()
types.Errortype.Sym = s
types.Errortype.Orig = makeErrorInterface()
s.Def = asTypesNode(typenod(types.Errortype))
s.Def = ir.AsTypesNode(typenod(types.Errortype))
dowidth(types.Errortype)
// We create separate byte and rune types for better error messages
@ -357,24 +355,24 @@ func lexinit1() {
// type aliases, albeit at the cost of having to deal with it everywhere).
// byte alias
s = builtinpkg.Lookup("byte")
types.Bytetype = types.New(TUINT8)
s = ir.BuiltinPkg.Lookup("byte")
types.Bytetype = types.New(types.TUINT8)
types.Bytetype.Sym = s
s.Def = asTypesNode(typenod(types.Bytetype))
asNode(s.Def).Name = new(Name)
s.Def = ir.AsTypesNode(typenod(types.Bytetype))
ir.AsNode(s.Def).Name = new(ir.Name)
dowidth(types.Bytetype)
// rune alias
s = builtinpkg.Lookup("rune")
types.Runetype = types.New(TINT32)
s = ir.BuiltinPkg.Lookup("rune")
types.Runetype = types.New(types.TINT32)
types.Runetype.Sym = s
s.Def = asTypesNode(typenod(types.Runetype))
asNode(s.Def).Name = new(Name)
s.Def = ir.AsTypesNode(typenod(types.Runetype))
ir.AsNode(s.Def).Name = new(ir.Name)
dowidth(types.Runetype)
// backend-dependent builtin types (e.g. int).
for _, s := range &typedefs {
s1 := builtinpkg.Lookup(s.name)
s1 := ir.BuiltinPkg.Lookup(s.name)
sameas := s.sameas32
if Widthptr == 8 {
@ -386,9 +384,9 @@ func lexinit1() {
t := types.New(s.etype)
t.Sym = s1
types.Types[s.etype] = t
s1.Def = asTypesNode(typenod(t))
asNode(s1.Def).Name = new(Name)
s1.Origpkg = builtinpkg
s1.Def = ir.AsTypesNode(typenod(t))
ir.AsNode(s1.Def).Name = new(ir.Name)
s1.Origpkg = ir.BuiltinPkg
dowidth(t)
}
@ -400,7 +398,7 @@ func finishUniverse() {
// that we silently skip symbols that are already declared in the
// package block rather than emitting a redeclared symbol error.
for _, s := range builtinpkg.Syms {
for _, s := range ir.BuiltinPkg.Syms {
if s.Def == nil {
continue
}
@ -413,8 +411,8 @@ func finishUniverse() {
s1.Block = s.Block
}
nodfp = newname(lookup(".fp"))
nodfp.Type = types.Types[TINT32]
nodfp.SetClass(PPARAM)
nodfp = NewName(lookup(".fp"))
nodfp.Type = types.Types[types.TINT32]
nodfp.SetClass(ir.PPARAM)
nodfp.Name.SetUsed(true)
}
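
The okfor wiring above amounts to one permitted-type vector per class of
operand, shared across every operator in that class. A miniature of the
pattern, with stand-in enums for types.EType and ir.Op:

	package main

	import "fmt"

	type etype int

	const (
		tInt etype = iota
		tString
		tBool
		nType
	)

	type op int

	const (
		oAdd op = iota
		oAndAnd
		nOp
	)

	var (
		okforadd  [nType]bool
		okforbool [nType]bool
		okfor     [nOp][]bool // one vector per operator
	)

	func main() {
		okforadd[tInt] = true
		okforadd[tString] = true // + also concatenates strings
		okforbool[tBool] = true

		okfor[oAdd] = okforadd[:]
		okfor[oAndAnd] = okforbool[:]

		fmt.Println(okfor[oAdd][tInt])     // true
		fmt.Println(okfor[oAdd][tBool])    // false
		fmt.Println(okfor[oAndAnd][tBool]) // true
	}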


@ -4,12 +4,15 @@
package gc
import "cmd/compile/internal/base"
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
)
// evalunsafe evaluates a package unsafe operation and returns the result.
func evalunsafe(n *Node) int64 {
func evalunsafe(n *ir.Node) int64 {
switch n.Op {
case OALIGNOF, OSIZEOF:
case ir.OALIGNOF, ir.OSIZEOF:
n.Left = typecheck(n.Left, ctxExpr)
n.Left = defaultlit(n.Left, nil)
tr := n.Left.Type
@ -17,14 +20,14 @@ func evalunsafe(n *Node) int64 {
return 0
}
dowidth(tr)
if n.Op == OALIGNOF {
if n.Op == ir.OALIGNOF {
return int64(tr.Align)
}
return tr.Width
case OOFFSETOF:
case ir.OOFFSETOF:
// must be a selector.
if n.Left.Op != OXDOT {
if n.Left.Op != ir.OXDOT {
base.Errorf("invalid expression %v", n)
return 0
}
@ -40,9 +43,9 @@ func evalunsafe(n *Node) int64 {
return 0
}
switch n.Left.Op {
case ODOT, ODOTPTR:
case ir.ODOT, ir.ODOTPTR:
break
case OCALLPART:
case ir.OCALLPART:
base.Errorf("invalid expression %v: argument is a method value", n)
return 0
default:
@ -54,7 +57,7 @@ func evalunsafe(n *Node) int64 {
var v int64
for r := n.Left; r != sbase; r = r.Left {
switch r.Op {
case ODOTPTR:
case ir.ODOTPTR:
// For Offsetof(s.f), s may itself be a pointer,
// but accessing f must not otherwise involve
// indirection via embedded pointer types.
@ -63,10 +66,10 @@ func evalunsafe(n *Node) int64 {
return 0
}
fallthrough
case ODOT:
case ir.ODOT:
v += r.Xoffset
default:
Dump("unsafenmagic", n.Left)
ir.Dump("unsafenmagic", n.Left)
base.Fatalf("impossible %#v node after dot insertion", r.Op)
}
}
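
The ODOT/ODOTPTR loop folds unsafe.Offsetof to a constant by summing
Xoffset along the selector chain back to its base, rejecting any step that
would dereference an embedded pointer. The language-level result, on a
typical 64-bit target:

	package main

	import (
		"fmt"
		"unsafe"
	)

	type inner struct{ a, b int32 }

	type outer struct {
		x  int64
		in inner
	}

	func main() {
		var o outer
		fmt.Println(unsafe.Sizeof(o.x))     // 8
		fmt.Println(unsafe.Alignof(o.in.a)) // 4
		// Offsetof(o.in.b) = offset of in (8) + offset of b (4).
		fmt.Println(unsafe.Offsetof(o.in.b)) // 12
	}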


@ -12,12 +12,6 @@ import (
"cmd/compile/internal/base"
)
// Line returns n's position as a string. If n has been inlined,
// it uses the outermost position where n has been inlined.
func (n *Node) Line() string {
return base.FmtPos(n.Pos)
}
var (
memprofilerate int64
traceHandler func(string)

(File diff suppressed because it is too large.)


@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
package ir
type bitset8 uint8


@ -1,6 +1,6 @@
// Code generated by "stringer -type=Class"; DO NOT EDIT.
package gc
package ir
import "strconv"


@ -6,22 +6,23 @@
// for debugging purposes. The code is customized for Node graphs
// and may be used for an alternative view of the node structure.
package gc
package ir
import (
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
"io"
"os"
"reflect"
"regexp"
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
)
// dump is like fdump but prints to stderr.
func dump(root interface{}, filter string, depth int) {
fdump(os.Stderr, root, filter, depth)
func DumpAny(root interface{}, filter string, depth int) {
FDumpAny(os.Stderr, root, filter, depth)
}
// fdump prints the structure of a rooted data structure
@ -41,7 +42,7 @@ func dump(root interface{}, filter string, depth int) {
// rather than their type; struct fields with zero values or
// non-matching field names are omitted, and "…" means recursion
// depth has been reached or struct fields have been omitted.
func fdump(w io.Writer, root interface{}, filter string, depth int) {
func FDumpAny(w io.Writer, root interface{}, filter string, depth int) {
if root == nil {
fmt.Fprintln(w, "nil")
return
@ -151,7 +152,7 @@ func (p *dumper) dump(x reflect.Value, depth int) {
return
case *types.Node:
x = reflect.ValueOf(asNode(v))
x = reflect.ValueOf(AsNode(v))
}
switch x.Kind() {
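
The dumper is an ordinary reflection walk. A miniature of the idea, with
no name filtering, cycle handling, or the *types.Node unwrapping the real
code does:

	package main

	import (
		"fmt"
		"reflect"
	)

	// dump prints a rooted structure, omitting zero-valued fields and
	// cutting off at a depth limit, like FDumpAny (much simplified).
	func dump(x reflect.Value, indent string, depth int) {
		if depth == 0 {
			fmt.Println(indent + "…")
			return
		}
		switch x.Kind() {
		case reflect.Ptr, reflect.Interface:
			if !x.IsNil() {
				dump(x.Elem(), indent, depth)
			}
		case reflect.Struct:
			fmt.Println(indent + x.Type().Name() + " {")
			for i := 0; i < x.NumField(); i++ {
				f := x.Field(i)
				if f.IsZero() {
					continue // zero fields omitted
				}
				fmt.Println(indent + "  " + x.Type().Field(i).Name + ":")
				dump(f, indent+"    ", depth-1)
			}
			fmt.Println(indent + "}")
		default:
			fmt.Printf("%s%v\n", indent, x)
		}
	}

	func main() {
		type node struct {
			Op          string
			Left, Right *node
		}
		dump(reflect.ValueOf(&node{Op: "ADD", Left: &node{Op: "LIT"}}), "", 3)
	}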


@ -2,13 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
package ir
import (
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
"go/constant"
"io"
@ -16,6 +13,10 @@ import (
"strings"
"sync"
"unicode/utf8"
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/src"
)
// A FmtFlag value is a set of flags (or 0).
@ -98,7 +99,7 @@ func fmtFlag(s fmt.State, verb rune) FmtFlag {
// *types.Sym, *types.Type, and *Node types use the flags below to set the format mode
const (
FErr fmtMode = iota
FErr FmtMode = iota
FDbg
FTypeId
FTypeIdName // same as FTypeId, but use package name instead of prefix
@ -131,7 +132,7 @@ const (
// %- v type identifiers with package name instead of prefix (typesym, dcommontype, typehash)
// update returns the results of applying f to mode.
func (f FmtFlag) update(mode fmtMode) (FmtFlag, fmtMode) {
func (f FmtFlag) update(mode FmtMode) (FmtFlag, FmtMode) {
switch {
case f&FmtSign != 0:
mode = FDbg
@ -147,7 +148,7 @@ func (f FmtFlag) update(mode fmtMode) (FmtFlag, fmtMode) {
return f, mode
}
var goopnames = []string{
var OpNames = []string{
OADDR: "&",
OADD: "+",
OADDSTR: "+",
@ -217,7 +218,7 @@ func (o Op) GoString() string {
return fmt.Sprintf("%#v", o)
}
func (o Op) format(s fmt.State, verb rune, mode fmtMode) {
func (o Op) format(s fmt.State, verb rune, mode FmtMode) {
switch verb {
case 'v':
o.oconv(s, fmtFlag(s, verb), mode)
@ -227,10 +228,10 @@ func (o Op) format(s fmt.State, verb rune, mode fmtMode) {
}
}
func (o Op) oconv(s fmt.State, flag FmtFlag, mode fmtMode) {
func (o Op) oconv(s fmt.State, flag FmtFlag, mode FmtMode) {
if flag&FmtSharp != 0 || mode != FDbg {
if int(o) < len(goopnames) && goopnames[o] != "" {
fmt.Fprint(s, goopnames[o])
if int(o) < len(OpNames) && OpNames[o] != "" {
fmt.Fprint(s, OpNames[o])
return
}
}
@ -239,66 +240,73 @@ func (o Op) oconv(s fmt.State, flag FmtFlag, mode fmtMode) {
fmt.Fprint(s, o.String())
}
type fmtMode int
type FmtMode int
type fmtNode struct {
x *Node
m fmtMode
m FmtMode
}
func (f *fmtNode) Format(s fmt.State, verb rune) { f.x.format(s, verb, f.m) }
type fmtOp struct {
x Op
m fmtMode
m FmtMode
}
func (f *fmtOp) Format(s fmt.State, verb rune) { f.x.format(s, verb, f.m) }
type fmtType struct {
x *types.Type
m fmtMode
m FmtMode
}
func (f *fmtType) Format(s fmt.State, verb rune) { typeFormat(f.x, s, verb, f.m) }
type fmtSym struct {
x *types.Sym
m fmtMode
m FmtMode
}
func (f *fmtSym) Format(s fmt.State, verb rune) { symFormat(f.x, s, verb, f.m) }
type fmtNodes struct {
x Nodes
m fmtMode
m FmtMode
}
func (f *fmtNodes) Format(s fmt.State, verb rune) { f.x.format(s, verb, f.m) }
func (n *Node) Format(s fmt.State, verb rune) { n.format(s, verb, FErr) }
func (o Op) Format(s fmt.State, verb rune) { o.format(s, verb, FErr) }
func (n *Node) Format(s fmt.State, verb rune) {
FmtNode(n, s, verb)
}
func FmtNode(n *Node, s fmt.State, verb rune) {
n.format(s, verb, FErr)
}
func (o Op) Format(s fmt.State, verb rune) { o.format(s, verb, FErr) }
// func (t *types.Type) Format(s fmt.State, verb rune) // in package types
// func (y *types.Sym) Format(s fmt.State, verb rune) // in package types { y.format(s, verb, FErr) }
func (n Nodes) Format(s fmt.State, verb rune) { n.format(s, verb, FErr) }
func (m fmtMode) Fprintf(s fmt.State, format string, args ...interface{}) {
func (m FmtMode) Fprintf(s fmt.State, format string, args ...interface{}) {
m.prepareArgs(args)
fmt.Fprintf(s, format, args...)
}
func (m fmtMode) Sprintf(format string, args ...interface{}) string {
func (m FmtMode) Sprintf(format string, args ...interface{}) string {
m.prepareArgs(args)
return fmt.Sprintf(format, args...)
}
func (m fmtMode) Sprint(args ...interface{}) string {
func (m FmtMode) Sprint(args ...interface{}) string {
m.prepareArgs(args)
return fmt.Sprint(args...)
}
func (m fmtMode) prepareArgs(args []interface{}) {
func (m FmtMode) prepareArgs(args []interface{}) {
for i, arg := range args {
switch arg := arg.(type) {
case Op:
@ -319,13 +327,13 @@ func (m fmtMode) prepareArgs(args []interface{}) {
}
}
func (n *Node) format(s fmt.State, verb rune, mode fmtMode) {
func (n *Node) format(s fmt.State, verb rune, mode FmtMode) {
switch verb {
case 'v', 'S', 'L':
n.nconv(s, fmtFlag(s, verb), mode)
nconvFmt(n, s, fmtFlag(s, verb), mode)
case 'j':
n.jconv(s, fmtFlag(s, verb))
jconvFmt(n, s, fmtFlag(s, verb))
default:
fmt.Fprintf(s, "%%!%c(*Node=%p)", verb, n)
@ -336,7 +344,7 @@ func (n *Node) format(s fmt.State, verb rune, mode fmtMode) {
var EscFmt func(n *Node, short bool) string
// *Node details
func (n *Node) jconv(s fmt.State, flag FmtFlag) {
func jconvFmt(n *Node, s fmt.State, flag FmtFlag) {
short := flag&FmtShort != 0
// Useful to see which nodes in an AST printout are actually identical
@ -363,7 +371,7 @@ func (n *Node) jconv(s fmt.State, flag FmtFlag) {
fmt.Fprintf(s, " l(%s%d)", pfx, n.Pos.Line())
}
if !short && n.Xoffset != BADWIDTH {
if !short && n.Xoffset != types.BADWIDTH {
fmt.Fprintf(s, " x(%d)", n.Xoffset)
}
@ -430,7 +438,7 @@ func (n *Node) jconv(s fmt.State, flag FmtFlag) {
}
}
func vconv(v constant.Value, flag FmtFlag) string {
func FmtConst(v constant.Value, flag FmtFlag) string {
if flag&FmtSharp == 0 && v.Kind() == constant.Complex {
real, imag := constant.Real(v), constant.Imag(v)
@ -473,17 +481,17 @@ s%^ ........*\]%&~%g
s%~ %%g
*/
func symfmt(b *bytes.Buffer, s *types.Sym, flag FmtFlag, mode fmtMode) {
func symfmt(b *bytes.Buffer, s *types.Sym, flag FmtFlag, mode FmtMode) {
if flag&FmtShort == 0 {
switch mode {
case FErr: // This is for the user
if s.Pkg == builtinpkg || s.Pkg == localpkg {
if s.Pkg == BuiltinPkg || s.Pkg == LocalPkg {
b.WriteString(s.Name)
return
}
// If the name was used by multiple packages, display the full path,
if s.Pkg.Name != "" && numImport[s.Pkg.Name] > 1 {
if s.Pkg.Name != "" && NumImport[s.Pkg.Name] > 1 {
fmt.Fprintf(b, "%q.%s", s.Pkg.Path, s.Name)
return
}
@ -534,28 +542,28 @@ func symfmt(b *bytes.Buffer, s *types.Sym, flag FmtFlag, mode fmtMode) {
b.WriteString(s.Name)
}
var basicnames = []string{
TINT: "int",
TUINT: "uint",
TINT8: "int8",
TUINT8: "uint8",
TINT16: "int16",
TUINT16: "uint16",
TINT32: "int32",
TUINT32: "uint32",
TINT64: "int64",
TUINT64: "uint64",
TUINTPTR: "uintptr",
TFLOAT32: "float32",
TFLOAT64: "float64",
TCOMPLEX64: "complex64",
TCOMPLEX128: "complex128",
TBOOL: "bool",
TANY: "any",
TSTRING: "string",
TNIL: "nil",
TIDEAL: "untyped number",
TBLANK: "blank",
var BasicTypeNames = []string{
types.TINT: "int",
types.TUINT: "uint",
types.TINT8: "int8",
types.TUINT8: "uint8",
types.TINT16: "int16",
types.TUINT16: "uint16",
types.TINT32: "int32",
types.TUINT32: "uint32",
types.TINT64: "int64",
types.TUINT64: "uint64",
types.TUINTPTR: "uintptr",
types.TFLOAT32: "float32",
types.TFLOAT64: "float64",
types.TCOMPLEX64: "complex64",
types.TCOMPLEX128: "complex128",
types.TBOOL: "bool",
types.TANY: "any",
types.TSTRING: "string",
types.TNIL: "nil",
types.TIDEAL: "untyped number",
types.TBLANK: "blank",
}
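
Aside: this table uses Go's indexed composite-literal form, so entries are keyed by the types.T* constants and any gap defaults to the empty string, which is exactly what the BasicTypeNames[t.Etype] != "" guard in tconv2 relies on. A standalone sketch (invented names):

	package main

	import "fmt"

	type etype int

	const (
		Txxx etype = iota
		TINT
		TBOOL
	)

	// Indexed literal: unnamed slots stay "", so a lookup can double
	// as an "is this a basic type?" test.
	var names = []string{
		TINT:  "int",
		TBOOL: "bool",
	}

	func main() {
		fmt.Println(names[TINT], names[TBOOL]) // int bool
		fmt.Println(names[Txxx] == "")         // true: no entry for Txxx
	}
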
var fmtBufferPool = sync.Pool{
@@ -564,7 +572,7 @@ var fmtBufferPool = sync.Pool{
},
}
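
Aside: tconv below drains this pool in the usual Get/Reset/defer-Put shape. The same pattern in isolation (invented names):

	package main

	import (
		"bytes"
		"fmt"
		"sync"
	)

	var bufPool = sync.Pool{
		New: func() interface{} { return new(bytes.Buffer) },
	}

	func render(s string) string {
		buf := bufPool.Get().(*bytes.Buffer)
		buf.Reset()            // a recycled buffer may hold stale bytes
		defer bufPool.Put(buf) // hand it back once we are done
		buf.WriteString(s)
		return buf.String() // copy out before the buffer is reused
	}

	func main() { fmt.Println(render("hello")) }
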
func tconv(t *types.Type, flag FmtFlag, mode fmtMode) string {
func tconv(t *types.Type, flag FmtFlag, mode FmtMode) string {
buf := fmtBufferPool.Get().(*bytes.Buffer)
buf.Reset()
defer fmtBufferPool.Put(buf)
@@ -577,7 +585,7 @@ func tconv(t *types.Type, flag FmtFlag, mode fmtMode) string {
// flag and mode control exactly what is printed.
// Any types x that are already in the visited map get printed as @%d where %d=visited[x].
// See #16897 before changing the implementation of tconv.
func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited map[*types.Type]int) {
func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode FmtMode, visited map[*types.Type]int) {
if off, ok := visited[t]; ok {
// We've seen this type before, so we're trying to print it recursively.
// Print a reference to it instead.
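
Aside: the visited map is what keeps tconv2 terminating on recursive types: a type already on the print stack is emitted as a back-reference instead of being expanded again. A standalone sketch of the idea, simplified to ids (invented names, not this CL's API):

	package main

	import "fmt"

	type typ struct {
		name string
		elem *typ
	}

	// conv expands t, emitting @%d back-references when it meets a
	// type that is already being printed, like tconv2's visited map.
	func conv(t *typ, visited map[*typ]int) string {
		if id, ok := visited[t]; ok {
			return fmt.Sprintf("@%d", id)
		}
		if t.elem == nil {
			return t.name
		}
		visited[t] = len(visited)
		defer delete(visited, t)
		return t.name + " -> " + conv(t.elem, visited)
	}

	func main() {
		a := &typ{name: "a"}
		a.elem = a // self-referential type
		fmt.Println(conv(a, map[*typ]int{})) // a -> @0
	}
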
@@ -648,7 +656,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
return
}
if t.Sym.Pkg == localpkg && t.Vargen != 0 {
if t.Sym.Pkg == LocalPkg && t.Vargen != 0 {
b.WriteString(mode.Sprintf("%v·%d", t.Sym, t.Vargen))
return
}
@@ -658,7 +666,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
return
}
if int(t.Etype) < len(basicnames) && basicnames[t.Etype] != "" {
if int(t.Etype) < len(BasicTypeNames) && BasicTypeNames[t.Etype] != "" {
var name string
switch t {
case types.UntypedBool:
@@ -674,7 +682,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
case types.UntypedComplex:
name = "untyped complex"
default:
name = basicnames[t.Etype]
name = BasicTypeNames[t.Etype]
}
b.WriteString(name)
return
@@ -701,7 +709,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
defer delete(visited, t)
switch t.Etype {
case TPTR:
case types.TPTR:
b.WriteByte('*')
switch mode {
case FTypeId, FTypeIdName:
@@ -712,17 +720,17 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
}
tconv2(b, t.Elem(), 0, mode, visited)
case TARRAY:
case types.TARRAY:
b.WriteByte('[')
b.WriteString(strconv.FormatInt(t.NumElem(), 10))
b.WriteByte(']')
tconv2(b, t.Elem(), 0, mode, visited)
case TSLICE:
case types.TSLICE:
b.WriteString("[]")
tconv2(b, t.Elem(), 0, mode, visited)
case TCHAN:
case types.TCHAN:
switch t.ChanDir() {
case types.Crecv:
b.WriteString("<-chan ")
@@ -741,13 +749,13 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
}
}
case TMAP:
case types.TMAP:
b.WriteString("map[")
tconv2(b, t.Key(), 0, mode, visited)
b.WriteByte(']')
tconv2(b, t.Elem(), 0, mode, visited)
case TINTER:
case types.TINTER:
if t.IsEmptyInterface() {
b.WriteString("interface {}")
break
@@ -779,7 +787,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
}
b.WriteByte('}')
case TFUNC:
case types.TFUNC:
if flag&FmtShort != 0 {
// no leading func
} else {
@@ -805,7 +813,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
tconv2(b, t.Results(), 0, mode, visited)
}
case TSTRUCT:
case types.TSTRUCT:
if m := t.StructType().Map; m != nil {
mt := m.MapType()
// Format the bucket struct for map[x]y as map.bucket[x]y.
@@ -856,17 +864,17 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
b.WriteByte('}')
}
case TFORW:
case types.TFORW:
b.WriteString("undefined")
if t.Sym != nil {
b.WriteByte(' ')
sconv2(b, t.Sym, 0, mode)
}
case TUNSAFEPTR:
case types.TUNSAFEPTR:
b.WriteString("unsafe.Pointer")
case Txxx:
case types.Txxx:
b.WriteString("Txxx")
default:
// Don't know how to handle - fall back to detailed prints.
@@ -875,7 +883,7 @@ func tconv2(b *bytes.Buffer, t *types.Type, flag FmtFlag, mode fmtMode, visited
}
// Statements which may be rendered with a simplestmt as init.
func stmtwithinit(op Op) bool {
func StmtWithInit(op Op) bool {
switch op {
case OIF, OFOR, OFORUNTIL, OSWITCH:
return true
@@ -884,20 +892,20 @@ func stmtwithinit(op Op) bool {
return false
}
func (n *Node) stmtfmt(s fmt.State, mode fmtMode) {
func stmtFmt(n *Node, s fmt.State, mode FmtMode) {
// some statements allow for an init, but at most one,
// but we may have an arbitrary number added, eg by typecheck
// and inlining. If it doesn't fit the syntax, emit an enclosing
// block starting with the init statements.
// if we can just say "for" n->ninit; ... then do so
simpleinit := n.Ninit.Len() == 1 && n.Ninit.First().Ninit.Len() == 0 && stmtwithinit(n.Op)
simpleinit := n.Ninit.Len() == 1 && n.Ninit.First().Ninit.Len() == 0 && StmtWithInit(n.Op)
// otherwise, print the inits as separate statements
complexinit := n.Ninit.Len() != 0 && !simpleinit && (mode != FErr)
// but if it was for if/for/switch, put in an extra surrounding block to limit the scope
extrablock := complexinit && stmtwithinit(n.Op)
extrablock := complexinit && StmtWithInit(n.Op)
if extrablock {
fmt.Fprint(s, "{")
@@ -1064,7 +1072,7 @@ func (n *Node) stmtfmt(s fmt.State, mode fmtMode) {
}
}
var opprec = []int{
var OpPrec = []int{
OALIGNOF: 8,
OAPPEND: 8,
OBYTES2STR: 8,
@@ -1184,7 +1192,7 @@ var opprec = []int{
OEND: 0,
}
func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
func exprFmt(n *Node, s fmt.State, prec int, mode FmtMode) {
for n != nil && n.Implicit() && (n.Op == ODEREF || n.Op == OADDR) {
n = n.Left
}
@@ -1194,7 +1202,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
return
}
nprec := opprec[n.Op]
nprec := OpPrec[n.Op]
if n.Op == OTYPE && n.Sym != nil {
nprec = 8
}
@@ -1214,7 +1222,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
case OLITERAL: // this is a bit of a mess
if mode == FErr {
if n.Orig != nil && n.Orig != n {
n.Orig.exprfmt(s, prec, mode)
exprFmt(n.Orig, s, prec, mode)
return
}
if n.Sym != nil {
@@ -1252,7 +1260,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
fmt.Fprintf(s, "'\\U%08x'", uint64(x))
}
} else {
fmt.Fprint(s, vconv(n.Val(), fmtFlag(s, 'v')))
fmt.Fprint(s, FmtConst(n.Val(), fmtFlag(s, 'v')))
}
if needUnparen {
@@ -1369,7 +1377,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
mode.Fprintf(s, "%v:%v", n.Sym, n.Left)
case OCALLPART:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
if n.Right == nil || n.Right.Sym == nil {
fmt.Fprint(s, ".<nil>")
return
@@ -1377,7 +1385,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
mode.Fprintf(s, ".%0S", n.Right.Sym)
case OXDOT, ODOT, ODOTPTR, ODOTINTER, ODOTMETH:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
if n.Sym == nil {
fmt.Fprint(s, ".<nil>")
return
@@ -1385,7 +1393,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
mode.Fprintf(s, ".%0S", n.Sym)
case ODOTTYPE, ODOTTYPE2:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
if n.Right != nil {
mode.Fprintf(s, ".(%v)", n.Right)
return
@@ -1393,24 +1401,24 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
mode.Fprintf(s, ".(%v)", n.Type)
case OINDEX, OINDEXMAP:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
mode.Fprintf(s, "[%v]", n.Right)
case OSLICE, OSLICESTR, OSLICEARR, OSLICE3, OSLICE3ARR:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
fmt.Fprint(s, "[")
low, high, max := n.SliceBounds()
if low != nil {
fmt.Fprint(s, low.modeString(mode))
fmt.Fprint(s, modeString(low, mode))
}
fmt.Fprint(s, ":")
if high != nil {
fmt.Fprint(s, high.modeString(mode))
fmt.Fprint(s, modeString(high, mode))
}
if n.Op.IsSlice3() {
fmt.Fprint(s, ":")
if max != nil {
fmt.Fprint(s, max.modeString(mode))
fmt.Fprint(s, modeString(max, mode))
}
}
fmt.Fprint(s, "]")
@@ -1474,7 +1482,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
mode.Fprintf(s, "%#v(%.v)", n.Op, n.List)
case OCALL, OCALLFUNC, OCALLINTER, OCALLMETH, OGETG:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
if n.IsDDD() {
mode.Fprintf(s, "(%.v...)", n.List)
return
@@ -1505,7 +1513,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
if n.Left != nil && n.Left.Op == n.Op {
fmt.Fprint(s, " ")
}
n.Left.exprfmt(s, nprec+1, mode)
exprFmt(n.Left, s, nprec+1, mode)
// Binary
case OADD,
@@ -1528,16 +1536,16 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
OSEND,
OSUB,
OXOR:
n.Left.exprfmt(s, nprec, mode)
exprFmt(n.Left, s, nprec, mode)
mode.Fprintf(s, " %#v ", n.Op)
n.Right.exprfmt(s, nprec+1, mode)
exprFmt(n.Right, s, nprec+1, mode)
case OADDSTR:
for i, n1 := range n.List.Slice() {
if i != 0 {
fmt.Fprint(s, " + ")
}
n1.exprfmt(s, nprec, mode)
exprFmt(n1, s, nprec, mode)
}
case ODDD:
mode.Fprintf(s, "...")
@@ -1546,7 +1554,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
}
}
func (n *Node) nodefmt(s fmt.State, flag FmtFlag, mode fmtMode) {
func nodeFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
t := n.Type
// We almost always want the original.
@@ -1556,7 +1564,7 @@ func (n *Node) nodefmt(s fmt.State, flag FmtFlag, mode fmtMode) {
}
if flag&FmtLong != 0 && t != nil {
if t.Etype == TNIL {
if t.Etype == types.TNIL {
fmt.Fprint(s, "nil")
} else if n.Op == ONAME && n.Name.AutoTemp() {
mode.Fprintf(s, "%v value", t)
@@ -1568,15 +1576,15 @@ func (n *Node) nodefmt(s fmt.State, flag FmtFlag, mode fmtMode) {
// TODO inlining produces expressions with ninits. we can't print these yet.
if opprec[n.Op] < 0 {
n.stmtfmt(s, mode)
if OpPrec[n.Op] < 0 {
stmtFmt(n, s, mode)
return
}
n.exprfmt(s, 0, mode)
exprFmt(n, s, 0, mode)
}
func (n *Node) nodedump(s fmt.State, flag FmtFlag, mode fmtMode) {
func nodeDumpFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
recur := flag&FmtShort == 0
if recur {
@@ -1647,7 +1655,7 @@ func (n *Node) nodedump(s fmt.State, flag FmtFlag, mode fmtMode) {
if n.Op == ODCLFUNC && n.Func != nil && n.Func.Dcl != nil && len(n.Func.Dcl) != 0 {
indent(s)
// The dcls for a func or closure
mode.Fprintf(s, "%v-dcl%v", n.Op, asNodes(n.Func.Dcl))
mode.Fprintf(s, "%v-dcl%v", n.Op, AsNodes(n.Func.Dcl))
}
if n.List.Len() != 0 {
indent(s)
@@ -1667,7 +1675,7 @@ func (n *Node) nodedump(s fmt.State, flag FmtFlag, mode fmtMode) {
}
// "%S" suppresses qualifying with package
func symFormat(s *types.Sym, f fmt.State, verb rune, mode fmtMode) {
func symFormat(s *types.Sym, f fmt.State, verb rune, mode FmtMode) {
switch verb {
case 'v', 'S':
fmt.Fprint(f, sconv(s, fmtFlag(f, verb), mode))
@@ -1677,10 +1685,10 @@ func symFormat(s *types.Sym, f fmt.State, verb rune, mode fmtMode) {
}
}
func smodeString(s *types.Sym, mode fmtMode) string { return sconv(s, 0, mode) }
func smodeString(s *types.Sym, mode FmtMode) string { return sconv(s, 0, mode) }
// See #16897 before changing the implementation of sconv.
func sconv(s *types.Sym, flag FmtFlag, mode fmtMode) string {
func sconv(s *types.Sym, flag FmtFlag, mode FmtMode) string {
if flag&FmtLong != 0 {
panic("linksymfmt")
}
@@ -1701,7 +1709,7 @@ func sconv(s *types.Sym, flag FmtFlag, mode fmtMode) string {
return types.InternString(buf.Bytes())
}
func sconv2(b *bytes.Buffer, s *types.Sym, flag FmtFlag, mode fmtMode) {
func sconv2(b *bytes.Buffer, s *types.Sym, flag FmtFlag, mode FmtMode) {
if flag&FmtLong != 0 {
panic("linksymfmt")
}
@@ -1718,7 +1726,7 @@ func sconv2(b *bytes.Buffer, s *types.Sym, flag FmtFlag, mode fmtMode) {
symfmt(b, s, flag, mode)
}
func fldconv(b *bytes.Buffer, f *types.Field, flag FmtFlag, mode fmtMode, visited map[*types.Type]int, funarg types.Funarg) {
func fldconv(b *bytes.Buffer, f *types.Field, flag FmtFlag, mode FmtMode, visited map[*types.Type]int, funarg types.Funarg) {
if f == nil {
b.WriteString("<T>")
return
@@ -1734,12 +1742,12 @@ func fldconv(b *bytes.Buffer, f *types.Field, flag FmtFlag, mode fmtMode, visite
// Take the name from the original.
if mode == FErr {
s = origSym(s)
s = OrigSym(s)
}
if s != nil && f.Embedded == 0 {
if funarg != types.FunargNone {
name = asNode(f.Nname).modeString(mode)
name = modeString(AsNode(f.Nname), mode)
} else if flag&FmtLong != 0 {
name = mode.Sprintf("%0S", s)
if !types.IsExported(name) && flag&FmtUnsigned == 0 {
@@ -1775,7 +1783,7 @@ func fldconv(b *bytes.Buffer, f *types.Field, flag FmtFlag, mode fmtMode, visite
// "%L" print definition, not name
// "%S" omit 'func' and receiver from function types, short type names
func typeFormat(t *types.Type, s fmt.State, verb rune, mode fmtMode) {
func typeFormat(t *types.Type, s fmt.State, verb rune, mode FmtMode) {
switch verb {
case 'v', 'S', 'L':
fmt.Fprint(s, tconv(t, fmtFlag(s, verb), mode))
@@ -1784,12 +1792,12 @@ func typeFormat(t *types.Type, s fmt.State, verb rune, mode fmtMode) {
}
}
func (n *Node) String() string { return fmt.Sprint(n) }
func (n *Node) modeString(mode fmtMode) string { return mode.Sprint(n) }
func (n *Node) String() string { return fmt.Sprint(n) }
func modeString(n *Node, mode FmtMode) string { return mode.Sprint(n) }
// "%L" suffix with "(type %T)" where possible
// "%+S" in debug mode, don't recurse, no multiline output
func (n *Node) nconv(s fmt.State, flag FmtFlag, mode fmtMode) {
func nconvFmt(n *Node, s fmt.State, flag FmtFlag, mode FmtMode) {
if n == nil {
fmt.Fprint(s, "<N>")
return
@@ -1799,11 +1807,11 @@ func (n *Node) nconv(s fmt.State, flag FmtFlag, mode fmtMode) {
switch mode {
case FErr:
n.nodefmt(s, flag, mode)
nodeFmt(n, s, flag, mode)
case FDbg:
dumpdepth++
n.nodedump(s, flag, mode)
nodeDumpFmt(n, s, flag, mode)
dumpdepth--
default:
@@ -1811,7 +1819,7 @@ func (n *Node) nconv(s fmt.State, flag FmtFlag, mode fmtMode) {
}
}
func (l Nodes) format(s fmt.State, verb rune, mode fmtMode) {
func (l Nodes) format(s fmt.State, verb rune, mode FmtMode) {
switch verb {
case 'v':
l.hconv(s, fmtFlag(s, verb), mode)
@@ -1826,7 +1834,7 @@ func (n Nodes) String() string {
}
// Flags: all those of %N plus '.': separate with comma's instead of semicolons.
func (l Nodes) hconv(s fmt.State, flag FmtFlag, mode fmtMode) {
func (l Nodes) hconv(s fmt.State, flag FmtFlag, mode FmtMode) {
if l.Len() == 0 && mode == FDbg {
fmt.Fprint(s, "<nil>")
return
@@ -1841,18 +1849,18 @@ func (l Nodes) hconv(s fmt.State, flag FmtFlag, mode fmtMode) {
}
for i, n := range l.Slice() {
fmt.Fprint(s, n.modeString(mode))
fmt.Fprint(s, modeString(n, mode))
if i+1 < l.Len() {
fmt.Fprint(s, sep)
}
}
}
func dumplist(s string, l Nodes) {
func DumpList(s string, l Nodes) {
fmt.Printf("%s%+v\n", s, l)
}
func fdumplist(w io.Writer, s string, l Nodes) {
func FDumpList(w io.Writer, s string, l Nodes) {
fmt.Fprintf(w, "%s%+v\n", s, l)
}
@@ -1877,3 +1885,30 @@ func ellipsisIf(b bool) string {
}
return ""
}
// NumImport tracks how often a package with a given name is imported.
// It is used to print the package path in error messages whenever a
// bare package name would be ambiguous because several imported
// packages share that name.
var NumImport = make(map[string]int)
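
Aside: symfmt (earlier in this file) consults this map and switches to the quoted-path form when a name is claimed by more than one package. A standalone sketch of that lookup, with hypothetical packages:

	package main

	import "fmt"

	// numImport mirrors ir.NumImport: package name -> how many
	// imported packages use that name (values here are made up).
	var numImport = map[string]int{"rand": 2}

	func symString(pkgName, pkgPath, name string) string {
		if numImport[pkgName] > 1 {
			return fmt.Sprintf("%q.%s", pkgPath, name) // quoted full path
		}
		return pkgName + "." + name
	}

	func main() {
		fmt.Println(symString("rand", "math/rand", "Int"))   // "math/rand".Int
		fmt.Println(symString("rand", "crypto/rand", "Int")) // "crypto/rand".Int
	}
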
func InstallTypeFormats() {
types.Sconv = func(s *types.Sym, flag, mode int) string {
return sconv(s, FmtFlag(flag), FmtMode(mode))
}
types.Tconv = func(t *types.Type, flag, mode int) string {
return tconv(t, FmtFlag(flag), FmtMode(mode))
}
types.FormatSym = func(sym *types.Sym, s fmt.State, verb rune, mode int) {
symFormat(sym, s, verb, FmtMode(mode))
}
types.FormatType = func(t *types.Type, s fmt.State, verb rune, mode int) {
typeFormat(t, s, verb, FmtMode(mode))
}
}
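
Aside: these assignments exist because types cannot import ir (ir already imports types), so types declares function-valued hooks and ir installs the implementations at startup. A standalone sketch of the pattern, with invented names:

	package main

	import "fmt"

	// The low-level package declares a hook it cannot implement itself...
	var typeString func(t string) string

	func describe(t string) string { return typeString(t) }

	func main() {
		// ...and the high-level package installs the implementation,
		// breaking what would otherwise be an import cycle.
		typeString = func(t string) string { return "type " + t }
		fmt.Println(describe("int")) // type int
	}
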
// Line returns n's position as a string. If n has been inlined,
// it uses the outermost position where n has been inlined.
func Line(n *Node) string {
return base.FmtPos(n.Pos)
}


@@ -0,0 +1,12 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ir
import "cmd/compile/internal/types"
var LocalPkg *types.Pkg // package being compiled
// BuiltinPkg is a fake package that declares the universe block.
var BuiltinPkg *types.Pkg


@@ -4,17 +4,20 @@
// “Abstract” syntax representation.
package gc
package ir
import (
"go/constant"
"sort"
"strings"
"unsafe"
"cmd/compile/internal/base"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
"cmd/internal/src"
"go/constant"
"sort"
)
// A Node is a single node in the syntax tree.
@@ -290,7 +293,7 @@ func (n *Node) SetVal(v constant.Value) {
base.Fatalf("have Opt")
}
if n.Op == OLITERAL {
assertRepresents(n.Type, v)
AssertValidTypeForConst(n.Type, v)
}
n.SetHasVal(true)
n.E = &v
@@ -333,7 +336,7 @@ func (n *Node) SetIota(x int64) {
// MayBeShared reports whether n may occur in multiple places in the AST.
// Extra care must be taken when mutating such a node.
func (n *Node) mayBeShared() bool {
func MayBeShared(n *Node) bool {
switch n.Op {
case ONAME, OLITERAL, ONIL, OTYPE:
return true
@@ -342,7 +345,7 @@ func (n *Node) mayBeShared() bool {
}
// FuncName returns the name (without the package) of the function n.
func (n *Node) funcname() string {
func FuncName(n *Node) string {
if n == nil || n.Func == nil || n.Func.Nname == nil {
return "<nil>"
}
@@ -353,7 +356,7 @@ func (n *Node) funcname() string {
// This differs from the compiler's internal convention where local functions lack a package
// because the ultimate consumer of this is a human looking at an IDE; package is only empty
// if the compilation package is actually the empty string.
func (n *Node) pkgFuncName() string {
func PkgFuncName(n *Node) string {
var s *types.Sym
if n == nil {
return "<nil>"
@@ -681,7 +684,7 @@ type Func struct {
FieldTrack map[*types.Sym]struct{}
DebugInfo *ssa.FuncDebug
lsym *obj.LSym
LSym *obj.LSym
Inl *Inline
@@ -693,13 +696,13 @@ type Func struct {
Pragma PragmaFlag // go:xxx function annotations
flags bitset16
numDefers int // number of defer calls in the function
numReturns int // number of explicit returns in the function
NumDefers int // number of defer calls in the function
NumReturns int // number of explicit returns in the function
// nwbrCalls records the LSyms of functions called by this
// function for go:nowritebarrierrec analysis. Only filled in
// if nowritebarrierrecCheck != nil.
nwbrCalls *[]nowritebarrierrecCallSym
NWBRCalls *[]SymAndPos
}
// An Inline holds fields used for function bodies that can be inlined.
@@ -764,7 +767,7 @@ func (f *Func) SetExportInline(b bool) { f.flags.set(funcExportInlin
func (f *Func) SetInstrumentBody(b bool) { f.flags.set(funcInstrumentBody, b) }
func (f *Func) SetOpenCodedDeferDisallowed(b bool) { f.flags.set(funcOpenCodedDeferDisallowed, b) }
func (f *Func) setWBPos(pos src.XPos) {
func (f *Func) SetWBPos(pos src.XPos) {
if base.Debug.WB != 0 {
base.WarnfAt(pos, "write barrier")
}
@@ -996,7 +999,7 @@ const (
type Nodes struct{ slice *[]*Node }
// AsNodes returns a slice of *Node as a Nodes value.
func asNodes(s []*Node) Nodes {
func AsNodes(s []*Node) Nodes {
return Nodes{&s}
}
@@ -1136,38 +1139,38 @@ func (n *Nodes) AppendNodes(n2 *Nodes) {
// Inspect invokes f on each node in an AST in depth-first order.
// If f(n) returns false, Inspect skips visiting n's children.
func inspect(n *Node, f func(*Node) bool) {
func Inspect(n *Node, f func(*Node) bool) {
if n == nil || !f(n) {
return
}
inspectList(n.Ninit, f)
inspect(n.Left, f)
inspect(n.Right, f)
inspectList(n.List, f)
inspectList(n.Nbody, f)
inspectList(n.Rlist, f)
InspectList(n.Ninit, f)
Inspect(n.Left, f)
Inspect(n.Right, f)
InspectList(n.List, f)
InspectList(n.Nbody, f)
InspectList(n.Rlist, f)
}
func inspectList(l Nodes, f func(*Node) bool) {
func InspectList(l Nodes, f func(*Node) bool) {
for _, n := range l.Slice() {
inspect(n, f)
Inspect(n, f)
}
}
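
Aside: the false-return contract above is the interesting part: it prunes whole subtrees, not single nodes. A standalone sketch on a toy tree (invented types):

	package main

	import "fmt"

	type node struct {
		op       string
		children []*node
	}

	// inspect visits nodes depth-first; returning false from f
	// skips the children, the same contract as ir.Inspect.
	func inspect(n *node, f func(*node) bool) {
		if n == nil || !f(n) {
			return
		}
		for _, c := range n.children {
			inspect(c, f)
		}
	}

	func main() {
		tree := &node{"call", []*node{{"name", nil}, {"lit", nil}}}
		inspect(tree, func(n *node) bool {
			fmt.Println(n.op)
			return n.op != "name" // don't descend below names
		})
	}
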
// NodeQueue is a FIFO queue of *Node. The zero value of NodeQueue is
// a ready-to-use empty queue.
type nodeQueue struct {
type NodeQueue struct {
ring []*Node
head, tail int
}
// Empty reports whether q contains no Nodes.
func (q *nodeQueue) empty() bool {
func (q *NodeQueue) Empty() bool {
return q.head == q.tail
}
// PushRight appends n to the right of the queue.
func (q *nodeQueue) pushRight(n *Node) {
func (q *NodeQueue) PushRight(n *Node) {
if len(q.ring) == 0 {
q.ring = make([]*Node, 16)
} else if q.head+len(q.ring) == q.tail {
@@ -1191,8 +1194,8 @@ func (q *nodeQueue) pushRight(n *Node) {
// PopLeft pops a node from the left of the queue. It panics if q is
// empty.
func (q *nodeQueue) popLeft() *Node {
if q.empty() {
func (q *NodeQueue) PopLeft() *Node {
if q.Empty() {
panic("dequeue empty")
}
n := q.ring[q.head%len(q.ring)]
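
Aside: the queue is a growable ring: head and tail only ever increase and are reduced modulo len(ring) on access, so elements never shift. A standalone mirror of the full push/pop logic (the hunk above elides the tail of PopLeft):

	package main

	import "fmt"

	// queue is a minimal ring-buffer FIFO in the style of ir.NodeQueue.
	type queue struct {
		ring       []int
		head, tail int
	}

	func (q *queue) empty() bool { return q.head == q.tail }

	func (q *queue) pushRight(v int) {
		if len(q.ring) == 0 {
			q.ring = make([]int, 16)
		} else if q.head+len(q.ring) == q.tail {
			// Ring is full: double it, unrolling the old contents
			// into the front of the new slice.
			nring := make([]int, len(q.ring)*2)
			for i := q.head; i < q.tail; i++ {
				nring[i-q.head] = q.ring[i%len(q.ring)]
			}
			q.ring, q.head, q.tail = nring, 0, q.tail-q.head
		}
		q.ring[q.tail%len(q.ring)] = v
		q.tail++
	}

	func (q *queue) popLeft() int {
		if q.empty() {
			panic("dequeue empty")
		}
		v := q.ring[q.head%len(q.ring)]
		q.head++
		return v
	}

	func main() {
		var q queue
		q.pushRight(1)
		q.pushRight(2)
		fmt.Println(q.popLeft(), q.popLeft()) // 1 2 (FIFO order)
	}
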
@@ -1226,3 +1229,342 @@ func (s NodeSet) Sorted(less func(*Node, *Node) bool) []*Node {
sort.Slice(res, func(i, j int) bool { return less(res[i], res[j]) })
return res
}
func Nod(op Op, nleft, nright *Node) *Node {
return NodAt(base.Pos, op, nleft, nright)
}
func NodAt(pos src.XPos, op Op, nleft, nright *Node) *Node {
var n *Node
switch op {
case ODCLFUNC:
var x struct {
n Node
f Func
}
n = &x.n
n.Func = &x.f
n.Func.Decl = n
case ONAME:
base.Fatalf("use newname instead")
case OLABEL, OPACK:
var x struct {
n Node
m Name
}
n = &x.n
n.Name = &x.m
default:
n = new(Node)
}
n.Op = op
n.Left = nleft
n.Right = nright
n.Pos = pos
n.Xoffset = types.BADWIDTH
n.Orig = n
return n
}
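
Aside: the var x struct { n Node; f Func } locals above are an allocation trick: taking the address of one field forces the whole combined struct onto the heap as a single object, so a Node and its Func cost one allocation instead of two. The same trick in isolation (invented types):

	package main

	import "fmt"

	type inner struct{ v int }

	type outer struct{ in *inner }

	// newOuter allocates outer and inner together: &x.o makes the
	// combined struct escape as one heap object.
	func newOuter() *outer {
		var x struct {
			o outer
			i inner
		}
		o := &x.o
		o.in = &x.i
		return o
	}

	func main() {
		o := newOuter()
		o.in.v = 42
		fmt.Println(o.in.v) // 42
	}
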
// NewNameAt returns a new ONAME Node associated with symbol s at position pos.
// The caller is responsible for setting n.Name.Curfn.
func NewNameAt(pos src.XPos, s *types.Sym) *Node {
if s == nil {
base.Fatalf("newnamel nil")
}
var x struct {
n Node
m Name
p Param
}
n := &x.n
n.Name = &x.m
n.Name.Param = &x.p
n.Op = ONAME
n.Pos = pos
n.Orig = n
n.Sym = s
return n
}
// The Class of a variable/function describes the "storage class"
// of a variable or function. During parsing, storage classes are
// called declaration contexts.
type Class uint8
//go:generate stringer -type=Class
const (
Pxxx Class = iota // no class; used during ssa conversion to indicate pseudo-variables
PEXTERN // global variables
PAUTO // local variables
PAUTOHEAP // local variables or parameters moved to heap
PPARAM // input arguments
PPARAMOUT // output results
PFUNC // global functions
// Careful: Class is stored in three bits in Node.flags.
_ = uint((1 << 3) - iota) // static assert for iota <= (1 << 3)
)
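
Aside: the final blank declaration above is a compile-time guard. Class is packed into three bits of Node.flags, and a negative untyped constant cannot be converted to uint, so the build fails as soon as the class count outgrows three bits. The same trick in isolation:

	package main

	const (
		a = iota // 0
		b        // 1
		c        // 2
		// If more than four constants ever precede this line, iota
		// exceeds 1<<2 and (1<<2)-iota goes negative; a negative
		// untyped constant cannot convert to uint, so compilation fails.
		_ = uint((1 << 2) - iota)
	)

	func main() {}
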
type PragmaFlag int16
const (
// Func pragmas.
Nointerface PragmaFlag = 1 << iota
Noescape // func parameters don't escape
Norace // func must not have race detector annotations
Nosplit // func should not execute on separate stack
Noinline // func should not be inlined
NoCheckPtr // func should not be instrumented by checkptr
CgoUnsafeArgs // treat a pointer to one arg as a pointer to them all
UintptrEscapes // pointers converted to uintptr escape
// Runtime-only func pragmas.
// See ../../../../runtime/README.md for detailed descriptions.
Systemstack // func must run on system stack
Nowritebarrier // emit compiler error instead of write barrier
Nowritebarrierrec // error on write barrier in this or recursive callees
Yeswritebarrierrec // cancels Nowritebarrierrec in this function and callees
// Runtime and cgo type pragmas
NotInHeap // values of this type must not be heap allocated
// Go command pragmas
GoBuildPragma
)
type SymAndPos struct {
Sym *obj.LSym // LSym of callee
Pos src.XPos // line of call
}
func AsNode(n *types.Node) *Node { return (*Node)(unsafe.Pointer(n)) }
func AsTypesNode(n *Node) *types.Node { return (*types.Node)(unsafe.Pointer(n)) }
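
Aside: these two one-liners are how *ir.Node crosses into the types package, which only sees an opaque placeholder type: the pointer round-trips through unsafe.Pointer unchanged. A standalone sketch (invented types; the real types.Node is a deliberate placeholder):

	package main

	import (
		"fmt"
		"unsafe"
	)

	type opaque struct{} // stand-in for types.Node

	type node struct{ name string }

	func asNode(o *opaque) *node   { return (*node)(unsafe.Pointer(o)) }
	func asOpaque(n *node) *opaque { return (*opaque)(unsafe.Pointer(n)) }

	func main() {
		n := &node{name: "x"}
		fmt.Println(asNode(asOpaque(n)).name) // x: the pointer round-trips
	}
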
var BlankNode *Node
// OrigSym returns the original symbol written by the user.
func OrigSym(s *types.Sym) *types.Sym {
if s == nil {
return nil
}
if len(s.Name) > 1 && s.Name[0] == '~' {
switch s.Name[1] {
case 'r': // originally an unnamed result
return nil
case 'b': // originally the blank identifier _
// TODO(mdempsky): Does s.Pkg matter here?
return BlankNode.Sym
}
return s
}
if strings.HasPrefix(s.Name, ".anon") {
// originally an unnamed or _ name (see subr.go: structargs)
return nil
}
return s
}
// SliceBounds returns n's slice bounds: low, high, and max in expr[low:high:max].
// n must be a slice expression. max is nil if n is a simple slice expression.
func (n *Node) SliceBounds() (low, high, max *Node) {
if n.List.Len() == 0 {
return nil, nil, nil
}
switch n.Op {
case OSLICE, OSLICEARR, OSLICESTR:
s := n.List.Slice()
return s[0], s[1], nil
case OSLICE3, OSLICE3ARR:
s := n.List.Slice()
return s[0], s[1], s[2]
}
base.Fatalf("SliceBounds op %v: %v", n.Op, n)
return nil, nil, nil
}
// SetSliceBounds sets n's slice bounds, where n is a slice expression.
// n must be a slice expression. If max is non-nil, n must be a full slice expression.
func (n *Node) SetSliceBounds(low, high, max *Node) {
switch n.Op {
case OSLICE, OSLICEARR, OSLICESTR:
if max != nil {
base.Fatalf("SetSliceBounds %v given three bounds", n.Op)
}
s := n.List.Slice()
if s == nil {
if low == nil && high == nil {
return
}
n.List.Set2(low, high)
return
}
s[0] = low
s[1] = high
return
case OSLICE3, OSLICE3ARR:
s := n.List.Slice()
if s == nil {
if low == nil && high == nil && max == nil {
return
}
n.List.Set3(low, high, max)
return
}
s[0] = low
s[1] = high
s[2] = max
return
}
base.Fatalf("SetSliceBounds op %v: %v", n.Op, n)
}
// IsSlice3 reports whether o is a slice3 op (OSLICE3, OSLICE3ARR).
// o must be a slicing op.
func (o Op) IsSlice3() bool {
switch o {
case OSLICE, OSLICEARR, OSLICESTR:
return false
case OSLICE3, OSLICE3ARR:
return true
}
base.Fatalf("IsSlice3 op %v", o)
return false
}
func IsConst(n *Node, ct constant.Kind) bool {
return ConstType(n) == ct
}
// Int64Val returns n as an int64.
// n must be an integer or rune constant.
func (n *Node) Int64Val() int64 {
if !IsConst(n, constant.Int) {
base.Fatalf("Int64Val(%v)", n)
}
x, ok := constant.Int64Val(n.Val())
if !ok {
base.Fatalf("Int64Val(%v)", n)
}
return x
}
// CanInt64 reports whether it is safe to call Int64Val() on n.
func (n *Node) CanInt64() bool {
if !IsConst(n, constant.Int) {
return false
}
// If the value inside n cannot be represented as an int64, the
// return value of Int64Val is undefined.
_, ok := constant.Int64Val(n.Val())
return ok
}
// Uint64Val returns n as a uint64.
// n must be an integer or rune constant.
func (n *Node) Uint64Val() uint64 {
if !IsConst(n, constant.Int) {
base.Fatalf("Uint64Val(%v)", n)
}
x, ok := constant.Uint64Val(n.Val())
if !ok {
base.Fatalf("Uint64Val(%v)", n)
}
return x
}
// BoolVal returns n as a bool.
// n must be a boolean constant.
func (n *Node) BoolVal() bool {
if !IsConst(n, constant.Bool) {
base.Fatalf("BoolVal(%v)", n)
}
return constant.BoolVal(n.Val())
}
// StringVal returns the value of a literal string Node as a string.
// n must be a string constant.
func (n *Node) StringVal() string {
if !IsConst(n, constant.String) {
base.Fatalf("StringVal(%v)", n)
}
return constant.StringVal(n.Val())
}
// RawCopy returns a shallow copy of n.
// Note: Copy or SepCopy (rather than RawCopy) is usually the
// correct choice (see comment with Copy, below).
func (n *Node) RawCopy() *Node {
copy := *n
return &copy
}
// SepCopy returns a separate shallow copy of n, with the copy's
// Orig pointing to itself.
func SepCopy(n *Node) *Node {
copy := *n
copy.Orig = &copy
return &copy
}
// Copy returns a shallow copy of n and adjusts the copy's Orig if
// necessary: In general, if n.Orig points to itself, the copy's
// Orig should point to itself as well. Otherwise, if n is modified,
// the copy's Orig node appears modified, too, and then doesn't
// represent the original node anymore.
// (This caused the wrong complit Op to be used when printing error
// messages; see issues #26855, #27765).
func Copy(n *Node) *Node {
copy := *n
if n.Orig == n {
copy.Orig = &copy
}
return &copy
}
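
Aside: the subtlety the comment above describes is the self-pointing Orig invariant. A standalone mirror showing why Copy re-points Orig only when it pointed to n itself:

	package main

	import "fmt"

	type node struct {
		val  string
		Orig *node
	}

	// copyNode preserves the invariant: a node whose Orig points to
	// itself yields a copy whose Orig points to the copy.
	func copyNode(n *node) *node {
		c := *n
		if n.Orig == n {
			c.Orig = &c
		}
		return &c
	}

	func main() {
		n := &node{val: "x"}
		n.Orig = n
		c := copyNode(n)
		c.val = "y"
		fmt.Println(n.val, c.Orig == c) // x true: mutating c leaves n alone
	}
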
// IsNil reports whether n represents the universal untyped zero value "nil".
func IsNil(n *Node) bool {
// Check n.Orig because constant propagation may produce typed nil constants,
// which don't exist in the Go spec.
return n.Orig.Op == ONIL
}
func IsBlank(n *Node) bool {
if n == nil {
return false
}
return n.Sym.IsBlank()
}
// IsMethod reports whether n is a method.
// n must be a function or a method.
func IsMethod(n *Node) bool {
return n.Type.Recv() != nil
}
func (n *Node) Typ() *types.Type {
return n.Type
}
func (n *Node) StorageClass() ssa.StorageClass {
switch n.Class() {
case PPARAM:
return ssa.ClassParam
case PPARAMOUT:
return ssa.ClassParamOut
case PAUTO:
return ssa.ClassAuto
default:
base.Fatalf("untranslatable storage class for %v: %s", n, n.Class())
return 0
}
}


@@ -1,6 +1,6 @@
// Code generated by "stringer -type=Op -trimprefix=O"; DO NOT EDIT.
package gc
package ir
import "strconv"


@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
package ir
import (
"reflect"


@@ -0,0 +1,120 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ir
import (
"go/constant"
"math"
"cmd/compile/internal/base"
"cmd/compile/internal/types"
)
func ConstType(n *Node) constant.Kind {
if n == nil || n.Op != OLITERAL {
return constant.Unknown
}
return n.Val().Kind()
}
// ConstValue returns the constant value stored in n as an interface{}.
// It returns int64s for ints and runes, float64s for floats,
// and complex128s for complex values.
func ConstValue(n *Node) interface{} {
switch v := n.Val(); v.Kind() {
default:
base.Fatalf("unexpected constant: %v", v)
panic("unreachable")
case constant.Bool:
return constant.BoolVal(v)
case constant.String:
return constant.StringVal(v)
case constant.Int:
return Int64Val(n.Type, v)
case constant.Float:
return Float64Val(v)
case constant.Complex:
return complex(Float64Val(constant.Real(v)), Float64Val(constant.Imag(v)))
}
}
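
Aside: the go/constant kinds map onto ordinary Go values the way ConstValue does above. A runnable taste of that standard-library API (ir's version additionally consults the node's type for ints):

	package main

	import (
		"fmt"
		"go/constant"
	)

	// unpack mirrors the shape of ir.ConstValue: one switch on Kind,
	// one concrete Go value out (ints simplified to int64 here).
	func unpack(v constant.Value) interface{} {
		switch v.Kind() {
		case constant.Bool:
			return constant.BoolVal(v)
		case constant.String:
			return constant.StringVal(v)
		case constant.Int:
			x, _ := constant.Int64Val(v)
			return x
		case constant.Float:
			x, _ := constant.Float64Val(v)
			return x
		}
		panic("unexpected kind")
	}

	func main() {
		fmt.Println(unpack(constant.MakeInt64(42)))    // 42
		fmt.Println(unpack(constant.MakeFloat64(1.5))) // 1.5
		fmt.Println(unpack(constant.MakeBool(true)))   // true
	}
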
// Int64Val returns v converted to int64.
// Note: if t is uint64, very large values will be converted to negative int64.
func Int64Val(t *types.Type, v constant.Value) int64 {
if t.IsUnsigned() {
if x, ok := constant.Uint64Val(v); ok {
return int64(x)
}
} else {
if x, ok := constant.Int64Val(v); ok {
return x
}
}
base.Fatalf("%v out of range for %v", v, t)
panic("unreachable")
}
func Float64Val(v constant.Value) float64 {
if x, _ := constant.Float64Val(v); !math.IsInf(x, 0) {
return x + 0 // avoid -0 (should not be needed, but be conservative)
}
base.Fatalf("bad float64 value: %v", v)
panic("unreachable")
}
func AssertValidTypeForConst(t *types.Type, v constant.Value) {
if !ValidTypeForConst(t, v) {
base.Fatalf("%v does not represent %v", t, v)
}
}
func ValidTypeForConst(t *types.Type, v constant.Value) bool {
switch v.Kind() {
case constant.Unknown:
return OKForConst[t.Etype]
case constant.Bool:
return t.IsBoolean()
case constant.String:
return t.IsString()
case constant.Int:
return t.IsInteger()
case constant.Float:
return t.IsFloat()
case constant.Complex:
return t.IsComplex()
}
base.Fatalf("unexpected constant kind: %v", v)
panic("unreachable")
}
// NewLiteral returns a new untyped constant with value v.
func NewLiteral(v constant.Value) *Node {
n := Nod(OLITERAL, nil, nil)
if k := v.Kind(); k != constant.Unknown {
n.Type = idealType(k)
n.SetVal(v)
}
return n
}
func idealType(ct constant.Kind) *types.Type {
switch ct {
case constant.String:
return types.UntypedString
case constant.Bool:
return types.UntypedBool
case constant.Int:
return types.UntypedInt
case constant.Float:
return types.UntypedFloat
case constant.Complex:
return types.UntypedComplex
}
base.Fatalf("unexpected Ctype: %v", ct)
return nil
}
var OKForConst [types.NTYPE]bool


@@ -9,6 +9,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@@ -288,7 +289,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym:
wantreg = "SB"
gc.AddAux(&p.From, v)
case *gc.Node:
case *ir.Node:
wantreg = "SP"
gc.AddAux(&p.From, v)
case nil:


@@ -9,6 +9,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@@ -262,7 +263,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym:
wantreg = "SB"
gc.AddAux(&p.From, v)
case *gc.Node:
case *ir.Node:
wantreg = "SP"
gc.AddAux(&p.From, v)
case nil:


@@ -7,6 +7,7 @@ package ppc64
import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@@ -751,7 +752,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
p.To.Reg = v.Reg()
}
case *obj.LSym, *gc.Node:
case *obj.LSym, *ir.Node:
p := s.Prog(ppc64.AMOVD)
p.From.Type = obj.TYPE_ADDR
p.From.Reg = v.Args[0].Reg()


@@ -7,6 +7,7 @@ package riscv64
import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
@@ -323,7 +324,7 @@ func ssaGenValue(s *gc.SSAGenState, v *ssa.Value) {
case *obj.LSym:
wantreg = "SB"
gc.AddAux(&p.From, v)
case *gc.Node:
case *ir.Node:
wantreg = "SP"
gc.AddAux(&p.From, v)
case nil:


@@ -7,6 +7,7 @@ package wasm
import (
"cmd/compile/internal/base"
"cmd/compile/internal/gc"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
@@ -236,7 +237,7 @@ func ssaGenValueOnStack(s *gc.SSAGenState, v *ssa.Value, extend bool) {
switch v.Aux.(type) {
case *obj.LSym:
gc.AddAux(&p.From, v)
case *gc.Node:
case *ir.Node:
p.From.Reg = v.Args[0].Reg()
gc.AddAux(&p.From, v)
default:


@@ -42,6 +42,7 @@ var bootstrapDirs = []string{
"cmd/compile/internal/arm",
"cmd/compile/internal/arm64",
"cmd/compile/internal/gc",
"cmd/compile/internal/ir",
"cmd/compile/internal/logopt",
"cmd/compile/internal/mips",
"cmd/compile/internal/mips64",