cmd/compile: factor out Pkg, Sym, and Type into package types

- created new package cmd/compile/internal/types
- moved Pkg, Sym, Type to new package
- to break cycles, for now we need the (ugly) types/utils.go
  file, which contains a handful of functions that must be installed
  early by the gc frontend (a sketch of this hook pattern appears
  below, after the commit metadata)
- to break cycles, for now we need two functions to convert between
  *gc.Node and *types.Node (the latter is a dummy type); see the
  conversion sketch right after this list
- adjusted the gc code to use the new package and the conversion
  functions as needed
- converted several Pkg, Sym, and Type methods into standalone
  functions as needed
- renamed constructors typ, typPtr, typArray, etc. to types.New,
  types.NewPtr, types.NewArray, etc.
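
The two conversion helpers from the bullet above appear throughout the hunks below as asNode and asTypesNode. The following is a minimal, self-contained sketch of the pattern only: the stand-in node types and the unsafe.Pointer casts are illustrative assumptions, not the committed implementation, which lives in the gc package and converts between *gc.Node and the dummy *types.Node.

package main

import (
	"fmt"
	"unsafe"
)

// typesNode stands in for types.Node, which the new package declares only
// as a dummy type; the gc frontend remains the sole owner of the real layout.
type typesNode struct{ _ int }

// gcNode stands in for the frontend's *gc.Node.
type gcNode struct {
	Op   string
	Name string
}

// asNode and asTypesNode reinterpret the pointer without copying, which is
// what lets a *gc.Node be stored in fields such as types.Sym.Def or
// types.Field.Nname and recovered later.
func asNode(n *typesNode) *gcNode      { return (*gcNode)(unsafe.Pointer(n)) }
func asTypesNode(n *gcNode) *typesNode { return (*typesNode)(unsafe.Pointer(n)) }

func main() {
	n := &gcNode{Op: "ONAME", Name: "x"}
	def := asTypesNode(n)         // what the frontend stores in a Sym.Def-like field
	fmt.Println(asNode(def).Name) // round-trips to the same node: prints "x"
}

The constructor renames in the last bullet are mechanical by comparison: call sites such as typPtr(t), typArray(elem, n), and typSlice(elem) in the hunks below become types.NewPtr(t), types.NewArray(elem, n), and types.NewSlice(elem).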

Passes toolstash-check -all.

Change-Id: I8adfa5e85c731645d0a7fd2030375ed6ebf54b72
Reviewed-on: https://go-review.googlesource.com/39855
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
Author: Robert Griesemer, 2017-04-04 17:54:02 -07:00
Parent: 19bd145d07
Commit: f68f292820
48 changed files with 2433 additions and 2005 deletions
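
Before the per-file hunks, here is a rough sketch of the hook-installation scheme that the types/utils.go bullet refers to: the types package exposes package-level function variables, and the gc frontend assigns them during early initialization, so that types can reach frontend behavior without importing it. The hook names and signatures shown here (Widthptr, Dowidth, Fatalf) and the toy Type and NewPtr are illustrative assumptions, not the exact set installed by this change.

package types

// Type is a placeholder for the compiler's type representation.
type Type struct {
	Elem  *Type // element type, used by the NewPtr sketch below
	Width int64
}

// Hooks assigned by the gc frontend before any types are constructed
// (e.g. types.Dowidth = dowidth). They exist only to break what would
// otherwise be an import cycle back into gc.
var (
	Widthptr int         // pointer size in bytes, set by the backend
	Dowidth  func(*Type) // fills in t.Width; implemented in gc
	Fatalf   func(format string, args ...interface{})
)

// NewPtr sketches a constructor that depends on the hooks: the width of
// every pointer type comes from the frontend-installed Widthptr.
func NewPtr(elem *Type) *Type {
	if Widthptr == 0 {
		Fatalf("types.NewPtr called before Widthptr was installed")
	}
	return &Type{Elem: elem, Width: int64(Widthptr)}
}

This indirection is the "ugly" part the commit message concedes: a handful of globals that must be wired up in the right order, traded for a package boundary the rest of the diff can rely on.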

View file

@ -557,8 +557,6 @@ func init() {
// To print out a new table, run: go test -run Formats -v.
var knownFormats = map[string]string{
"*bytes.Buffer %s": "",
"*cmd/compile/internal/gc.Field %p": "",
"*cmd/compile/internal/gc.Field %v": "",
"*cmd/compile/internal/gc.Mpflt %v": "",
"*cmd/compile/internal/gc.Mpint %v": "",
"*cmd/compile/internal/gc.Node %#v": "",
@ -570,20 +568,6 @@ var knownFormats = map[string]string{
"*cmd/compile/internal/gc.Node %j": "",
"*cmd/compile/internal/gc.Node %p": "",
"*cmd/compile/internal/gc.Node %v": "",
"*cmd/compile/internal/gc.Sym %+v": "",
"*cmd/compile/internal/gc.Sym %-v": "",
"*cmd/compile/internal/gc.Sym %0S": "",
"*cmd/compile/internal/gc.Sym %S": "",
"*cmd/compile/internal/gc.Sym %p": "",
"*cmd/compile/internal/gc.Sym %v": "",
"*cmd/compile/internal/gc.Type %#v": "",
"*cmd/compile/internal/gc.Type %+v": "",
"*cmd/compile/internal/gc.Type %-S": "",
"*cmd/compile/internal/gc.Type %0S": "",
"*cmd/compile/internal/gc.Type %L": "",
"*cmd/compile/internal/gc.Type %S": "",
"*cmd/compile/internal/gc.Type %p": "",
"*cmd/compile/internal/gc.Type %v": "",
"*cmd/compile/internal/ssa.Block %s": "",
"*cmd/compile/internal/ssa.Block %v": "",
"*cmd/compile/internal/ssa.Func %s": "",
@ -591,6 +575,22 @@ var knownFormats = map[string]string{
"*cmd/compile/internal/ssa.Value %s": "",
"*cmd/compile/internal/ssa.Value %v": "",
"*cmd/compile/internal/ssa.sparseTreeMapEntry %v": "",
"*cmd/compile/internal/types.Field %p": "",
"*cmd/compile/internal/types.Field %v": "",
"*cmd/compile/internal/types.Sym %+v": "",
"*cmd/compile/internal/types.Sym %-v": "",
"*cmd/compile/internal/types.Sym %0S": "",
"*cmd/compile/internal/types.Sym %S": "",
"*cmd/compile/internal/types.Sym %p": "",
"*cmd/compile/internal/types.Sym %v": "",
"*cmd/compile/internal/types.Type %#v": "",
"*cmd/compile/internal/types.Type %+v": "",
"*cmd/compile/internal/types.Type %-S": "",
"*cmd/compile/internal/types.Type %0S": "",
"*cmd/compile/internal/types.Type %L": "",
"*cmd/compile/internal/types.Type %S": "",
"*cmd/compile/internal/types.Type %p": "",
"*cmd/compile/internal/types.Type %v": "",
"*cmd/internal/obj.Addr %v": "",
"*cmd/internal/obj.LSym %v": "",
"*cmd/internal/obj.Prog %s": "",
@ -613,9 +613,6 @@ var knownFormats = map[string]string{
"cmd/compile/internal/gc.Class %d": "",
"cmd/compile/internal/gc.Ctype %d": "",
"cmd/compile/internal/gc.Ctype %v": "",
"cmd/compile/internal/gc.EType %d": "",
"cmd/compile/internal/gc.EType %s": "",
"cmd/compile/internal/gc.EType %v": "",
"cmd/compile/internal/gc.Level %d": "",
"cmd/compile/internal/gc.Level %v": "",
"cmd/compile/internal/gc.Node %#v": "",
@ -652,6 +649,9 @@ var knownFormats = map[string]string{
"cmd/compile/internal/syntax.token %d": "",
"cmd/compile/internal/syntax.token %q": "",
"cmd/compile/internal/syntax.token %s": "",
"cmd/compile/internal/types.EType %d": "",
"cmd/compile/internal/types.EType %s": "",
"cmd/compile/internal/types.EType %v": "",
"cmd/internal/src.Pos %s": "",
"cmd/internal/src.Pos %v": "",
"cmd/internal/src.XPos %v": "",

View file

@ -4,7 +4,10 @@
package gc
import "fmt"
import (
"cmd/compile/internal/types"
"fmt"
)
// AlgKind describes the kind of algorithms used for comparing and
// hashing a Type.
@ -35,21 +38,21 @@ const (
)
// IsComparable reports whether t is a comparable type.
func (t *Type) IsComparable() bool {
func IsComparable(t *types.Type) bool {
a, _ := algtype1(t)
return a != ANOEQ
}
// IsRegularMemory reports whether t can be compared/hashed as regular memory.
func (t *Type) IsRegularMemory() bool {
func IsRegularMemory(t *types.Type) bool {
a, _ := algtype1(t)
return a == AMEM
}
// IncomparableField returns an incomparable Field of struct Type t, if any.
func (t *Type) IncomparableField() *Field {
func IncomparableField(t *types.Type) *types.Field {
for _, f := range t.FieldSlice() {
if !f.Type.IsComparable() {
if !IsComparable(f.Type) {
return f
}
}
@ -58,7 +61,7 @@ func (t *Type) IncomparableField() *Field {
// algtype is like algtype1, except it returns the fixed-width AMEMxx variants
// instead of the general AMEM kind when possible.
func algtype(t *Type) AlgKind {
func algtype(t *types.Type) AlgKind {
a, _ := algtype1(t)
if a == AMEM {
switch t.Width {
@ -83,7 +86,7 @@ func algtype(t *Type) AlgKind {
// algtype1 returns the AlgKind used for comparing and hashing Type t.
// If it returns ANOEQ, it also returns the component type of t that
// makes it incomparable.
func algtype1(t *Type) (AlgKind, *Type) {
func algtype1(t *types.Type) (AlgKind, *types.Type) {
if t.Broke() {
return AMEM, nil
}
@ -181,7 +184,7 @@ func algtype1(t *Type) (AlgKind, *Type) {
}
// Generate a helper function to compute the hash of a value of type t.
func genhash(sym *Sym, t *Type) {
func genhash(sym *types.Sym, t *types.Type) {
if Debug['r'] != 0 {
fmt.Printf("genhash %v %v\n", sym, t)
}
@ -198,13 +201,13 @@ func genhash(sym *Sym, t *Type) {
tfn := nod(OTFUNC, nil, nil)
fn.Func.Nname.Name.Param.Ntype = tfn
n := namedfield("p", typPtr(t))
n := namedfield("p", types.NewPtr(t))
tfn.List.Append(n)
np := n.Left
n = namedfield("h", Types[TUINTPTR])
n = namedfield("h", types.Types[TUINTPTR])
tfn.List.Append(n)
nh := n.Left
n = anonfield(Types[TUINTPTR]) // return value
n = anonfield(types.Types[TUINTPTR]) // return value
tfn.Rlist.Append(n)
funchdr(fn)
@ -217,7 +220,7 @@ func genhash(sym *Sym, t *Type) {
default:
Fatalf("genhash %v", t)
case TARRAY:
case types.TARRAY:
// An array of pure memory would be handled by the
// standard algorithm, so the element type must not be
// pure memory.
@ -225,7 +228,7 @@ func genhash(sym *Sym, t *Type) {
n := nod(ORANGE, nil, nod(OIND, np, nil))
ni := newname(lookup("i"))
ni.Type = Types[TINT]
ni.Type = types.Types[TINT]
n.List.Set1(ni)
n.SetColas(true)
colasdefn(n.List.Slice(), n)
@ -244,7 +247,7 @@ func genhash(sym *Sym, t *Type) {
fn.Nbody.Append(n)
case TSTRUCT:
case types.TSTRUCT:
// Walk the struct using memhash for runs of AMEM
// and calling specific hash functions for the others.
for i, fields := 0, t.FieldSlice(); i < len(fields); {
@ -257,7 +260,7 @@ func genhash(sym *Sym, t *Type) {
}
// Hash non-memory fields with appropriate hash function.
if !f.Type.IsRegularMemory() {
if !IsRegularMemory(f.Type) {
hashel := hashfor(f.Type)
call := nod(OCALL, hashel, nil)
nx := nodSym(OXDOT, np, f.Sym) // TODO: fields from other packages?
@ -322,8 +325,8 @@ func genhash(sym *Sym, t *Type) {
safemode = old_safemode
}
func hashfor(t *Type) *Node {
var sym *Sym
func hashfor(t *types.Type) *Node {
var sym *types.Sym
switch a, _ := algtype1(t); a {
case AMEM:
@ -349,9 +352,9 @@ func hashfor(t *Type) *Node {
n := newname(sym)
n.Class = PFUNC
tfn := nod(OTFUNC, nil, nil)
tfn.List.Append(anonfield(typPtr(t)))
tfn.List.Append(anonfield(Types[TUINTPTR]))
tfn.Rlist.Append(anonfield(Types[TUINTPTR]))
tfn.List.Append(anonfield(types.NewPtr(t)))
tfn.List.Append(anonfield(types.Types[TUINTPTR]))
tfn.Rlist.Append(anonfield(types.Types[TUINTPTR]))
tfn = typecheck(tfn, Etype)
n.Type = tfn.Type
return n
@ -359,7 +362,7 @@ func hashfor(t *Type) *Node {
// geneq generates a helper function to
// check equality of two values of type t.
func geneq(sym *Sym, t *Type) {
func geneq(sym *types.Sym, t *types.Type) {
if Debug['r'] != 0 {
fmt.Printf("geneq %v %v\n", sym, t)
}
@ -376,13 +379,13 @@ func geneq(sym *Sym, t *Type) {
tfn := nod(OTFUNC, nil, nil)
fn.Func.Nname.Name.Param.Ntype = tfn
n := namedfield("p", typPtr(t))
n := namedfield("p", types.NewPtr(t))
tfn.List.Append(n)
np := n.Left
n = namedfield("q", typPtr(t))
n = namedfield("q", types.NewPtr(t))
tfn.List.Append(n)
nq := n.Left
n = anonfield(Types[TBOOL])
n = anonfield(types.Types[TBOOL])
tfn.Rlist.Append(n)
funchdr(fn)
@ -404,7 +407,7 @@ func geneq(sym *Sym, t *Type) {
nrange := nod(ORANGE, nil, nod(OIND, np, nil))
ni := newname(lookup("i"))
ni.Type = Types[TINT]
ni.Type = types.Types[TINT]
nrange.List.Set1(ni)
nrange.SetColas(true)
colasdefn(nrange.List.Slice(), nrange)
@ -452,7 +455,7 @@ func geneq(sym *Sym, t *Type) {
}
// Compare non-memory fields with field equality.
if !f.Type.IsRegularMemory() {
if !IsRegularMemory(f.Type) {
and(eqfield(np, nq, f.Sym))
i++
continue
@ -521,7 +524,7 @@ func geneq(sym *Sym, t *Type) {
// eqfield returns the node
// p.field == q.field
func eqfield(p *Node, q *Node, field *Sym) *Node {
func eqfield(p *Node, q *Node, field *types.Sym) *Node {
nx := nodSym(OXDOT, p, field)
ny := nodSym(OXDOT, q, field)
ne := nod(OEQ, nx, ny)
@ -530,7 +533,7 @@ func eqfield(p *Node, q *Node, field *Sym) *Node {
// eqmem returns the node
// memequal(&p.field, &q.field [, size])
func eqmem(p *Node, q *Node, field *Sym, size int64) *Node {
func eqmem(p *Node, q *Node, field *types.Sym, size int64) *Node {
nx := nod(OADDR, nodSym(OXDOT, p, field), nil)
nx.Etype = 1 // does not escape
ny := nod(OADDR, nodSym(OXDOT, q, field), nil)
@ -549,7 +552,7 @@ func eqmem(p *Node, q *Node, field *Sym, size int64) *Node {
return call
}
func eqmemfunc(size int64, t *Type) (fn *Node, needsize bool) {
func eqmemfunc(size int64, t *types.Type) (fn *Node, needsize bool) {
switch size {
default:
fn = syslook("memequal")
@ -567,7 +570,7 @@ func eqmemfunc(size int64, t *Type) (fn *Node, needsize bool) {
// t is the parent struct type, and start is the field index at which to start the run.
// size is the length in bytes of the memory included in the run.
// next is the index just after the end of the memory run.
func memrun(t *Type, start int) (size int64, next int) {
func memrun(t *types.Type, start int) (size int64, next int) {
next = start
for {
next++
@ -579,7 +582,7 @@ func memrun(t *Type, start int) (size int64, next int) {
break
}
// Also, stop before a blank or non-memory field.
if f := t.Field(next); isblanksym(f.Sym) || !f.Type.IsRegularMemory() {
if f := t.Field(next); isblanksym(f.Sym) || !IsRegularMemory(f.Type) {
break
}
}
@ -588,7 +591,7 @@ func memrun(t *Type, start int) (size int64, next int) {
// ispaddedfield reports whether the i'th field of struct type t is followed
// by padding.
func ispaddedfield(t *Type, i int) bool {
func ispaddedfield(t *types.Type, i int) bool {
if !t.IsStruct() {
Fatalf("ispaddedfield called non-struct %v", t)
}

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"sort"
)
@ -21,8 +22,8 @@ func Rnd(o int64, r int64) int64 {
// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
func expandiface(t *Type) {
var fields []*Field
func expandiface(t *types.Type) {
var fields []*types.Field
for _, m := range t.Methods().Slice() {
if m.Sym != nil {
fields = append(fields, m)
@ -30,7 +31,7 @@ func expandiface(t *Type) {
}
if !m.Type.IsInterface() {
yyerrorl(m.Nname.Pos, "interface contains embedded non-interface %v", m.Type)
yyerrorl(asNode(m.Nname).Pos, "interface contains embedded non-interface %v", m.Type)
m.SetBroke(true)
t.SetBroke(true)
// Add to fields so that error messages
@ -45,7 +46,7 @@ func expandiface(t *Type) {
// (including broken ones, if any) and add to t's
// method set.
for _, t1 := range m.Type.Fields().Slice() {
f := newField()
f := types.NewField()
f.Type = t1.Type
f.SetBroke(t1.Broke())
f.Sym = t1.Sym
@ -57,10 +58,10 @@ func expandiface(t *Type) {
// Access fields directly to avoid recursively calling dowidth
// within Type.Fields().
t.Extra.(*InterType).fields.Set(fields)
t.Extra.(*types.InterType).Fields.Set(fields)
}
func offmod(t *Type) {
func offmod(t *types.Type) {
o := int32(0)
for _, f := range t.Fields().Slice() {
f.Offset = int64(o)
@ -72,7 +73,7 @@ func offmod(t *Type) {
}
}
func widstruct(errtype *Type, t *Type, o int64, flag int) int64 {
func widstruct(errtype *types.Type, t *types.Type, o int64, flag int) int64 {
starto := o
maxalign := int32(flag)
if maxalign < 1 {
@ -94,7 +95,7 @@ func widstruct(errtype *Type, t *Type, o int64, flag int) int64 {
o = Rnd(o, int64(f.Type.Align))
}
f.Offset = o
if f.Nname != nil {
if asNode(f.Nname) != nil {
// addrescapes has similar code to update these offsets.
// Usually addrescapes runs after widstruct,
// in which case we could drop this,
@ -102,11 +103,11 @@ func widstruct(errtype *Type, t *Type, o int64, flag int) int64 {
// NOTE(rsc): This comment may be stale.
// It's possible the ordering has changed and this is
// now the common case. I'm not sure.
if f.Nname.Name.Param.Stackcopy != nil {
f.Nname.Name.Param.Stackcopy.Xoffset = o
f.Nname.Xoffset = 0
if asNode(f.Nname).Name.Param.Stackcopy != nil {
asNode(f.Nname).Name.Param.Stackcopy.Xoffset = o
asNode(f.Nname).Xoffset = 0
} else {
f.Nname.Xoffset = o
asNode(f.Nname).Xoffset = o
}
}
@ -150,7 +151,7 @@ func widstruct(errtype *Type, t *Type, o int64, flag int) int64 {
return o
}
func dowidth(t *Type) {
func dowidth(t *types.Type) {
if Widthptr == 0 {
Fatalf("dowidth without betypeinit")
}
@ -162,7 +163,7 @@ func dowidth(t *Type) {
if t.Width == -2 {
if !t.Broke() {
t.SetBroke(true)
yyerrorl(t.nod.Pos, "invalid recursive type %v", t)
yyerrorl(asNode(t.Nod).Pos, "invalid recursive type %v", t)
}
t.Width = 0
@ -183,8 +184,8 @@ func dowidth(t *Type) {
defercalc++
lno := lineno
if t.nod != nil {
lineno = t.nod.Pos
if asNode(t.Nod) != nil {
lineno = asNode(t.Nod).Pos
}
t.Width = -2
@ -253,7 +254,7 @@ func dowidth(t *Type) {
// make fake type to check later to
// trigger channel argument check.
t1 := typChanArgs(t)
t1 := types.NewChanArgs(t)
checkwidth(t1)
case TCHANARGS:
@ -290,7 +291,7 @@ func dowidth(t *Type) {
if t.Elem() == nil {
break
}
if t.isDDDArray() {
if t.IsDDDArray() {
if !t.Broke() {
yyerror("use of [...] array outside of array literal")
t.SetBroke(true)
@ -325,7 +326,7 @@ func dowidth(t *Type) {
// make fake type to check later to
// trigger function argument computation.
case TFUNC:
t1 := typFuncArgs(t)
t1 := types.NewFuncArgs(t)
checkwidth(t1)
w = int64(Widthptr) // width of func type is pointer
@ -336,7 +337,7 @@ func dowidth(t *Type) {
w = widstruct(t1, t1.Recvs(), 0, 0)
w = widstruct(t1, t1.Params(), w, Widthreg)
w = widstruct(t1, t1.Results(), w, Widthreg)
t1.Extra.(*FuncType).Argwid = w
t1.Extra.(*types.FuncType).Argwid = w
if w%int64(Widthreg) != 0 {
Warn("bad type %v %d\n", t1, w)
}
@ -388,9 +389,9 @@ func dowidth(t *Type) {
// is needed immediately. checkwidth makes sure the
// size is evaluated eventually.
var deferredTypeStack []*Type
var deferredTypeStack []*types.Type
func checkwidth(t *Type) {
func checkwidth(t *types.Type) {
if t == nil {
return
}

View file

@ -114,6 +114,7 @@ package gc
import (
"bufio"
"bytes"
"cmd/compile/internal/types"
"encoding/binary"
"fmt"
"math/big"
@ -164,8 +165,8 @@ type exporter struct {
// object -> index maps, indexed in order of serialization
strIndex map[string]int
pkgIndex map[*Pkg]int
typIndex map[*Type]int
pkgIndex map[*types.Pkg]int
typIndex map[*types.Type]int
funcList []*Func
// position encoding
@ -184,8 +185,8 @@ func export(out *bufio.Writer, trace bool) int {
p := exporter{
out: out,
strIndex: map[string]int{"": 0}, // empty string is mapped to 0
pkgIndex: make(map[*Pkg]int),
typIndex: make(map[*Type]int),
pkgIndex: make(map[*types.Pkg]int),
typIndex: make(map[*types.Type]int),
posInfoFormat: true,
trace: trace,
}
@ -333,7 +334,7 @@ func export(out *bufio.Writer, trace bool) int {
p.tracef("\n")
}
if sym.isAlias() {
if IsAlias(sym) {
Fatalf("exporter: unexpected type alias %v in inlined function body", sym)
}
@ -395,7 +396,7 @@ func export(out *bufio.Writer, trace bool) int {
return p.written
}
func (p *exporter) pkg(pkg *Pkg) {
func (p *exporter) pkg(pkg *types.Pkg) {
if pkg == nil {
Fatalf("exporter: unexpected nil pkg")
}
@ -418,7 +419,7 @@ func (p *exporter) pkg(pkg *Pkg) {
p.string(pkg.Path)
}
func unidealType(typ *Type, val Val) *Type {
func unidealType(typ *types.Type, val Val) *types.Type {
// Untyped (ideal) constants get their own type. This decouples
// the constant type from the encoding of the constant value.
if typ == nil || typ.IsUntyped() {
@ -427,7 +428,7 @@ func unidealType(typ *Type, val Val) *Type {
return typ
}
func (p *exporter) obj(sym *Sym) {
func (p *exporter) obj(sym *types.Sym) {
// Exported objects may be from different packages because they
// may be re-exported via an exported alias or as dependencies in
// exported inlined function bodies. Thus, exported object names
@ -440,7 +441,7 @@ func (p *exporter) obj(sym *Sym) {
// pulled in via inlined function bodies. In that case the package
// qualifier is not needed. Possible space optimization.)
n := sym.Def
n := asNode(sym.Def)
switch n.Op {
case OLITERAL:
// constant
@ -467,7 +468,7 @@ func (p *exporter) obj(sym *Sym) {
Fatalf("exporter: export of incomplete type %v", sym)
}
if sym.isAlias() {
if IsAlias(sym) {
p.tag(aliasTag)
p.pos(n)
p.qualifiedName(sym)
@ -489,15 +490,15 @@ func (p *exporter) obj(sym *Sym) {
p.pos(n)
p.qualifiedName(sym)
sig := sym.Def.Type
inlineable := isInlineable(sym.Def)
sig := asNode(sym.Def).Type
inlineable := isInlineable(asNode(sym.Def))
p.paramList(sig.Params(), inlineable)
p.paramList(sig.Results(), inlineable)
var f *Func
if inlineable {
f = sym.Def.Func
f = asNode(sym.Def).Func
reexportdeplist(f.Inl)
}
p.funcList = append(p.funcList, f)
@ -506,7 +507,7 @@ func (p *exporter) obj(sym *Sym) {
p.tag(varTag)
p.pos(n)
p.qualifiedName(sym)
p.typ(sym.Def.Type)
p.typ(asNode(sym.Def).Type)
}
default:
@ -579,9 +580,9 @@ func isInlineable(n *Node) bool {
return false
}
var errorInterface *Type // lazily initialized
var errorInterface *types.Type // lazily initialized
func (p *exporter) typ(t *Type) {
func (p *exporter) typ(t *types.Type) {
if t == nil {
Fatalf("exporter: nil type")
}
@ -633,7 +634,7 @@ func (p *exporter) typ(t *Type) {
// write underlying type
orig := t.Orig
if orig == errortype {
if orig == types.Errortype {
// The error type is the only predeclared type which has
// a composite underlying type. When we encode that type,
// make sure to encode the underlying interface rather than
@ -654,7 +655,7 @@ func (p *exporter) typ(t *Type) {
// sort methods for reproducible export format
// TODO(gri) Determine if they are already sorted
// in which case we can drop this step.
var methods []*Field
var methods []*types.Field
for _, m := range t.Methods().Slice() {
methods = append(methods, m)
}
@ -673,11 +674,11 @@ func (p *exporter) typ(t *Type) {
Fatalf("invalid symbol name: %s (%v)", m.Sym.Name, m.Sym)
}
p.pos(m.Nname)
p.pos(asNode(m.Nname))
p.fieldSym(m.Sym, false)
sig := m.Type
mfn := sig.Nname()
mfn := asNode(sig.FuncType().Nname)
inlineable := isInlineable(mfn)
p.paramList(sig.Recvs(), inlineable)
@ -703,7 +704,7 @@ func (p *exporter) typ(t *Type) {
// otherwise we have a type literal
switch t.Etype {
case TARRAY:
if t.isDDDArray() {
if t.IsDDDArray() {
Fatalf("array bounds should be known at export time: %v", t)
}
p.tag(arrayTag)
@ -751,12 +752,12 @@ func (p *exporter) typ(t *Type) {
}
}
func (p *exporter) qualifiedName(sym *Sym) {
func (p *exporter) qualifiedName(sym *types.Sym) {
p.string(sym.Name)
p.pkg(sym.Pkg)
}
func (p *exporter) fieldList(t *Type) {
func (p *exporter) fieldList(t *types.Type) {
if p.trace && t.NumFields() > 0 {
p.tracef("fields {>")
defer p.tracef("<\n} ")
@ -771,15 +772,15 @@ func (p *exporter) fieldList(t *Type) {
}
}
func (p *exporter) field(f *Field) {
p.pos(f.Nname)
func (p *exporter) field(f *types.Field) {
p.pos(asNode(f.Nname))
p.fieldName(f)
p.typ(f.Type)
p.string(f.Note)
}
func (p *exporter) methodList(t *Type) {
var embeddeds, methods []*Field
func (p *exporter) methodList(t *types.Type) {
var embeddeds, methods []*types.Field
for _, m := range t.Methods().Slice() {
if m.Sym != nil {
@ -797,7 +798,7 @@ func (p *exporter) methodList(t *Type) {
if p.trace {
p.tracef("\n")
}
p.pos(m.Nname)
p.pos(asNode(m.Nname))
p.typ(m.Type)
}
if p.trace && len(embeddeds) > 0 {
@ -819,14 +820,14 @@ func (p *exporter) methodList(t *Type) {
}
}
func (p *exporter) method(m *Field) {
p.pos(m.Nname)
func (p *exporter) method(m *types.Field) {
p.pos(asNode(m.Nname))
p.methodName(m.Sym)
p.paramList(m.Type.Params(), false)
p.paramList(m.Type.Results(), false)
}
func (p *exporter) fieldName(t *Field) {
func (p *exporter) fieldName(t *types.Field) {
name := t.Sym.Name
if t.Embedded != 0 {
// anonymous field - we distinguish between 3 cases:
@ -853,14 +854,14 @@ func (p *exporter) fieldName(t *Field) {
}
// methodName is like qualifiedName but it doesn't record the package for exported names.
func (p *exporter) methodName(sym *Sym) {
func (p *exporter) methodName(sym *types.Sym) {
p.string(sym.Name)
if !exportname(sym.Name) {
p.pkg(sym.Pkg)
}
}
func basetypeName(t *Type) string {
func basetypeName(t *types.Type) string {
s := t.Sym
if s == nil && t.IsPtr() {
s = t.Elem().Sym // deref
@ -871,7 +872,7 @@ func basetypeName(t *Type) string {
return "" // unnamed type
}
func (p *exporter) paramList(params *Type, numbered bool) {
func (p *exporter) paramList(params *types.Type, numbered bool) {
if !params.IsFuncArgStruct() {
Fatalf("exporter: parameter list expected")
}
@ -894,11 +895,11 @@ func (p *exporter) paramList(params *Type, numbered bool) {
}
}
func (p *exporter) param(q *Field, n int, numbered bool) {
func (p *exporter) param(q *types.Field, n int, numbered bool) {
t := q.Type
if q.Isddd() {
// create a fake type to encode ... just for the p.typ call
t = typDDDField(t.Elem())
t = types.NewDDDField(t.Elem())
}
p.typ(t)
if n > 0 {
@ -937,7 +938,7 @@ func (p *exporter) param(q *Field, n int, numbered bool) {
p.string(q.Note)
}
func parName(f *Field, numbered bool) string {
func parName(f *types.Field, numbered bool) string {
s := f.Sym
if s == nil {
return ""
@ -945,9 +946,9 @@ func parName(f *Field, numbered bool) string {
// Take the name from the original, lest we substituted it with ~r%d or ~b%d.
// ~r%d is a (formerly) unnamed result.
if f.Nname != nil {
if f.Nname.Orig != nil {
s = f.Nname.Orig.Sym
if asNode(f.Nname) != nil {
if asNode(f.Nname).Orig != nil {
s = asNode(f.Nname).Orig.Sym
if s != nil && s.Name[0] == '~' {
if s.Name[1] == 'r' { // originally an unnamed result
return "" // s = nil
@ -974,8 +975,8 @@ func parName(f *Field, numbered bool) string {
// from other names in their context after inlining (i.e., the parameter numbering
// is a form of parameter rewriting). See issue 4326 for an example and test case.
if numbered {
if !strings.Contains(name, "·") && f.Nname != nil && f.Nname.Name != nil && f.Nname.Name.Vargen > 0 {
name = fmt.Sprintf("%s·%d", name, f.Nname.Name.Vargen) // append Vargen
if !strings.Contains(name, "·") && asNode(f.Nname) != nil && asNode(f.Nname).Name != nil && asNode(f.Nname).Name.Vargen > 0 {
name = fmt.Sprintf("%s·%d", name, asNode(f.Nname).Name.Vargen) // append Vargen
}
} else {
if i := strings.Index(name, "·"); i > 0 {
@ -1551,7 +1552,7 @@ func (p *exporter) exprsOrNil(a, b *Node) {
}
}
func (p *exporter) fieldSym(s *Sym, short bool) {
func (p *exporter) fieldSym(s *types.Sym, short bool) {
name := s.Name
// remove leading "type." in method names ("(T).m" -> "m")
@ -1858,59 +1859,59 @@ var tagString = [...]string{
// untype returns the "pseudo" untyped type for a Ctype (import/export use only).
// (we can't use a pre-initialized array because we must be sure all types are
// set up)
func untype(ctype Ctype) *Type {
func untype(ctype Ctype) *types.Type {
switch ctype {
case CTINT:
return idealint
return types.Idealint
case CTRUNE:
return idealrune
return types.Idealrune
case CTFLT:
return idealfloat
return types.Idealfloat
case CTCPLX:
return idealcomplex
return types.Idealcomplex
case CTSTR:
return idealstring
return types.Idealstring
case CTBOOL:
return idealbool
return types.Idealbool
case CTNIL:
return Types[TNIL]
return types.Types[TNIL]
}
Fatalf("exporter: unknown Ctype")
return nil
}
var predecl []*Type // initialized lazily
var predecl []*types.Type // initialized lazily
func predeclared() []*Type {
func predeclared() []*types.Type {
if predecl == nil {
// initialize lazily to be sure that all
// elements have been initialized before
predecl = []*Type{
predecl = []*types.Type{
// basic types
Types[TBOOL],
Types[TINT],
Types[TINT8],
Types[TINT16],
Types[TINT32],
Types[TINT64],
Types[TUINT],
Types[TUINT8],
Types[TUINT16],
Types[TUINT32],
Types[TUINT64],
Types[TUINTPTR],
Types[TFLOAT32],
Types[TFLOAT64],
Types[TCOMPLEX64],
Types[TCOMPLEX128],
Types[TSTRING],
types.Types[TBOOL],
types.Types[TINT],
types.Types[TINT8],
types.Types[TINT16],
types.Types[TINT32],
types.Types[TINT64],
types.Types[TUINT],
types.Types[TUINT8],
types.Types[TUINT16],
types.Types[TUINT32],
types.Types[TUINT64],
types.Types[TUINTPTR],
types.Types[TFLOAT32],
types.Types[TFLOAT64],
types.Types[TCOMPLEX64],
types.Types[TCOMPLEX128],
types.Types[TSTRING],
// basic type aliases
bytetype,
runetype,
types.Bytetype,
types.Runetype,
// error
errortype,
types.Errortype,
// untyped types
untype(CTBOOL),
@ -1922,13 +1923,13 @@ func predeclared() []*Type {
untype(CTNIL),
// package unsafe
Types[TUNSAFEPTR],
types.Types[TUNSAFEPTR],
// invalid type (package contains errors)
Types[Txxx],
types.Types[Txxx],
// any type, for builtin export data
Types[TANY],
types.Types[TANY],
}
}
return predecl

View file

@ -10,6 +10,7 @@ package gc
import (
"bufio"
"cmd/compile/internal/types"
"cmd/internal/src"
"encoding/binary"
"fmt"
@ -25,19 +26,19 @@ import (
type importer struct {
in *bufio.Reader
imp *Pkg // imported package
imp *types.Pkg // imported package
buf []byte // reused for reading strings
version int // export format version
// object lists, in order of deserialization
strList []string
pkgList []*Pkg
typList []*Type
pkgList []*types.Pkg
typList []*types.Type
funcList []*Node // nil entry means already declared
trackAllTypes bool
// for delayed type verification
cmpList []struct{ pt, t *Type }
cmpList []struct{ pt, t *types.Type }
// position encoding
posInfoFormat bool
@ -51,7 +52,7 @@ type importer struct {
}
// Import populates imp from the serialized package data read from in.
func Import(imp *Pkg, in *bufio.Reader) {
func Import(imp *types.Pkg, in *bufio.Reader) {
inimport = true
defer func() { inimport = false }()
@ -255,7 +256,7 @@ func (p *importer) verifyTypes() {
// the same name appears in an error message.
var numImport = make(map[string]int)
func (p *importer) pkg() *Pkg {
func (p *importer) pkg() *types.Pkg {
// if the package was seen before, i is its index (>= 0)
i := p.tagOrIndex()
if i >= 0 {
@ -307,10 +308,10 @@ func (p *importer) pkg() *Pkg {
return pkg
}
func idealType(typ *Type) *Type {
func idealType(typ *types.Type) *types.Type {
if typ.IsUntyped() {
// canonicalize ideal types
typ = Types[TIDEAL]
typ = types.Types[TIDEAL]
}
return typ
}
@ -347,10 +348,10 @@ func (p *importer) obj(tag int) {
sig := functypefield(nil, params, result)
importsym(p.imp, sym, ONAME)
if sym.Def != nil && sym.Def.Op == ONAME {
if asNode(sym.Def) != nil && asNode(sym.Def).Op == ONAME {
// function was imported before (via another import)
if !eqtype(sig, sym.Def.Type) {
p.formatErrorf("inconsistent definition for func %v during import\n\t%v\n\t%v", sym, sym.Def.Type, sig)
if !eqtype(sig, asNode(sym.Def).Type) {
p.formatErrorf("inconsistent definition for func %v during import\n\t%v\n\t%v", sym, asNode(sym.Def).Type, sig)
}
p.funcList = append(p.funcList, nil)
break
@ -398,8 +399,8 @@ func (p *importer) pos() src.XPos {
return xpos
}
func (p *importer) newtyp(etype EType) *Type {
t := typ(etype)
func (p *importer) newtyp(etype types.EType) *types.Type {
t := types.New(etype)
if p.trackAllTypes {
p.typList = append(p.typList, t)
}
@ -407,19 +408,19 @@ func (p *importer) newtyp(etype EType) *Type {
}
// importtype declares that pt, an imported named type, has underlying type t.
func (p *importer) importtype(pt, t *Type) {
func (p *importer) importtype(pt, t *types.Type) {
if pt.Etype == TFORW {
copytype(pt.nod, t)
copytype(asNode(pt.Nod), t)
pt.Sym.Importdef = p.imp
pt.Sym.Lastlineno = lineno
declare(pt.nod, PEXTERN)
declare(asNode(pt.Nod), PEXTERN)
checkwidth(pt)
} else {
// pt.Orig and t must be identical.
if p.trackAllTypes {
// If we track all types, t may not be fully set up yet.
// Collect the types and verify identity later.
p.cmpList = append(p.cmpList, struct{ pt, t *Type }{pt, t})
p.cmpList = append(p.cmpList, struct{ pt, t *types.Type }{pt, t})
} else if !eqtype(pt.Orig, t) {
yyerror("inconsistent definition for type %v during import\n\t%L (in %q)\n\t%L (in %q)", pt.Sym, pt, pt.Sym.Importdef.Path, t, p.imp.Path)
}
@ -430,7 +431,7 @@ func (p *importer) importtype(pt, t *Type) {
}
}
func (p *importer) typ() *Type {
func (p *importer) typ() *types.Type {
// if the type was seen before, i is its index (>= 0)
i := p.tagOrIndex()
if i >= 0 {
@ -438,7 +439,7 @@ func (p *importer) typ() *Type {
}
// otherwise, i is the type tag (< 0)
var t *Type
var t *types.Type
switch i {
case namedTag:
p.pos()
@ -488,7 +489,7 @@ func (p *importer) typ() *Type {
// (dotmeth's type).Nname.Inl, and dotmeth's type has been pulled
// out by typecheck's lookdot as this $$.ttype. So by providing
// this back link here we avoid special casing there.
n.Type.SetNname(n)
n.Type.FuncType().Nname = asTypesNode(n)
if Debug['E'] > 0 {
fmt.Printf("import [%q] meth %v \n", p.imp.Path, n)
@ -504,16 +505,16 @@ func (p *importer) typ() *Type {
t = p.newtyp(TARRAY)
bound := p.int64()
elem := p.typ()
t.Extra = &ArrayType{Elem: elem, Bound: bound}
t.Extra = &types.ArrayType{Elem: elem, Bound: bound}
case sliceTag:
t = p.newtyp(TSLICE)
elem := p.typ()
t.Extra = SliceType{Elem: elem}
t.Extra = types.SliceType{Elem: elem}
case dddTag:
t = p.newtyp(TDDDFIELD)
t.Extra = DDDFieldType{T: p.typ()}
t.Extra = types.DDDFieldType{T: p.typ()}
case structTag:
t = p.newtyp(TSTRUCT)
@ -521,8 +522,8 @@ func (p *importer) typ() *Type {
checkwidth(t)
case pointerTag:
t = p.newtyp(Tptr)
t.Extra = PtrType{Elem: p.typ()}
t = p.newtyp(types.Tptr)
t.Extra = types.PtrType{Elem: p.typ()}
case signatureTag:
t = p.newtyp(TFUNC)
@ -532,7 +533,7 @@ func (p *importer) typ() *Type {
case interfaceTag:
if ml := p.methodList(); len(ml) == 0 {
t = Types[TINTER]
t = types.Types[TINTER]
} else {
t = p.newtyp(TINTER)
t.SetInterface(ml)
@ -547,7 +548,7 @@ func (p *importer) typ() *Type {
case chanTag:
t = p.newtyp(TCHAN)
ct := t.ChanType()
ct.Dir = ChanDir(p.int())
ct.Dir = types.ChanDir(p.int())
ct.Elem = p.typ()
default:
@ -561,15 +562,15 @@ func (p *importer) typ() *Type {
return t
}
func (p *importer) qualifiedName() *Sym {
func (p *importer) qualifiedName() *types.Sym {
name := p.string()
pkg := p.pkg()
return pkg.Lookup(name)
}
func (p *importer) fieldList() (fields []*Field) {
func (p *importer) fieldList() (fields []*types.Field) {
if n := p.int(); n > 0 {
fields = make([]*Field, n)
fields = make([]*types.Field, n)
for i := range fields {
fields[i] = p.field()
}
@ -577,13 +578,13 @@ func (p *importer) fieldList() (fields []*Field) {
return
}
func (p *importer) field() *Field {
func (p *importer) field() *types.Field {
p.pos()
sym, alias := p.fieldName()
typ := p.typ()
note := p.string()
f := newField()
f := types.NewField()
if sym.Name == "" {
// anonymous field: typ must be T or *T and T must be a type name
s := typ.Sym
@ -598,18 +599,18 @@ func (p *importer) field() *Field {
}
f.Sym = sym
f.Nname = newname(sym)
f.Nname = asTypesNode(newname(sym))
f.Type = typ
f.Note = note
return f
}
func (p *importer) methodList() (methods []*Field) {
func (p *importer) methodList() (methods []*types.Field) {
for n := p.int(); n > 0; n-- {
f := newField()
f.Nname = newname(nblank.Sym)
f.Nname.Pos = p.pos()
f := types.NewField()
f.Nname = asTypesNode(newname(nblank.Sym))
asNode(f.Nname).Pos = p.pos()
f.Type = p.typ()
methods = append(methods, f)
}
@ -621,20 +622,20 @@ func (p *importer) methodList() (methods []*Field) {
return
}
func (p *importer) method() *Field {
func (p *importer) method() *types.Field {
p.pos()
sym := p.methodName()
params := p.paramList()
result := p.paramList()
f := newField()
f := types.NewField()
f.Sym = sym
f.Nname = newname(sym)
f.Nname = asTypesNode(newname(sym))
f.Type = functypefield(fakethisfield(), params, result)
return f
}
func (p *importer) fieldName() (*Sym, bool) {
func (p *importer) fieldName() (*types.Sym, bool) {
name := p.string()
if p.version == 0 && name == "_" {
// version 0 didn't export a package for _ field names
@ -663,7 +664,7 @@ func (p *importer) fieldName() (*Sym, bool) {
return pkg.Lookup(name), alias
}
func (p *importer) methodName() *Sym {
func (p *importer) methodName() *types.Sym {
name := p.string()
if p.version == 0 && name == "_" {
// version 0 didn't export a package for _ method names
@ -677,7 +678,7 @@ func (p *importer) methodName() *Sym {
return pkg.Lookup(name)
}
func (p *importer) paramList() []*Field {
func (p *importer) paramList() []*types.Field {
i := p.int()
if i == 0 {
return nil
@ -689,19 +690,19 @@ func (p *importer) paramList() []*Field {
named = false
}
// i > 0
fs := make([]*Field, i)
fs := make([]*types.Field, i)
for i := range fs {
fs[i] = p.param(named)
}
return fs
}
func (p *importer) param(named bool) *Field {
f := newField()
func (p *importer) param(named bool) *types.Field {
f := types.NewField()
f.Type = p.typ()
if f.Type.Etype == TDDDFIELD {
// TDDDFIELD indicates wrapped ... slice type
f.Type = typSlice(f.Type.DDDField())
f.Type = types.NewSlice(f.Type.DDDField())
f.SetIsddd(true)
}
@ -717,7 +718,7 @@ func (p *importer) param(named bool) *Field {
pkg = p.pkg()
}
f.Sym = pkg.Lookup(name)
f.Nname = newname(f.Sym)
f.Nname = asTypesNode(newname(f.Sym))
}
// TODO(gri) This is compiler-specific (escape info).
@ -727,7 +728,7 @@ func (p *importer) param(named bool) *Field {
return f
}
func (p *importer) value(typ *Type) (x Val) {
func (p *importer) value(typ *types.Type) (x Val) {
switch tag := p.tagOrIndex(); tag {
case falseTag:
x.U = false
@ -738,13 +739,13 @@ func (p *importer) value(typ *Type) (x Val) {
case int64Tag:
u := new(Mpint)
u.SetInt64(p.int64())
u.Rune = typ == idealrune
u.Rune = typ == types.Idealrune
x.U = u
case floatTag:
f := newMpflt()
p.float(f)
if typ == idealint || typ.IsInteger() {
if typ == types.Idealint || typ.IsInteger() {
// uncommon case: large int encoded as float
u := new(Mpint)
u.SetFloat(f)
@ -885,7 +886,7 @@ func (p *importer) node() *Node {
// (issue 16317).
if typ.IsUnsafePtr() {
n = nod(OCONV, n, nil)
n.Type = Types[TUINTPTR]
n.Type = types.Types[TUINTPTR]
}
n = nod(OCONV, n, nil)
n.Type = typ
@ -1059,7 +1060,7 @@ func (p *importer) node() *Node {
case OASOP:
n := nodl(p.pos(), OASOP, nil, nil)
n.Etype = EType(p.int())
n.Etype = types.EType(p.int())
n.Left = p.expr()
if !p.bool() {
n.Right = nodintconst(1)
@ -1189,7 +1190,7 @@ func (p *importer) exprsOrNil() (a, b *Node) {
return
}
func (p *importer) fieldSym() *Sym {
func (p *importer) fieldSym() *types.Sym {
name := p.string()
pkg := localpkg
if !exportname(name) {
@ -1198,7 +1199,7 @@ func (p *importer) fieldSym() *Sym {
return pkg.Lookup(name)
}
func (p *importer) sym() *Sym {
func (p *importer) sym() *types.Sym {
name := p.string()
pkg := localpkg
if name != "_" {

View file

@ -2,6 +2,8 @@
package gc
import "cmd/compile/internal/types"
var runtimeDecls = [...]struct {
name string
tag int
@ -145,58 +147,58 @@ var runtimeDecls = [...]struct {
{"support_popcnt", varTag, 11},
}
func runtimeTypes() []*Type {
var typs [112]*Type
typs[0] = bytetype
typs[1] = typPtr(typs[0])
typs[2] = Types[TANY]
typs[3] = typPtr(typs[2])
func runtimeTypes() []*types.Type {
var typs [112]*types.Type
typs[0] = types.Bytetype
typs[1] = types.NewPtr(typs[0])
typs[2] = types.Types[TANY]
typs[3] = types.NewPtr(typs[2])
typs[4] = functype(nil, []*Node{anonfield(typs[1])}, []*Node{anonfield(typs[3])})
typs[5] = functype(nil, nil, nil)
typs[6] = Types[TINTER]
typs[6] = types.Types[TINTER]
typs[7] = functype(nil, []*Node{anonfield(typs[6])}, nil)
typs[8] = Types[TINT32]
typs[9] = typPtr(typs[8])
typs[8] = types.Types[TINT32]
typs[9] = types.NewPtr(typs[8])
typs[10] = functype(nil, []*Node{anonfield(typs[9])}, []*Node{anonfield(typs[6])})
typs[11] = Types[TBOOL]
typs[11] = types.Types[TBOOL]
typs[12] = functype(nil, []*Node{anonfield(typs[11])}, nil)
typs[13] = Types[TFLOAT64]
typs[13] = types.Types[TFLOAT64]
typs[14] = functype(nil, []*Node{anonfield(typs[13])}, nil)
typs[15] = Types[TINT64]
typs[15] = types.Types[TINT64]
typs[16] = functype(nil, []*Node{anonfield(typs[15])}, nil)
typs[17] = Types[TUINT64]
typs[17] = types.Types[TUINT64]
typs[18] = functype(nil, []*Node{anonfield(typs[17])}, nil)
typs[19] = Types[TCOMPLEX128]
typs[19] = types.Types[TCOMPLEX128]
typs[20] = functype(nil, []*Node{anonfield(typs[19])}, nil)
typs[21] = Types[TSTRING]
typs[21] = types.Types[TSTRING]
typs[22] = functype(nil, []*Node{anonfield(typs[21])}, nil)
typs[23] = functype(nil, []*Node{anonfield(typs[2])}, nil)
typs[24] = typArray(typs[0], 32)
typs[25] = typPtr(typs[24])
typs[24] = types.NewArray(typs[0], 32)
typs[25] = types.NewPtr(typs[24])
typs[26] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[21]), anonfield(typs[21])}, []*Node{anonfield(typs[21])})
typs[27] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[21]), anonfield(typs[21]), anonfield(typs[21])}, []*Node{anonfield(typs[21])})
typs[28] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[21]), anonfield(typs[21]), anonfield(typs[21]), anonfield(typs[21])}, []*Node{anonfield(typs[21])})
typs[29] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[21]), anonfield(typs[21]), anonfield(typs[21]), anonfield(typs[21]), anonfield(typs[21])}, []*Node{anonfield(typs[21])})
typs[30] = typSlice(typs[21])
typs[30] = types.NewSlice(typs[21])
typs[31] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[30])}, []*Node{anonfield(typs[21])})
typs[32] = Types[TINT]
typs[32] = types.Types[TINT]
typs[33] = functype(nil, []*Node{anonfield(typs[21]), anonfield(typs[21])}, []*Node{anonfield(typs[32])})
typs[34] = functype(nil, []*Node{anonfield(typs[21]), anonfield(typs[21])}, []*Node{anonfield(typs[11])})
typs[35] = typArray(typs[0], 4)
typs[36] = typPtr(typs[35])
typs[35] = types.NewArray(typs[0], 4)
typs[36] = types.NewPtr(typs[35])
typs[37] = functype(nil, []*Node{anonfield(typs[36]), anonfield(typs[15])}, []*Node{anonfield(typs[21])})
typs[38] = typSlice(typs[0])
typs[38] = types.NewSlice(typs[0])
typs[39] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[38])}, []*Node{anonfield(typs[21])})
typs[40] = functype(nil, []*Node{anonfield(typs[38])}, []*Node{anonfield(typs[21])})
typs[41] = runetype
typs[42] = typSlice(typs[41])
typs[41] = types.Runetype
typs[42] = types.NewSlice(typs[41])
typs[43] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[42])}, []*Node{anonfield(typs[21])})
typs[44] = functype(nil, []*Node{anonfield(typs[25]), anonfield(typs[21])}, []*Node{anonfield(typs[38])})
typs[45] = typArray(typs[41], 32)
typs[46] = typPtr(typs[45])
typs[45] = types.NewArray(typs[41], 32)
typs[46] = types.NewPtr(typs[45])
typs[47] = functype(nil, []*Node{anonfield(typs[46]), anonfield(typs[21])}, []*Node{anonfield(typs[42])})
typs[48] = functype(nil, []*Node{anonfield(typs[21]), anonfield(typs[32])}, []*Node{anonfield(typs[41]), anonfield(typs[32])})
typs[49] = Types[TUINTPTR]
typs[49] = types.Types[TUINTPTR]
typs[50] = functype(nil, []*Node{anonfield(typs[2]), anonfield(typs[2]), anonfield(typs[49])}, []*Node{anonfield(typs[32])})
typs[51] = functype(nil, []*Node{anonfield(typs[2]), anonfield(typs[2])}, []*Node{anonfield(typs[32])})
typs[52] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2])}, []*Node{anonfield(typs[2])})
@ -204,10 +206,10 @@ func runtimeTypes() []*Type {
typs[54] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2])}, []*Node{anonfield(typs[2]), anonfield(typs[11])})
typs[55] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[1]), anonfield(typs[1])}, nil)
typs[56] = functype(nil, []*Node{anonfield(typs[1])}, nil)
typs[57] = typPtr(typs[49])
typs[58] = Types[TUNSAFEPTR]
typs[57] = types.NewPtr(typs[49])
typs[58] = types.Types[TUNSAFEPTR]
typs[59] = functype(nil, []*Node{anonfield(typs[57]), anonfield(typs[58]), anonfield(typs[58])}, []*Node{anonfield(typs[11])})
typs[60] = typMap(typs[2], typs[2])
typs[60] = types.NewMap(typs[2], typs[2])
typs[61] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[3]), anonfield(typs[3])}, []*Node{anonfield(typs[60])})
typs[62] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[60]), anonfield(typs[3])}, []*Node{anonfield(typs[3])})
typs[63] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[60]), anonfield(typs[2])}, []*Node{anonfield(typs[3])})
@ -218,14 +220,14 @@ func runtimeTypes() []*Type {
typs[68] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[60]), anonfield(typs[3])}, nil)
typs[69] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[60]), anonfield(typs[2])}, nil)
typs[70] = functype(nil, []*Node{anonfield(typs[3])}, nil)
typs[71] = typChan(typs[2], Cboth)
typs[71] = types.NewChan(typs[2], types.Cboth)
typs[72] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15])}, []*Node{anonfield(typs[71])})
typs[73] = typChan(typs[2], Crecv)
typs[73] = types.NewChan(typs[2], types.Crecv)
typs[74] = functype(nil, []*Node{anonfield(typs[73]), anonfield(typs[3])}, nil)
typs[75] = functype(nil, []*Node{anonfield(typs[73]), anonfield(typs[3])}, []*Node{anonfield(typs[11])})
typs[76] = typChan(typs[2], Csend)
typs[76] = types.NewChan(typs[2], types.Csend)
typs[77] = functype(nil, []*Node{anonfield(typs[76]), anonfield(typs[3])}, nil)
typs[78] = typArray(typs[0], 3)
typs[78] = types.NewArray(typs[0], 3)
typs[79] = tostruct([]*Node{namedfield("enabled", typs[11]), namedfield("pad", typs[78]), namedfield("needed", typs[11]), namedfield("cgo", typs[11]), namedfield("alignme", typs[17])})
typs[80] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[2])}, nil)
typs[81] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[3]), anonfield(typs[3])}, nil)
@ -233,13 +235,13 @@ func runtimeTypes() []*Type {
typs[83] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[2]), anonfield(typs[2])}, []*Node{anonfield(typs[32])})
typs[84] = functype(nil, []*Node{anonfield(typs[76]), anonfield(typs[3])}, []*Node{anonfield(typs[11])})
typs[85] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[73])}, []*Node{anonfield(typs[11])})
typs[86] = typPtr(typs[11])
typs[86] = types.NewPtr(typs[11])
typs[87] = functype(nil, []*Node{anonfield(typs[3]), anonfield(typs[86]), anonfield(typs[73])}, []*Node{anonfield(typs[11])})
typs[88] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[8])}, nil)
typs[89] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[76]), anonfield(typs[3])}, nil)
typs[90] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[73]), anonfield(typs[3]), anonfield(typs[86])}, nil)
typs[91] = functype(nil, []*Node{anonfield(typs[1])}, []*Node{anonfield(typs[32])})
typs[92] = typSlice(typs[2])
typs[92] = types.NewSlice(typs[2])
typs[93] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[32]), anonfield(typs[32])}, []*Node{anonfield(typs[92])})
typs[94] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[15]), anonfield(typs[15])}, []*Node{anonfield(typs[92])})
typs[95] = functype(nil, []*Node{anonfield(typs[1]), anonfield(typs[92]), anonfield(typs[32])}, []*Node{anonfield(typs[92])})
@ -251,7 +253,7 @@ func runtimeTypes() []*Type {
typs[101] = functype(nil, []*Node{anonfield(typs[17]), anonfield(typs[17])}, []*Node{anonfield(typs[17])})
typs[102] = functype(nil, []*Node{anonfield(typs[13])}, []*Node{anonfield(typs[15])})
typs[103] = functype(nil, []*Node{anonfield(typs[13])}, []*Node{anonfield(typs[17])})
typs[104] = Types[TUINT32]
typs[104] = types.Types[TUINT32]
typs[105] = functype(nil, []*Node{anonfield(typs[13])}, []*Node{anonfield(typs[104])})
typs[106] = functype(nil, []*Node{anonfield(typs[15])}, []*Node{anonfield(typs[13])})
typs[107] = functype(nil, []*Node{anonfield(typs[17])}, []*Node{anonfield(typs[13])})

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/src"
)
@ -76,7 +77,7 @@ func (l *cfLabel) defined() bool { return l.defNode != nil }
func (l *cfLabel) used() bool { return l.useNode != nil }
// label returns the label associated with sym, creating it if necessary.
func (c *controlflow) label(sym *Sym) *cfLabel {
func (c *controlflow) label(sym *types.Sym) *cfLabel {
lab := c.labels[sym.Name]
if lab == nil {
lab = new(cfLabel)
@ -253,8 +254,8 @@ func (c *controlflow) checkgoto(from *Node, to *Node) {
// Decide what to complain about. Unwind to.Sym until where it
// forked from from.Sym, and keep track of the innermost block
// and declaration we jumped into/over.
var block *Sym
var dcl *Sym
var block *types.Sym
var dcl *types.Sym
// If to.Sym is longer, unwind until it's the same length.
ts := to.Sym
@ -290,7 +291,7 @@ func (c *controlflow) checkgoto(from *Node, to *Node) {
// dcldepth returns the declaration depth for a dclstack Sym; that is,
// the sum of the block nesting level and the number of declarations
// in scope.
func dcldepth(s *Sym) int {
func dcldepth(s *types.Sym) int {
n := 0
for ; s != nil; s = s.Link {
n++

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"fmt"
)
@ -155,7 +156,7 @@ func typecheckclosure(func_ *Node, top int) {
var closurename_closgen int
func closurename(n *Node) *Sym {
func closurename(n *Node) *types.Sym {
if n.Sym != nil {
return n.Sym
}
@ -282,7 +283,7 @@ func capturevars(xfunc *Node) {
}
if Debug['m'] > 1 {
var name *Sym
var name *types.Sym
if v.Name.Curfn != nil && v.Name.Curfn.Func.Nname != nil {
name = v.Name.Curfn.Func.Nname.Sym
}
@ -327,35 +328,35 @@ func transformclosure(xfunc *Node) {
f := xfunc.Func.Nname
// We are going to insert captured variables before input args.
var params []*Field
var params []*types.Field
var decls []*Node
for _, v := range func_.Func.Cvars.Slice() {
if v.Op == OXXX {
continue
}
fld := newField()
fld.Funarg = FunargParams
fld := types.NewField()
fld.Funarg = types.FunargParams
if v.Name.Byval() {
// If v is captured by value, we merely downgrade it to PPARAM.
v.Class = PPARAM
fld.Nname = v
fld.Nname = asTypesNode(v)
} else {
// If v of type T is captured by reference,
// we introduce function param &v *T
// and v remains PAUTOHEAP with &v heapaddr
// (accesses will implicitly deref &v).
addr := newname(lookup("&" + v.Sym.Name))
addr.Type = typPtr(v.Type)
addr.Type = types.NewPtr(v.Type)
addr.Class = PPARAM
v.Name.Param.Heapaddr = addr
fld.Nname = addr
fld.Nname = asTypesNode(addr)
}
fld.Type = fld.Nname.Type
fld.Sym = fld.Nname.Sym
fld.Type = asNode(fld.Nname).Type
fld.Sym = asNode(fld.Nname).Sym
params = append(params, fld)
decls = append(decls, fld.Nname)
decls = append(decls, asNode(fld.Nname))
}
if len(params) > 0 {
@ -380,7 +381,7 @@ func transformclosure(xfunc *Node) {
cv.Type = v.Type
if !v.Name.Byval() {
cv.Type = typPtr(v.Type)
cv.Type = types.NewPtr(v.Type)
}
offset = Rnd(offset, int64(cv.Type.Align))
cv.Xoffset = offset
@ -395,7 +396,7 @@ func transformclosure(xfunc *Node) {
// Declare variable holding addresses taken from closure
// and initialize in entry prologue.
addr := newname(lookup("&" + v.Sym.Name))
addr.Type = typPtr(v.Type)
addr.Type = types.NewPtr(v.Type)
addr.Class = PAUTO
addr.SetUsed(true)
addr.Name.Curfn = xfunc
@ -473,7 +474,7 @@ func walkclosure(func_ *Node, init *Nodes) *Node {
typ := nod(OTSTRUCT, nil, nil)
typ.List.Set1(namedfield(".F", Types[TUINTPTR]))
typ.List.Set1(namedfield(".F", types.Types[TUINTPTR]))
for _, v := range func_.Func.Cvars.Slice() {
if v.Op == OXXX {
continue
@ -513,7 +514,7 @@ func walkclosure(func_ *Node, init *Nodes) *Node {
return walkexpr(clos, init)
}
func typecheckpartialcall(fn *Node, sym *Sym) {
func typecheckpartialcall(fn *Node, sym *types.Sym) {
switch fn.Op {
case ODOTINTER, ODOTMETH:
break
@ -530,9 +531,9 @@ func typecheckpartialcall(fn *Node, sym *Sym) {
fn.Type = xfunc.Type
}
var makepartialcall_gopkg *Pkg
var makepartialcall_gopkg *types.Pkg
func makepartialcall(fn *Node, t0 *Type, meth *Sym) *Node {
func makepartialcall(fn *Node, t0 *types.Type, meth *types.Sym) *Node {
var p string
rcvrtype := fn.Left.Type
@ -549,7 +550,7 @@ func makepartialcall(fn *Node, t0 *Type, meth *Sym) *Node {
Fatalf("missing base type for %v", rcvrtype)
}
var spkg *Pkg
var spkg *types.Pkg
if basetype.Sym != nil {
spkg = basetype.Sym.Pkg
}
@ -563,7 +564,7 @@ func makepartialcall(fn *Node, t0 *Type, meth *Sym) *Node {
sym := spkg.Lookup(p)
if sym.Uniq() {
return sym.Def
return asNode(sym.Def)
}
sym.SetUniq(true)
@ -629,7 +630,7 @@ func makepartialcall(fn *Node, t0 *Type, meth *Sym) *Node {
ptr.Type = rcvrtype
body = append(body, nod(OAS, ptr, cv))
} else {
ptr.Type = typPtr(rcvrtype)
ptr.Type = types.NewPtr(rcvrtype)
body = append(body, nod(OAS, ptr, nod(OADDR, cv, nil)))
}
@ -650,7 +651,7 @@ func makepartialcall(fn *Node, t0 *Type, meth *Sym) *Node {
xfunc.Nbody.Set(body)
xfunc = typecheck(xfunc, Etop)
sym.Def = xfunc
sym.Def = asTypesNode(xfunc)
xtop = append(xtop, xfunc)
Curfn = savecurfn
@ -674,7 +675,7 @@ func walkpartialcall(n *Node, init *Nodes) *Node {
}
typ := nod(OTSTRUCT, nil, nil)
typ.List.Set1(namedfield("F", Types[TUINTPTR]))
typ.List.Set1(namedfield("F", types.Types[TUINTPTR]))
typ.List.Append(namedfield("R", n.Left.Type))
clos := nod(OCOMPLIT, nil, nod(OIND, typ, nil))

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/src"
"math/big"
"strings"
@ -132,7 +133,7 @@ func (n *Node) Bool() bool {
// truncate float literal fv to 32-bit or 64-bit precision
// according to type; return truncated value.
func truncfltlit(oldv *Mpflt, t *Type) *Mpflt {
func truncfltlit(oldv *Mpflt, t *types.Type) *Mpflt {
if t == nil {
return oldv
}
@ -147,7 +148,7 @@ func truncfltlit(oldv *Mpflt, t *Type) *Mpflt {
// convert large precision literal floating
// into limited precision (float64 or float32)
switch t.Etype {
case TFLOAT64:
case types.TFLOAT64:
d := fv.Float64()
fv.SetFloat64(d)
@ -172,7 +173,7 @@ const (
// implicit conversion.
// The result of convlit MUST be assigned back to n, e.g.
// n.Left = convlit(n.Left, t)
func convlit(n *Node, t *Type) *Node {
func convlit(n *Node, t *types.Type) *Node {
return convlit1(n, t, false, noReuse)
}
@ -180,7 +181,7 @@ func convlit(n *Node, t *Type) *Node {
// It returns a new node if necessary.
// The result of convlit1 MUST be assigned back to n, e.g.
// n.Left = convlit1(n.Left, t, explicit, reuse)
func convlit1(n *Node, t *Type, explicit bool, reuse canReuseNode) *Node {
func convlit1(n *Node, t *types.Type, explicit bool, reuse canReuseNode) *Node {
if n == nil || t == nil || n.Type == nil || t.IsUntyped() || n.Type == t {
return n
}
@ -198,11 +199,11 @@ func convlit1(n *Node, t *Type, explicit bool, reuse canReuseNode) *Node {
switch n.Op {
default:
if n.Type == idealbool {
if n.Type == types.Idealbool {
if t.IsBoolean() {
n.Type = t
} else {
n.Type = Types[TBOOL]
n.Type = types.Types[TBOOL]
}
}
@ -240,17 +241,17 @@ func convlit1(n *Node, t *Type, explicit bool, reuse canReuseNode) *Node {
default:
// If trying to convert to non-complex type,
// leave as complex128 and let typechecker complain.
t = Types[TCOMPLEX128]
t = types.Types[TCOMPLEX128]
fallthrough
case TCOMPLEX128:
case types.TCOMPLEX128:
n.Type = t
n.Left = convlit(n.Left, Types[TFLOAT64])
n.Right = convlit(n.Right, Types[TFLOAT64])
n.Left = convlit(n.Left, types.Types[TFLOAT64])
n.Right = convlit(n.Right, types.Types[TFLOAT64])
case TCOMPLEX64:
n.Type = t
n.Left = convlit(n.Left, Types[TFLOAT32])
n.Right = convlit(n.Right, Types[TFLOAT32])
n.Left = convlit(n.Left, types.Types[TFLOAT32])
n.Right = convlit(n.Right, types.Types[TFLOAT32])
}
}
@ -263,14 +264,14 @@ func convlit1(n *Node, t *Type, explicit bool, reuse canReuseNode) *Node {
}
ct := consttype(n)
var et EType
var et types.EType
if ct < 0 {
goto bad
}
et = t.Etype
if et == TINTER {
if ct == CTNIL && n.Type == Types[TNIL] {
if ct == CTNIL && n.Type == types.Types[TNIL] {
n.Type = t
return n
}
@ -361,7 +362,7 @@ func convlit1(n *Node, t *Type, explicit bool, reuse canReuseNode) *Node {
case CTCPLX:
overflow(n.Val(), t)
}
} else if et == TSTRING && (ct == CTINT || ct == CTRUNE) && explicit {
} else if et == types.TSTRING && (ct == CTINT || ct == CTRUNE) && explicit {
n.SetVal(tostr(n.Val()))
} else {
goto bad
@ -492,7 +493,7 @@ func toint(v Val) Val {
return v
}
func doesoverflow(v Val, t *Type) bool {
func doesoverflow(v Val, t *types.Type) bool {
switch u := v.U.(type) {
case *Mpint:
if !t.IsInteger() {
@ -517,7 +518,7 @@ func doesoverflow(v Val, t *Type) bool {
return false
}
func overflow(v Val, t *Type) {
func overflow(v Val, t *types.Type) {
// v has already been converted
// to appropriate form for t.
if t == nil || t.Etype == TIDEAL {
@ -702,7 +703,7 @@ func evconst(n *Node) {
nr := n.Right
var rv Val
var lno src.XPos
var wr EType
var wr types.EType
var v Val
var norig *Node
var nn *Node
@ -750,7 +751,7 @@ func evconst(n *Node) {
case OCOM_ | CTINT_,
OCOM_ | CTRUNE_:
var et EType = Txxx
var et types.EType = Txxx
if nl.Type != nil {
et = nl.Type.Etype
}
@ -836,7 +837,7 @@ func evconst(n *Node) {
// right must be unsigned.
// left can be ideal.
case OLSH, ORSH:
nr = defaultlit(nr, Types[TUINT])
nr = defaultlit(nr, types.Types[TUINT])
n.Right = nr
if nr.Type != nil && (nr.Type.IsSigned() || !nr.Type.IsInteger()) {
@ -1219,16 +1220,16 @@ func nodlit(v Val) *Node {
Fatalf("nodlit ctype %d", v.Ctype())
case CTSTR:
n.Type = idealstring
n.Type = types.Idealstring
case CTBOOL:
n.Type = idealbool
n.Type = types.Idealbool
case CTINT, CTRUNE, CTFLT, CTCPLX:
n.Type = Types[TIDEAL]
n.Type = types.Types[TIDEAL]
case CTNIL:
n.Type = Types[TNIL]
n.Type = types.Types[TNIL]
}
return n
@ -1240,7 +1241,7 @@ func nodcplxlit(r Val, i Val) *Node {
c := new(Mpcplx)
n := nod(OLITERAL, nil, nil)
n.Type = Types[TIDEAL]
n.Type = types.Types[TIDEAL]
n.SetVal(Val{c})
if r.Ctype() != CTFLT || i.Ctype() != CTFLT {
@ -1318,13 +1319,13 @@ func idealkind(n *Node) Ctype {
// The result of defaultlit MUST be assigned back to n, e.g.
// n.Left = defaultlit(n.Left, t)
func defaultlit(n *Node, t *Type) *Node {
func defaultlit(n *Node, t *types.Type) *Node {
return defaultlitreuse(n, t, noReuse)
}
// The result of defaultlitreuse MUST be assigned back to n, e.g.
// n.Left = defaultlitreuse(n.Left, t, reuse)
func defaultlitreuse(n *Node, t *Type, reuse canReuseNode) *Node {
func defaultlitreuse(n *Node, t *types.Type, reuse canReuseNode) *Node {
if n == nil || !n.Type.IsUntyped() {
return n
}
@ -1337,7 +1338,7 @@ func defaultlitreuse(n *Node, t *Type, reuse canReuseNode) *Node {
lno := setlineno(n)
ctype := idealkind(n)
var t1 *Type
var t1 *types.Type
switch ctype {
default:
if t != nil {
@ -1356,7 +1357,7 @@ func defaultlitreuse(n *Node, t *Type, reuse canReuseNode) *Node {
}
if n.Val().Ctype() == CTSTR {
t1 := Types[TSTRING]
t1 := types.Types[TSTRING]
n = convlit1(n, t1, false, reuse)
break
}
@ -1367,26 +1368,26 @@ func defaultlitreuse(n *Node, t *Type, reuse canReuseNode) *Node {
Fatalf("defaultlit: idealkind is CTxxx: %+v", n)
case CTBOOL:
t1 := Types[TBOOL]
t1 := types.Types[TBOOL]
if t != nil && t.IsBoolean() {
t1 = t
}
n = convlit1(n, t1, false, reuse)
case CTINT:
t1 = Types[TINT]
t1 = types.Types[TINT]
goto num
case CTRUNE:
t1 = runetype
t1 = types.Runetype
goto num
case CTFLT:
t1 = Types[TFLOAT64]
t1 = types.Types[TFLOAT64]
goto num
case CTCPLX:
t1 = Types[TCOMPLEX128]
t1 = types.Types[TCOMPLEX128]
goto num
}
@ -1446,32 +1447,32 @@ func defaultlit2(l *Node, r *Node, force bool) (*Node, *Node) {
}
if l.Type.IsBoolean() {
l = convlit(l, Types[TBOOL])
r = convlit(r, Types[TBOOL])
l = convlit(l, types.Types[TBOOL])
r = convlit(r, types.Types[TBOOL])
}
lkind := idealkind(l)
rkind := idealkind(r)
if lkind == CTCPLX || rkind == CTCPLX {
l = convlit(l, Types[TCOMPLEX128])
r = convlit(r, Types[TCOMPLEX128])
l = convlit(l, types.Types[TCOMPLEX128])
r = convlit(r, types.Types[TCOMPLEX128])
return l, r
}
if lkind == CTFLT || rkind == CTFLT {
l = convlit(l, Types[TFLOAT64])
r = convlit(r, Types[TFLOAT64])
l = convlit(l, types.Types[TFLOAT64])
r = convlit(r, types.Types[TFLOAT64])
return l, r
}
if lkind == CTRUNE || rkind == CTRUNE {
l = convlit(l, runetype)
r = convlit(r, runetype)
l = convlit(l, types.Runetype)
r = convlit(r, types.Runetype)
return l, r
}
l = convlit(l, Types[TINT])
r = convlit(r, Types[TINT])
l = convlit(l, types.Types[TINT])
r = convlit(r, types.Types[TINT])
return l, r
}
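
The defaults applied here follow the language's rule for untyped constants: bool, rune, int, float64, complex128, or string, with defaultlit2 preferring the wider kind (complex over float over rune over int) when mixing operands. A standalone illustration of those defaults:

package main

import "fmt"

func main() {
	// Short variable declarations pick the same default types that
	// defaultlit assigns to untyped constants.
	i := 1
	r := 'x'
	f := 1.5
	c := 2i
	b := true
	s := "hi"
	fmt.Printf("%T %T %T %T %T %T\n", i, r, f, c, b, s)
	// Output: int int32 float64 complex128 bool string
}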
@ -1673,13 +1674,13 @@ func isgoconst(n *Node) bool {
}
case ONAME:
l := n.Sym.Def
l := asNode(n.Sym.Def)
if l != nil && l.Op == OLITERAL && n.Val().Ctype() != CTNIL {
return true
}
case ONONAME:
if n.Sym.Def != nil && n.Sym.Def.Op == OIOTA {
if asNode(n.Sym.Def) != nil && asNode(n.Sym.Def).Op == OIOTA {
return true
}
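
asNode and asTypesNode, used throughout these hunks, convert between *gc.Node and the placeholder *types.Node; their definitions are not part of the hunks shown here. A self-contained sketch of the pattern, using stand-in types (gcNode and typesNode below are illustrative names, not the real ones) and assuming the placeholder is only ever a re-typed node pointer:

package main

import (
	"fmt"
	"unsafe"
)

// Stand-ins for the real gc.Node and the opaque types.Node placeholder.
type gcNode struct{ op string }
type typesNode struct{}

// Pointer round-trip in the style of asTypesNode/asNode.
func asTypesNode(n *gcNode) *typesNode { return (*typesNode)(unsafe.Pointer(n)) }
func asNode(n *typesNode) *gcNode      { return (*gcNode)(unsafe.Pointer(n)) }

func main() {
	n := &gcNode{op: "ONAME"}
	back := asNode(asTypesNode(n))
	fmt.Println(back == n, back.op) // true ONAME
}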

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
"strings"
@ -30,9 +31,9 @@ var block int32 // current block number
// Finally, the Syms in this list are not "real" Syms as they don't actually
// represent object names. Sym is just a convenient type for saving shadowed
// Sym definitions, and only a subset of its fields are actually used.
var dclstack *Sym
var dclstack *types.Sym
func dcopy(a, b *Sym) {
func dcopy(a, b *types.Sym) {
a.Pkg = b.Pkg
a.Name = b.Name
a.Def = b.Def
@ -40,8 +41,8 @@ func dcopy(a, b *Sym) {
a.Lastlineno = b.Lastlineno
}
func push() *Sym {
d := new(Sym)
func push() *types.Sym {
d := new(types.Sym)
d.Lastlineno = lineno
d.Link = dclstack
dclstack = d
@ -50,7 +51,7 @@ func push() *Sym {
// pushdcl pushes the current declaration for symbol s (if any) so that
// it can be shadowed by a new declaration within a nested block scope.
func pushdcl(s *Sym) *Sym {
func pushdcl(s *types.Sym) *types.Sym {
d := push()
dcopy(d, s)
return d
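
dcopy, push, and pushdcl here (together with popdcl) implement save-and-restore scoping: before a nested block shadows a name, its current binding is copied onto dclstack and restored when the block ends. A minimal sketch of the same idea, detached from the compiler's Sym type:

package main

import "fmt"

type saved struct {
	name string
	prev string
}

type scope struct {
	bindings map[string]string
	stack    []saved // plays the role of dclstack
	marks    []int   // block boundaries, like markdcl
}

func (s *scope) enter() { s.marks = append(s.marks, len(s.stack)) }

func (s *scope) declare(name, val string) {
	// like pushdcl+dcopy: save the old binding, then install the new one.
	// A previously absent name restores to ""; good enough for a sketch.
	s.stack = append(s.stack, saved{name, s.bindings[name]})
	s.bindings[name] = val
}

func (s *scope) leave() { // like popdcl
	mark := s.marks[len(s.marks)-1]
	s.marks = s.marks[:len(s.marks)-1]
	for i := len(s.stack) - 1; i >= mark; i-- {
		s.bindings[s.stack[i].name] = s.stack[i].prev
	}
	s.stack = s.stack[:mark]
}

func main() {
	s := &scope{bindings: map[string]string{"x": "outer"}}
	s.enter()
	s.declare("x", "inner")
	fmt.Println(s.bindings["x"]) // inner
	s.leave()
	fmt.Println(s.bindings["x"]) // outer
}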
@ -111,7 +112,7 @@ func testdclstack() {
}
// redeclare emits a diagnostic about symbol s being redeclared somewhere.
func redeclare(s *Sym, where string) {
func redeclare(s *types.Sym, where string) {
if !s.Lastlineno.IsKnown() {
var tmp string
if s.Origpkg != nil {
@ -208,7 +209,7 @@ func declare(n *Node, ctxt Class) {
s.Block = block
s.Lastlineno = lineno
s.Def = n
s.Def = asTypesNode(n)
n.Name.Vargen = int32(gen)
n.Name.Funcdepth = funcdepth
n.Class = ctxt
@ -216,7 +217,7 @@ func declare(n *Node, ctxt Class) {
autoexport(n, ctxt)
}
func addvar(n *Node, t *Type, ctxt Class) {
func addvar(n *Node, t *types.Type, ctxt Class) {
if n == nil || n.Sym == nil || (n.Op != ONAME && n.Op != ONONAME) || t == nil {
Fatalf("addvar: n=%v t=%v nil", n, t)
}
@ -284,7 +285,7 @@ func variter(vl []*Node, t *Node, el []*Node) []*Node {
}
// newnoname returns a new ONONAME Node associated with symbol s.
func newnoname(s *Sym) *Node {
func newnoname(s *types.Sym) *Node {
if s == nil {
Fatalf("newnoname nil")
}
@ -297,7 +298,7 @@ func newnoname(s *Sym) *Node {
// newfuncname generates a new name node for a function or method.
// TODO(rsc): Use an ODCLFUNC node instead. See comment in CL 7360.
func newfuncname(s *Sym) *Node {
func newfuncname(s *types.Sym) *Node {
n := newname(s)
n.Func = new(Func)
n.Func.SetIsHiddenClosure(Curfn != nil)
@ -306,37 +307,37 @@ func newfuncname(s *Sym) *Node {
// this generates a new name node for a name
// being declared.
func dclname(s *Sym) *Node {
func dclname(s *types.Sym) *Node {
n := newname(s)
n.Op = ONONAME // caller will correct it
return n
}
func typenod(t *Type) *Node {
func typenod(t *types.Type) *Node {
// if we copied another type with *t = *u
// then t->nod might be out of date, so
// check t->nod->type too
if t.nod == nil || t.nod.Type != t {
t.nod = nod(OTYPE, nil, nil)
t.nod.Type = t
t.nod.Sym = t.Sym
if asNode(t.Nod) == nil || asNode(t.Nod).Type != t {
t.Nod = asTypesNode(nod(OTYPE, nil, nil))
asNode(t.Nod).Type = t
asNode(t.Nod).Sym = t.Sym
}
return t.nod
return asNode(t.Nod)
}
func anonfield(typ *Type) *Node {
func anonfield(typ *types.Type) *Node {
return nod(ODCLFIELD, nil, typenod(typ))
}
func namedfield(s string, typ *Type) *Node {
func namedfield(s string, typ *types.Type) *Node {
return nod(ODCLFIELD, newname(lookup(s)), typenod(typ))
}
// oldname returns the Node that declares symbol s in the current scope.
// If no such Node currently exists, an ONONAME Node is returned instead.
func oldname(s *Sym) *Node {
n := s.Def
func oldname(s *types.Sym) *Node {
n := asNode(s.Def)
if n == nil {
// Maybe a top-level declaration will come along later to
// define s. resolve will check s.Def again once all input
@ -557,34 +558,34 @@ func funcargs(nt *Node) {
// Same as funcargs, except run over an already constructed TFUNC.
// This happens during import, where the hidden_fndcl rule has
// used functype directly to parse the function's type.
func funcargs2(t *Type) {
func funcargs2(t *types.Type) {
if t.Etype != TFUNC {
Fatalf("funcargs2 %v", t)
}
for _, ft := range t.Recvs().Fields().Slice() {
if ft.Nname == nil || ft.Nname.Sym == nil {
if asNode(ft.Nname) == nil || asNode(ft.Nname).Sym == nil {
continue
}
n := ft.Nname // no need for newname(ft->nname->sym)
n := asNode(ft.Nname) // no need for newname(ft->nname->sym)
n.Type = ft.Type
declare(n, PPARAM)
}
for _, ft := range t.Params().Fields().Slice() {
if ft.Nname == nil || ft.Nname.Sym == nil {
if asNode(ft.Nname) == nil || asNode(ft.Nname).Sym == nil {
continue
}
n := ft.Nname
n := asNode(ft.Nname)
n.Type = ft.Type
declare(n, PPARAM)
}
for _, ft := range t.Results().Fields().Slice() {
if ft.Nname == nil || ft.Nname.Sym == nil {
if asNode(ft.Nname) == nil || asNode(ft.Nname).Sym == nil {
continue
}
n := ft.Nname
n := asNode(ft.Nname)
n.Type = ft.Type
declare(n, PPARAMOUT)
}
@ -620,7 +621,7 @@ func funcbody(n *Node) {
// structs, functions, and methods.
// they don't belong here, but where do they belong?
func checkembeddedtype(t *Type) {
func checkembeddedtype(t *types.Type) {
if t == nil {
return
}
@ -639,7 +640,7 @@ func checkembeddedtype(t *Type) {
}
}
func structfield(n *Node) *Field {
func structfield(n *Node) *types.Field {
lno := lineno
lineno = n.Pos
@ -647,7 +648,7 @@ func structfield(n *Node) *Field {
Fatalf("structfield: oops %v\n", n)
}
f := newField()
f := types.NewField()
f.SetIsddd(n.Isddd())
if n.Right != nil {
@ -678,9 +679,9 @@ func structfield(n *Node) *Field {
}
if n.Left != nil && n.Left.Op == ONAME {
f.Nname = n.Left
f.Nname = asTypesNode(n.Left)
f.Embedded = n.Embedded
f.Sym = f.Nname.Sym
f.Sym = asNode(f.Nname).Sym
}
lineno = lno
@ -689,15 +690,15 @@ func structfield(n *Node) *Field {
// checkdupfields emits errors for duplicately named fields or methods in
// a list of struct or interface types.
func checkdupfields(what string, ts ...*Type) {
seen := make(map[*Sym]bool)
func checkdupfields(what string, ts ...*types.Type) {
seen := make(map[*types.Sym]bool)
for _, t := range ts {
for _, f := range t.Fields().Slice() {
if f.Sym == nil || isblanksym(f.Sym) || f.Nname == nil {
if f.Sym == nil || isblanksym(f.Sym) || asNode(f.Nname) == nil {
continue
}
if seen[f.Sym] {
yyerrorl(f.Nname.Pos, "duplicate %s %s", what, f.Sym.Name)
yyerrorl(asNode(f.Nname).Pos, "duplicate %s %s", what, f.Sym.Name)
continue
}
seen[f.Sym] = true
@ -707,18 +708,18 @@ func checkdupfields(what string, ts ...*Type) {
// convert a parsed id/type list into
// a type for struct/interface/arglist
func tostruct(l []*Node) *Type {
t := typ(TSTRUCT)
func tostruct(l []*Node) *types.Type {
t := types.New(TSTRUCT)
tostruct0(t, l)
return t
}
func tostruct0(t *Type, l []*Node) {
func tostruct0(t *types.Type, l []*Node) {
if t == nil || !t.IsStruct() {
Fatalf("struct expected")
}
fields := make([]*Field, len(l))
fields := make([]*types.Field, len(l))
for i, n := range l {
f := structfield(n)
if f.Broke() {
@ -735,11 +736,11 @@ func tostruct0(t *Type, l []*Node) {
}
}
func tofunargs(l []*Node, funarg Funarg) *Type {
t := typ(TSTRUCT)
func tofunargs(l []*Node, funarg types.Funarg) *types.Type {
t := types.New(TSTRUCT)
t.StructType().Funarg = funarg
fields := make([]*Field, len(l))
fields := make([]*types.Field, len(l))
for i, n := range l {
f := structfield(n)
f.Funarg = funarg
@ -757,23 +758,23 @@ func tofunargs(l []*Node, funarg Funarg) *Type {
return t
}
func tofunargsfield(fields []*Field, funarg Funarg) *Type {
t := typ(TSTRUCT)
func tofunargsfield(fields []*types.Field, funarg types.Funarg) *types.Type {
t := types.New(TSTRUCT)
t.StructType().Funarg = funarg
for _, f := range fields {
f.Funarg = funarg
// esc.go needs to find f given a PPARAM to add the tag.
if f.Nname != nil && f.Nname.Class == PPARAM {
f.Nname.Name.Param.Field = f
if asNode(f.Nname) != nil && asNode(f.Nname).Class == PPARAM {
asNode(f.Nname).Name.Param.Field = f
}
}
t.SetFields(fields)
return t
}
func interfacefield(n *Node) *Field {
func interfacefield(n *Node) *types.Field {
lno := lineno
lineno = n.Pos
@ -796,14 +797,14 @@ func interfacefield(n *Node) *Field {
n.Right = nil
}
f := newField()
f := types.NewField()
if n.Left != nil {
f.Nname = n.Left
f.Sym = f.Nname.Sym
f.Nname = asTypesNode(n.Left)
f.Sym = asNode(f.Nname).Sym
} else {
// Placeholder ONAME just to hold Pos.
// TODO(mdempsky): Add Pos directly to Field instead.
f.Nname = newname(nblank.Sym)
f.Nname = asTypesNode(newname(nblank.Sym))
}
f.Type = n.Type
@ -815,21 +816,21 @@ func interfacefield(n *Node) *Field {
return f
}
func tointerface(l []*Node) *Type {
func tointerface(l []*Node) *types.Type {
if len(l) == 0 {
return Types[TINTER]
return types.Types[TINTER]
}
t := typ(TINTER)
t := types.New(TINTER)
tointerface0(t, l)
return t
}
func tointerface0(t *Type, l []*Node) *Type {
func tointerface0(t *types.Type, l []*Node) *types.Type {
if t == nil || !t.IsInterface() {
Fatalf("interface expected")
}
var fields []*Field
var fields []*types.Field
for _, n := range l {
f := interfacefield(n)
if f.Broke() {
@ -842,7 +843,7 @@ func tointerface0(t *Type, l []*Node) *Type {
return t
}
func embedded(s *Sym, pkg *Pkg) *Node {
func embedded(s *types.Sym, pkg *types.Pkg) *Node {
const (
CenterDot = 0xB7
)
@ -870,20 +871,20 @@ func embedded(s *Sym, pkg *Pkg) *Node {
}
// thisT is the singleton type used for interface method receivers.
var thisT *Type
var thisT *types.Type
func fakethis() *Node {
if thisT == nil {
thisT = typPtr(typ(TSTRUCT))
thisT = types.NewPtr(types.New(TSTRUCT))
}
return anonfield(thisT)
}
func fakethisfield() *Field {
func fakethisfield() *types.Field {
if thisT == nil {
thisT = typPtr(typ(TSTRUCT))
thisT = types.NewPtr(types.New(TSTRUCT))
}
f := newField()
f := types.NewField()
f.Type = thisT
return f
}
@ -891,18 +892,18 @@ func fakethisfield() *Field {
// Is this field a method on an interface?
// Those methods have thisT as the receiver.
// (See fakethis above.)
func isifacemethod(f *Type) bool {
func isifacemethod(f *types.Type) bool {
return f.Recv().Type == thisT
}
// turn a parsed function declaration into a type
func functype(this *Node, in, out []*Node) *Type {
t := typ(TFUNC)
func functype(this *Node, in, out []*Node) *types.Type {
t := types.New(TFUNC)
functype0(t, this, in, out)
return t
}
func functype0(t *Type, this *Node, in, out []*Node) {
func functype0(t *types.Type, this *Node, in, out []*Node) {
if t == nil || t.Etype != TFUNC {
Fatalf("function type expected")
}
@ -911,9 +912,9 @@ func functype0(t *Type, this *Node, in, out []*Node) {
if this != nil {
rcvr = []*Node{this}
}
t.FuncType().Receiver = tofunargs(rcvr, FunargRcvr)
t.FuncType().Results = tofunargs(out, FunargResults)
t.FuncType().Params = tofunargs(in, FunargParams)
t.FuncType().Receiver = tofunargs(rcvr, types.FunargRcvr)
t.FuncType().Results = tofunargs(out, types.FunargResults)
t.FuncType().Params = tofunargs(in, types.FunargParams)
checkdupfields("argument", t.Recvs(), t.Results(), t.Params())
@ -930,33 +931,33 @@ func functype0(t *Type, this *Node, in, out []*Node) {
}
}
func functypefield(this *Field, in, out []*Field) *Type {
t := typ(TFUNC)
func functypefield(this *types.Field, in, out []*types.Field) *types.Type {
t := types.New(TFUNC)
functypefield0(t, this, in, out)
return t
}
func functypefield0(t *Type, this *Field, in, out []*Field) {
var rcvr []*Field
func functypefield0(t *types.Type, this *types.Field, in, out []*types.Field) {
var rcvr []*types.Field
if this != nil {
rcvr = []*Field{this}
rcvr = []*types.Field{this}
}
t.FuncType().Receiver = tofunargsfield(rcvr, FunargRcvr)
t.FuncType().Results = tofunargsfield(out, FunargRcvr)
t.FuncType().Params = tofunargsfield(in, FunargRcvr)
t.FuncType().Receiver = tofunargsfield(rcvr, types.FunargRcvr)
t.FuncType().Results = tofunargsfield(out, types.FunargRcvr)
t.FuncType().Params = tofunargsfield(in, types.FunargRcvr)
t.FuncType().Outnamed = false
if len(out) > 0 && out[0].Nname != nil && out[0].Nname.Orig != nil {
s := out[0].Nname.Orig.Sym
if len(out) > 0 && asNode(out[0].Nname) != nil && asNode(out[0].Nname).Orig != nil {
s := asNode(out[0].Nname).Orig.Sym
if s != nil && (s.Name[0] != '~' || s.Name[1] != 'r') { // ~r%d is the name invented for an unnamed result
t.FuncType().Outnamed = true
}
}
}
var methodsym_toppkg *Pkg
var methodsym_toppkg *types.Pkg
func methodsym(nsym *Sym, t0 *Type, iface bool) *Sym {
func methodsym(nsym *types.Sym, t0 *types.Type, iface bool) *types.Sym {
if t0 == nil {
Fatalf("methodsym: nil receiver type")
}
@ -974,7 +975,7 @@ func methodsym(nsym *Sym, t0 *Type, iface bool) *Sym {
// if t0 == *t and t0 has a sym,
// we want to see *t, not t0, in the method name.
if t != t0 && t0.Sym != nil {
t0 = typPtr(t)
t0 = types.NewPtr(t)
}
suffix := ""
@ -985,7 +986,7 @@ func methodsym(nsym *Sym, t0 *Type, iface bool) *Sym {
}
}
var spkg *Pkg
var spkg *types.Pkg
if s != nil {
spkg = s.Pkg
}
@ -1013,7 +1014,7 @@ func methodsym(nsym *Sym, t0 *Type, iface bool) *Sym {
// methodname is a misnomer because this now returns a Sym, rather
// than an ONAME.
// TODO(mdempsky): Reconcile with methodsym.
func methodname(s *Sym, recv *Type) *Sym {
func methodname(s *types.Sym, recv *types.Type) *types.Sym {
star := false
if recv.IsPtr() {
star = true
@ -1040,7 +1041,7 @@ func methodname(s *Sym, recv *Type) *Sym {
// Add a method, declared as a function.
// - msym is the method symbol
// - t is function type (with receiver)
func addmethod(msym *Sym, t *Type, local, nointerface bool) {
func addmethod(msym *types.Sym, t *types.Type, local, nointerface bool) {
if msym == nil {
Fatalf("no method symbol")
}
@ -1111,9 +1112,9 @@ func addmethod(msym *Sym, t *Type, local, nointerface bool) {
return
}
f := newField()
f := types.NewField()
f.Sym = msym
f.Nname = newname(msym)
f.Nname = asTypesNode(newname(msym))
f.Type = t
f.SetNointerface(nointerface)
@ -1143,13 +1144,13 @@ func funccompile(n *Node) {
dclcontext = PEXTERN
}
func (s *Sym) funcsymname() string {
func funcsymname(s *types.Sym) string {
return s.Name + "·f"
}
// funcsym returns s·f.
func funcsym(s *Sym) *Sym {
sf, existed := s.Pkg.LookupOK(s.funcsymname())
func funcsym(s *types.Sym) *types.Sym {
sf, existed := s.Pkg.LookupOK(funcsymname(s))
// Don't export s·f when compiling for dynamic linking.
// When dynamically linking, the necessary function
// symbols will be created explicitly with makefuncsym.
@ -1169,7 +1170,7 @@ func funcsym(s *Sym) *Sym {
// but DUPOK doesn't work across shared library boundaries.
// So instead, when dynamic linking, we only create
// the s·f stubs in s's package.
func makefuncsym(s *Sym) {
func makefuncsym(s *types.Sym) {
if !Ctxt.Flag_dynlink {
Fatalf("makefuncsym dynlink")
}
@ -1181,7 +1182,7 @@ func makefuncsym(s *Sym) {
// not get a funcsym.
return
}
if _, existed := s.Pkg.LookupOK(s.funcsymname()); !existed {
if _, existed := s.Pkg.LookupOK(funcsymname(s)); !existed {
funcsyms = append(funcsyms, s)
}
}
@ -1289,7 +1290,7 @@ func (c *nowritebarrierrecChecker) visitcode(n *Node) {
func (c *nowritebarrierrecChecker) visitcall(n *Node) {
fn := n.Left
if n.Op == OCALLMETH {
fn = n.Left.Sym.Def
fn = asNode(n.Left.Sym.Def)
}
if fn == nil || fn.Op != ONAME || fn.Class != PFUNC || fn.Name.Defn == nil {
return
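
One more detail from this file: typenod reuses the cached t.Nod only after checking that asNode(t.Nod).Type still equals t, which guards against a cache left stale by a wholesale copy such as *t = *u. The check pattern in isolation:

package main

import "fmt"

type node struct{ owner *typ }

type typ struct{ cached *node }

// nodeFor returns a node for t, reusing the cache only if it still points
// back at t (it will not after a struct copy such as *t = *u).
func nodeFor(t *typ) *node {
	if t.cached == nil || t.cached.owner != t {
		t.cached = &node{owner: t}
	}
	return t.cached
}

func main() {
	u := &typ{}
	nodeFor(u)
	t := &typ{}
	*t = *u // t now carries u's cached node, which belongs to u
	fmt.Println(nodeFor(t).owner == t) // true: the stale cache was replaced
}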

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"fmt"
"strconv"
"strings"
@ -131,7 +132,7 @@ func (v *bottomUpVisitor) visitcode(n *Node, min uint32) uint32 {
if n.Op == OCALLFUNC || n.Op == OCALLMETH {
fn := n.Left
if n.Op == OCALLMETH {
fn = n.Left.Sym.Def
fn = asNode(n.Left.Sym.Def)
}
if fn != nil && fn.Op == ONAME && fn.Class == PFUNC && fn.Name.Defn != nil {
m := v.visit(fn.Name.Defn)
@ -458,7 +459,7 @@ func (e *EscState) stepAssignWhere(dst, src *Node, why string, where *Node) *Esc
}
// funcSym returns fn.Func.Nname.Sym if no nils are encountered along the way.
func funcSym(fn *Node) *Sym {
func funcSym(fn *Node) *types.Sym {
if fn == nil || fn.Func.Nname == nil {
return nil
}
@ -466,7 +467,7 @@ func funcSym(fn *Node) *Sym {
}
// curfnSym returns n.Curfn.Nname.Sym if no nils are encountered along the way.
func (e *EscState) curfnSym(n *Node) *Sym {
func (e *EscState) curfnSym(n *Node) *types.Sym {
nE := e.nodeEscState(n)
return funcSym(nE.Curfn)
}
@ -563,7 +564,7 @@ func (e *EscState) escfunc(fn *Node) {
case PPARAM:
lnE.Loopdepth = 1
if ln.Type != nil && !haspointers(ln.Type) {
if ln.Type != nil && !types.Haspointers(ln.Type) {
break
}
if Curfn.Nbody.Len() == 0 && !Curfn.Noescape() {
@ -621,7 +622,7 @@ func (e *EscState) escloopdepth(n *Node) {
// after escape analysis. in the future, maybe pull label & goto analysis out of walk and put before esc
// if(n.Left.Sym.Label != nil)
// fatal("escape analysis messed up analyzing label: %+N", n);
n.Left.Sym.Label = &nonlooping
n.Left.Sym.Label = asTypesNode(&nonlooping)
case OGOTO:
if n.Left == nil || n.Left.Sym == nil {
@ -630,8 +631,8 @@ func (e *EscState) escloopdepth(n *Node) {
// If we come past one that's uninitialized, this must be a (harmless) forward jump
// but if it's set to nonlooping the label must have preceded this goto.
if n.Left.Sym.Label == &nonlooping {
n.Left.Sym.Label = &looping
if asNode(n.Left.Sym.Label) == &nonlooping {
n.Left.Sym.Label = asTypesNode(&looping)
}
}
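
The &nonlooping and &looping sentinels stored in Sym.Label classify gotos: a label is marked non-looping when first seen, and a later goto that finds the mark already set must jump backwards, so the label is remarked as looping. Roughly:

package main

import "fmt"

const (
	unseen = iota
	nonlooping
	looping
)

// classify processes labels and gotos in source order, the way escloopdepth
// uses the sentinel nodes: a goto to an already-seen label jumps backwards.
func classify(stmts [][2]string) map[string]int {
	marks := map[string]int{}
	for _, st := range stmts {
		kind, name := st[0], st[1]
		switch kind {
		case "label":
			marks[name] = nonlooping
		case "goto":
			if marks[name] == nonlooping {
				marks[name] = looping
			}
		}
	}
	return marks
}

func main() {
	back := classify([][2]string{{"label", "L"}, {"goto", "L"}})
	fwd := classify([][2]string{{"goto", "L"}, {"label", "L"}})
	fmt.Println(back["L"] == looping, fwd["L"] == nonlooping) // true true
}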
@ -720,11 +721,11 @@ func (e *EscState) esc(n *Node, parent *Node) {
}
case OLABEL:
if n.Left.Sym.Label == &nonlooping {
if asNode(n.Left.Sym.Label) == &nonlooping {
if Debug['m'] > 2 {
fmt.Printf("%v:%v non-looping label\n", linestr(lineno), n)
}
} else if n.Left.Sym.Label == &looping {
} else if asNode(n.Left.Sym.Label) == &looping {
if Debug['m'] > 2 {
fmt.Printf("%v: %v looping label\n", linestr(lineno), n)
}
@ -1158,7 +1159,7 @@ func (e *EscState) escassign(dst, src *Node, step *EscStep) {
a := nod(OADDR, src, nil)
a.Pos = src.Pos
e.nodeEscState(a).Loopdepth = e.nodeEscState(src).Loopdepth
a.Type = typPtr(src.Type)
a.Type = types.NewPtr(src.Type)
e.escflows(dst, a, e.stepAssign(nil, originalDst, src, dstwhy))
// Flowing multiple returns to a single dst happens when
@ -1170,7 +1171,7 @@ func (e *EscState) escassign(dst, src *Node, step *EscStep) {
// A non-pointer escaping from a struct does not concern us.
case ODOT:
if src.Type != nil && !haspointers(src.Type) {
if src.Type != nil && !types.Haspointers(src.Type) {
break
}
fallthrough
@ -1191,7 +1192,7 @@ func (e *EscState) escassign(dst, src *Node, step *EscStep) {
case ODOTTYPE,
ODOTTYPE2:
if src.Type != nil && !haspointers(src.Type) {
if src.Type != nil && !types.Haspointers(src.Type) {
break
}
e.escassign(dst, src.Left, e.stepAssign(step, originalDst, src, dstwhy))
@ -1396,7 +1397,7 @@ func (e *EscState) addDereference(n *Node) *Node {
e.nodeEscState(ind).Loopdepth = e.nodeEscState(n).Loopdepth
ind.Pos = n.Pos
t := n.Type
if t.IsKind(Tptr) {
if t.IsKind(types.Tptr) {
// This should model our own sloppy use of OIND to encode
// decreasing levels of indirection; i.e., "indirecting" an array
// might yield the type of an element. To be enhanced...
@ -1440,7 +1441,7 @@ func escNoteOutputParamFlow(e uint16, vargen int32, level Level) uint16 {
return (e &^ (bitsMaskForTag << shift)) | encodedFlow
}
func (e *EscState) initEscRetval(call *Node, fntype *Type) {
func (e *EscState) initEscRetval(call *Node, fntype *types.Type) {
cE := e.nodeEscState(call)
cE.Retval.Set(nil) // Suspect this is not nil for indirect calls.
for i, f := range fntype.Results().Fields().Slice() {
@ -1464,7 +1465,7 @@ func (e *EscState) initEscRetval(call *Node, fntype *Type) {
// different for methods vs plain functions and for imported vs
// this-package
func (e *EscState) esccall(call *Node, parent *Node) {
var fntype *Type
var fntype *types.Type
var indirect bool
var fn *Node
switch call.Op {
@ -1477,7 +1478,7 @@ func (e *EscState) esccall(call *Node, parent *Node) {
indirect = fn.Op != ONAME || fn.Class != PFUNC
case OCALLMETH:
fn = call.Left.Sym.Def
fn = asNode(call.Left.Sym.Def)
if fn != nil {
fntype = fn.Type
} else {
@ -1514,7 +1515,7 @@ func (e *EscState) esccall(call *Node, parent *Node) {
if call.Op != OCALLFUNC {
rf := fntype.Recv()
r := call.Left.Left
if haspointers(rf.Type) {
if types.Haspointers(rf.Type) {
e.escassignSinkWhy(call, r, "receiver in indirect call")
}
} else { // indirect and OCALLFUNC = could be captured variables, too. (#14409)
@ -1555,8 +1556,8 @@ func (e *EscState) esccall(call *Node, parent *Node) {
if n.Isddd() && !call.Isddd() {
// Introduce ODDDARG node to represent ... allocation.
arg = nod(ODDDARG, nil, nil)
arr := typArray(n.Type.Elem(), int64(len(args)))
arg.Type = typPtr(arr) // make pointer so it will be tracked
arr := types.NewArray(n.Type.Elem(), int64(len(args)))
arg.Type = types.NewPtr(arr) // make pointer so it will be tracked
arg.Pos = call.Pos
e.track(arg)
call.Right = arg
@ -1603,7 +1604,7 @@ func (e *EscState) esccall(call *Node, parent *Node) {
if call.Op != OCALLFUNC {
rf := fntype.Recv()
r := call.Left.Left
if haspointers(rf.Type) {
if types.Haspointers(rf.Type) {
e.escassignfromtag(rf.Note, cE.Retval, r, call)
}
}
@ -1620,8 +1621,8 @@ func (e *EscState) esccall(call *Node, parent *Node) {
// Introduce ODDDARG node to represent ... allocation.
arg = nod(ODDDARG, nil, nil)
arg.Pos = call.Pos
arr := typArray(param.Type.Elem(), int64(len(rest)))
arg.Type = typPtr(arr) // make pointer so it will be tracked
arr := types.NewArray(param.Type.Elem(), int64(len(rest)))
arg.Type = types.NewPtr(arr) // make pointer so it will be tracked
e.track(arg)
call.Right = arg
@ -1643,7 +1644,7 @@ func (e *EscState) esccall(call *Node, parent *Node) {
}
}
if haspointers(param.Type) && e.escassignfromtag(note, cE.Retval, arg, call)&EscMask == EscNone && parent.Op != ODEFER && parent.Op != OPROC {
if types.Haspointers(param.Type) && e.escassignfromtag(note, cE.Retval, arg, call)&EscMask == EscNone && parent.Op != ODEFER && parent.Op != OPROC {
a := arg
for a.Op == OCONVNOP {
a = a.Left
@ -1673,7 +1674,7 @@ func (e *EscState) escflows(dst, src *Node, why *EscStep) {
}
// Don't bother building a graph for scalars.
if src.Type != nil && !haspointers(src.Type) && !isReflectHeaderDataField(src) {
if src.Type != nil && !types.Haspointers(src.Type) && !isReflectHeaderDataField(src) {
if Debug['m'] > 3 {
fmt.Printf("%v::NOT flows:: %S <- %S\n", linestr(lineno), dst, src)
}
@ -2036,7 +2037,7 @@ const uintptrEscapesTag = "uintptr-escapes"
func (e *EscState) esctag(fn *Node) {
fn.Esc = EscFuncTagged
name := func(s *Sym, narg int) string {
name := func(s *types.Sym, narg int) string {
if s != nil {
return s.Name
}
@ -2048,7 +2049,7 @@ func (e *EscState) esctag(fn *Node) {
if fn.Nbody.Len() == 0 {
if fn.Noescape() {
for _, f := range fn.Type.Params().Fields().Slice() {
if haspointers(f.Type) {
if types.Haspointers(f.Type) {
f.Note = mktag(EscNone)
}
}
@ -2103,7 +2104,7 @@ func (e *EscState) esctag(fn *Node) {
switch ln.Esc & EscMask {
case EscNone, // not touched by escflood
EscReturn:
if haspointers(ln.Type) { // don't bother tagging for scalars
if types.Haspointers(ln.Type) { // don't bother tagging for scalars
if ln.Name.Param.Field.Note != uintptrEscapesTag {
ln.Name.Param.Field.Note = mktag(int(ln.Esc))
}

View file

@ -7,6 +7,7 @@ package gc
import (
"bufio"
"bytes"
"cmd/compile/internal/types"
"cmd/internal/bio"
"fmt"
"unicode"
@ -44,8 +45,8 @@ func exportsym(n *Node) {
}
// Ensure original types are on exportlist before type aliases.
if n.Sym.isAlias() {
exportlist = append(exportlist, n.Sym.Def)
if IsAlias(n.Sym) {
exportlist = append(exportlist, asNode(n.Sym.Def))
}
exportlist = append(exportlist, n)
@ -65,7 +66,7 @@ func initname(s string) bool {
// exportedsym reports whether a symbol will be visible
// to files that import our package.
func exportedsym(sym *Sym) bool {
func exportedsym(sym *types.Sym) bool {
// Builtins are visible everywhere.
if sym.Pkg == builtinpkg || sym.Origpkg == builtinpkg {
return true
@ -141,7 +142,7 @@ func reexportdep(n *Node) {
}
// methodbyname sorts types by symbol name.
type methodbyname []*Field
type methodbyname []*types.Field
func (x methodbyname) Len() int { return len(x) }
func (x methodbyname) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
@ -174,7 +175,7 @@ func dumpexport() {
// (use empty package map to avoid collisions)
savedPkgMap := pkgMap
savedPkgs := pkgs
pkgMap = make(map[string]*Pkg)
pkgMap = make(map[string]*types.Pkg)
pkgs = nil
Import(mkpkg(""), bufio.NewReader(&copy)) // must not die
pkgs = savedPkgs
@ -191,14 +192,14 @@ func dumpexport() {
// importsym declares symbol s as an imported object representable by op.
// pkg is the package being imported
func importsym(pkg *Pkg, s *Sym, op Op) {
if s.Def != nil && s.Def.Op != op {
func importsym(pkg *types.Pkg, s *types.Sym, op Op) {
if asNode(s.Def) != nil && asNode(s.Def).Op != op {
pkgstr := fmt.Sprintf("during import %q", pkg.Path)
redeclare(s, pkgstr)
}
// mark the symbol so it is not reexported
if s.Def == nil {
if asNode(s.Def) == nil {
if exportname(s.Name) || initname(s.Name) {
s.SetExport(true)
} else {
@ -210,28 +211,28 @@ func importsym(pkg *Pkg, s *Sym, op Op) {
// pkgtype returns the named type declared by symbol s.
// If no such type has been declared yet, a forward declaration is returned.
// pkg is the package being imported
func pkgtype(pkg *Pkg, s *Sym) *Type {
func pkgtype(pkg *types.Pkg, s *types.Sym) *types.Type {
importsym(pkg, s, OTYPE)
if s.Def == nil || s.Def.Op != OTYPE {
t := typ(TFORW)
if asNode(s.Def) == nil || asNode(s.Def).Op != OTYPE {
t := types.New(TFORW)
t.Sym = s
s.Def = typenod(t)
s.Def.Name = new(Name)
s.Def = asTypesNode(typenod(t))
asNode(s.Def).Name = new(Name)
}
if s.Def.Type == nil {
if asNode(s.Def).Type == nil {
Fatalf("pkgtype %v", s)
}
return s.Def.Type
return asNode(s.Def).Type
}
// importconst declares symbol s as an imported constant with type t and value n.
// pkg is the package being imported
func importconst(pkg *Pkg, s *Sym, t *Type, n *Node) {
func importconst(pkg *types.Pkg, s *types.Sym, t *types.Type, n *Node) {
importsym(pkg, s, OLITERAL)
n = convlit(n, t)
if s.Def != nil { // TODO: check if already the same.
if asNode(s.Def) != nil { // TODO: check if already the same.
return
}
@ -256,13 +257,13 @@ func importconst(pkg *Pkg, s *Sym, t *Type, n *Node) {
// importvar declares symbol s as an imported variable with type t.
// pkg is the package being imported
func importvar(pkg *Pkg, s *Sym, t *Type) {
func importvar(pkg *types.Pkg, s *types.Sym, t *types.Type) {
importsym(pkg, s, ONAME)
if s.Def != nil && s.Def.Op == ONAME {
if eqtype(t, s.Def.Type) {
if asNode(s.Def) != nil && asNode(s.Def).Op == ONAME {
if eqtype(t, asNode(s.Def).Type) {
return
}
yyerror("inconsistent definition for var %v during import\n\t%v (in %q)\n\t%v (in %q)", s, s.Def.Type, s.Importdef.Path, t, pkg.Path)
yyerror("inconsistent definition for var %v during import\n\t%v (in %q)\n\t%v (in %q)", s, asNode(s.Def).Type, s.Importdef.Path, t, pkg.Path)
}
n := newname(s)
@ -277,13 +278,13 @@ func importvar(pkg *Pkg, s *Sym, t *Type) {
// importalias declares symbol s as an imported type alias with type t.
// pkg is the package being imported
func importalias(pkg *Pkg, s *Sym, t *Type) {
func importalias(pkg *types.Pkg, s *types.Sym, t *types.Type) {
importsym(pkg, s, OTYPE)
if s.Def != nil && s.Def.Op == OTYPE {
if eqtype(t, s.Def.Type) {
if asNode(s.Def) != nil && asNode(s.Def).Op == OTYPE {
if eqtype(t, asNode(s.Def).Type) {
return
}
yyerror("inconsistent definition for type alias %v during import\n\t%v (in %q)\n\t%v (in %q)", s, s.Def.Type, s.Importdef.Path, t, pkg.Path)
yyerror("inconsistent definition for type alias %v during import\n\t%v (in %q)\n\t%v (in %q)", s, asNode(s.Def).Type, s.Importdef.Path, t, pkg.Path)
}
n := newname(s)
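
pkgtype in this file hands back a forward declaration (TFORW) the first time an imported type name is referenced, so later references and the eventual definition all share one Type object. The idea in isolation:

package main

import "fmt"

type typ struct {
	name    string
	forward bool // like TFORW: referenced but not yet defined
}

var declared = map[string]*typ{}

// pkgtype returns the named type, creating a forward placeholder on first use.
func pkgtype(name string) *typ {
	t, ok := declared[name]
	if !ok {
		t = &typ{name: name, forward: true}
		declared[name] = t
	}
	return t
}

func main() {
	a := pkgtype("T") // first reference: forward declaration
	b := pkgtype("T") // later reference: same object
	b.forward = false // the real definition arrives and fills it in
	fmt.Println(a == b, a.forward) // true false
}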

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"fmt"
"strconv"
"strings"
@ -69,12 +70,12 @@ func fmtFlag(s fmt.State, verb rune) FmtFlag {
//
// %v *Val Constant values
//
// %v *Sym Symbols
// %v *types.Sym Symbols
// %S unqualified identifier in any mode
// Flags: +,- #: mode (see below)
// 0: in export mode: unqualified identifier if exported, qualified if not
//
// %v *Type Types
// %v *types.Type Types
// %S omit "func" and receiver in function types
// %L definition instead of name.
// Flags: +,- #: mode (see below)
@ -89,7 +90,7 @@ func fmtFlag(s fmt.State, verb rune) FmtFlag {
// Flags: those of *Node
// .: separate items with ',' instead of ';'
// *Sym, *Type, and *Node types use the flags below to set the format mode
// *types.Sym, *types.Type, and *Node types use the flags below to set the format mode
const (
FErr = iota
FDbg
@ -98,10 +99,10 @@ const (
)
// The mode flags '+', '-', and '#' are sticky; they persist through
// recursions of *Node, *Type, and *Sym values. The ' ' flag is
// sticky only on *Type recursions and only used in %-/*Sym mode.
// recursions of *Node, *types.Type, and *types.Sym values. The ' ' flag is
// sticky only on *types.Type recursions and only used in %-/*types.Sym mode.
//
// Example: given a *Sym: %+v %#v %-v print an identifier properly qualified for debug/export/internal mode
// Example: given a *types.Sym: %+v %#v %-v print an identifier properly qualified for debug/export/internal mode
// Useful format combinations:
// TODO(gri): verify these
@ -114,7 +115,7 @@ const (
// %#v Go format
// %L "foo (type Bar)" for error messages
//
// *Type:
// *types.Type:
// %#v Go format
// %#L type definition instead of name
// %#S omit"func" and receiver in function signature
@ -263,15 +264,15 @@ type (
fmtOpTypeId Op
fmtOpTypeIdName Op
fmtTypeErr Type
fmtTypeDbg Type
fmtTypeTypeId Type
fmtTypeTypeIdName Type
fmtTypeErr types.Type
fmtTypeDbg types.Type
fmtTypeTypeId types.Type
fmtTypeTypeIdName types.Type
fmtSymErr Sym
fmtSymDbg Sym
fmtSymTypeId Sym
fmtSymTypeIdName Sym
fmtSymErr types.Sym
fmtSymDbg types.Sym
fmtSymTypeId types.Sym
fmtSymTypeIdName types.Sym
fmtNodesErr Nodes
fmtNodesDbg Nodes
@ -291,17 +292,23 @@ func (o fmtOpTypeId) Format(s fmt.State, verb rune) { Op(o).format(s, verb,
func (o fmtOpTypeIdName) Format(s fmt.State, verb rune) { Op(o).format(s, verb, FTypeIdName) }
func (o Op) Format(s fmt.State, verb rune) { o.format(s, verb, FErr) }
func (t *fmtTypeErr) Format(s fmt.State, verb rune) { (*Type)(t).format(s, verb, FErr) }
func (t *fmtTypeDbg) Format(s fmt.State, verb rune) { (*Type)(t).format(s, verb, FDbg) }
func (t *fmtTypeTypeId) Format(s fmt.State, verb rune) { (*Type)(t).format(s, verb, FTypeId) }
func (t *fmtTypeTypeIdName) Format(s fmt.State, verb rune) { (*Type)(t).format(s, verb, FTypeIdName) }
func (t *Type) Format(s fmt.State, verb rune) { t.format(s, verb, FErr) }
func (t *fmtTypeErr) Format(s fmt.State, verb rune) { typeFormat((*types.Type)(t), s, verb, FErr) }
func (t *fmtTypeDbg) Format(s fmt.State, verb rune) { typeFormat((*types.Type)(t), s, verb, FDbg) }
func (t *fmtTypeTypeId) Format(s fmt.State, verb rune) { typeFormat((*types.Type)(t), s, verb, FTypeId) }
func (t *fmtTypeTypeIdName) Format(s fmt.State, verb rune) {
typeFormat((*types.Type)(t), s, verb, FTypeIdName)
}
func (y *fmtSymErr) Format(s fmt.State, verb rune) { (*Sym)(y).format(s, verb, FErr) }
func (y *fmtSymDbg) Format(s fmt.State, verb rune) { (*Sym)(y).format(s, verb, FDbg) }
func (y *fmtSymTypeId) Format(s fmt.State, verb rune) { (*Sym)(y).format(s, verb, FTypeId) }
func (y *fmtSymTypeIdName) Format(s fmt.State, verb rune) { (*Sym)(y).format(s, verb, FTypeIdName) }
func (y *Sym) Format(s fmt.State, verb rune) { y.format(s, verb, FErr) }
// func (t *types.Type) Format(s fmt.State, verb rune) // in package types
func (y *fmtSymErr) Format(s fmt.State, verb rune) { symFormat((*types.Sym)(y), s, verb, FErr) }
func (y *fmtSymDbg) Format(s fmt.State, verb rune) { symFormat((*types.Sym)(y), s, verb, FDbg) }
func (y *fmtSymTypeId) Format(s fmt.State, verb rune) { symFormat((*types.Sym)(y), s, verb, FTypeId) }
func (y *fmtSymTypeIdName) Format(s fmt.State, verb rune) {
symFormat((*types.Sym)(y), s, verb, FTypeIdName)
}
// func (y *types.Sym) Format(s fmt.State, verb rune) // in package types { y.format(s, verb, FErr) }
func (n fmtNodesErr) Format(s fmt.State, verb rune) { (Nodes)(n).format(s, verb, FErr) }
func (n fmtNodesDbg) Format(s fmt.State, verb rune) { (Nodes)(n).format(s, verb, FDbg) }
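
The fmtSym* and fmtType* wrappers let one value print differently per formatting mode: prepareArgs re-types each argument into a mode-specific wrapper whose Format method fixes the mode before delegating to the shared formatter. The pattern reduced to a standalone program:

package main

import "fmt"

type sym struct{ pkg, name string }

// Mode-specific wrappers; each fixes the formatting mode, as the
// fmtSymErr/fmtSymDbg types do for *types.Sym.
type symErr sym // user-facing: unqualified
type symDbg sym // debugging: package-qualified

func (s *symErr) Format(f fmt.State, verb rune) { fmt.Fprint(f, s.name) }
func (s *symDbg) Format(f fmt.State, verb rune) { fmt.Fprintf(f, "%s.%s", s.pkg, s.name) }

// prepareArgs-style rewriting: wrap plain values in the wrapper for one mode.
func prepareDbg(args []interface{}) {
	for i, a := range args {
		if s, ok := a.(*sym); ok {
			args[i] = (*symDbg)(s)
		}
	}
}

func main() {
	s := &sym{pkg: "bytes", name: "Buffer"}
	fmt.Printf("%v\n", (*symErr)(s)) // Buffer
	args := []interface{}{s}
	prepareDbg(args)
	fmt.Printf("%v\n", args...) // bytes.Buffer
}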
@ -333,13 +340,13 @@ func (m fmtMode) prepareArgs(args []interface{}) {
args[i] = fmtOpErr(arg)
case *Node:
args[i] = (*fmtNodeErr)(arg)
case *Type:
case *types.Type:
args[i] = (*fmtTypeErr)(arg)
case *Sym:
case *types.Sym:
args[i] = (*fmtSymErr)(arg)
case Nodes:
args[i] = fmtNodesErr(arg)
case Val, int32, int64, string, EType:
case Val, int32, int64, string, types.EType:
// OK: printing these types doesn't depend on mode
default:
Fatalf("mode.prepareArgs type %T", arg)
@ -352,13 +359,13 @@ func (m fmtMode) prepareArgs(args []interface{}) {
args[i] = fmtOpDbg(arg)
case *Node:
args[i] = (*fmtNodeDbg)(arg)
case *Type:
case *types.Type:
args[i] = (*fmtTypeDbg)(arg)
case *Sym:
case *types.Sym:
args[i] = (*fmtSymDbg)(arg)
case Nodes:
args[i] = fmtNodesDbg(arg)
case Val, int32, int64, string, EType:
case Val, int32, int64, string, types.EType:
// OK: printing these types doesn't depend on mode
default:
Fatalf("mode.prepareArgs type %T", arg)
@ -371,13 +378,13 @@ func (m fmtMode) prepareArgs(args []interface{}) {
args[i] = fmtOpTypeId(arg)
case *Node:
args[i] = (*fmtNodeTypeId)(arg)
case *Type:
case *types.Type:
args[i] = (*fmtTypeTypeId)(arg)
case *Sym:
case *types.Sym:
args[i] = (*fmtSymTypeId)(arg)
case Nodes:
args[i] = fmtNodesTypeId(arg)
case Val, int32, int64, string, EType:
case Val, int32, int64, string, types.EType:
// OK: printing these types doesn't depend on mode
default:
Fatalf("mode.prepareArgs type %T", arg)
@ -390,13 +397,13 @@ func (m fmtMode) prepareArgs(args []interface{}) {
args[i] = fmtOpTypeIdName(arg)
case *Node:
args[i] = (*fmtNodeTypeIdName)(arg)
case *Type:
case *types.Type:
args[i] = (*fmtTypeTypeIdName)(arg)
case *Sym:
case *types.Sym:
args[i] = (*fmtSymTypeIdName)(arg)
case Nodes:
args[i] = fmtNodesTypeIdName(arg)
case Val, int32, int64, string, EType:
case Val, int32, int64, string, types.EType:
// OK: printing these types doesn't depend on mode
default:
Fatalf("mode.prepareArgs type %T", arg)
@ -606,53 +613,8 @@ s%.+% [T&] = "&",%g
s%^ ........*\]%&~%g
s%~ %%g
*/
var etnames = []string{
Txxx: "Txxx",
TINT: "INT",
TUINT: "UINT",
TINT8: "INT8",
TUINT8: "UINT8",
TINT16: "INT16",
TUINT16: "UINT16",
TINT32: "INT32",
TUINT32: "UINT32",
TINT64: "INT64",
TUINT64: "UINT64",
TUINTPTR: "UINTPTR",
TFLOAT32: "FLOAT32",
TFLOAT64: "FLOAT64",
TCOMPLEX64: "COMPLEX64",
TCOMPLEX128: "COMPLEX128",
TBOOL: "BOOL",
TPTR32: "PTR32",
TPTR64: "PTR64",
TFUNC: "FUNC",
TARRAY: "ARRAY",
TSLICE: "SLICE",
TSTRUCT: "STRUCT",
TCHAN: "CHAN",
TMAP: "MAP",
TINTER: "INTER",
TFORW: "FORW",
TSTRING: "STRING",
TUNSAFEPTR: "TUNSAFEPTR",
TANY: "ANY",
TIDEAL: "TIDEAL",
TNIL: "TNIL",
TBLANK: "TBLANK",
TFUNCARGS: "TFUNCARGS",
TCHANARGS: "TCHANARGS",
TDDDFIELD: "TDDDFIELD",
}
func (et EType) String() string {
if int(et) < len(etnames) && etnames[et] != "" {
return etnames[et]
}
return fmt.Sprintf("E-%d", et)
}
func (s *Sym) symfmt(flag FmtFlag, mode fmtMode) string {
func symfmt(s *types.Sym, flag FmtFlag, mode fmtMode) string {
if s.Pkg != nil && flag&FmtShort == 0 {
switch mode {
case FErr: // This is for the user
@ -719,38 +681,38 @@ var basicnames = []string{
TBLANK: "blank",
}
func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
func typefmt(t *types.Type, flag FmtFlag, mode fmtMode, depth int) string {
if t == nil {
return "<T>"
}
if t == bytetype || t == runetype {
if t == types.Bytetype || t == types.Runetype {
// in %-T mode collapse rune and byte with their originals.
switch mode {
case FTypeIdName, FTypeId:
t = Types[t.Etype]
t = types.Types[t.Etype]
default:
return t.Sym.sconv(FmtShort, mode)
return sconv(t.Sym, FmtShort, mode)
}
}
if t == errortype {
if t == types.Errortype {
return "error"
}
// Unless the 'l' flag was specified, if the type has a name, just print that name.
if flag&FmtLong == 0 && t.Sym != nil && t != Types[t.Etype] {
if flag&FmtLong == 0 && t.Sym != nil && t != types.Types[t.Etype] {
switch mode {
case FTypeId, FTypeIdName:
if flag&FmtShort != 0 {
if t.Vargen != 0 {
return mode.Sprintf("%v·%d", t.Sym.sconv(FmtShort, mode), t.Vargen)
return mode.Sprintf("%v·%d", sconv(t.Sym, FmtShort, mode), t.Vargen)
}
return t.Sym.sconv(FmtShort, mode)
return sconv(t.Sym, FmtShort, mode)
}
if mode == FTypeIdName {
return t.Sym.sconv(FmtUnsigned, mode)
return sconv(t.Sym, FmtUnsigned, mode)
}
if t.Sym.Pkg == localpkg && t.Vargen != 0 {
@ -758,19 +720,19 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
}
}
return t.Sym.modeString(mode)
return smodeString(t.Sym, mode)
}
if int(t.Etype) < len(basicnames) && basicnames[t.Etype] != "" {
prefix := ""
if mode == FErr && (t == idealbool || t == idealstring) {
if mode == FErr && (t == types.Idealbool || t == types.Idealstring) {
prefix = "untyped "
}
return prefix + basicnames[t.Etype]
}
if mode == FDbg {
return t.Etype.String() + "-" + t.typefmt(flag, 0, depth)
return t.Etype.String() + "-" + typefmt(t, flag, 0, depth)
}
switch t.Etype {
@ -778,36 +740,36 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
switch mode {
case FTypeId, FTypeIdName:
if flag&FmtShort != 0 {
return "*" + t.Elem().tconv(FmtShort, mode, depth)
return "*" + tconv(t.Elem(), FmtShort, mode, depth)
}
}
return "*" + t.Elem().modeString(mode, depth)
return "*" + tmodeString(t.Elem(), mode, depth)
case TARRAY:
if t.isDDDArray() {
return "[...]" + t.Elem().modeString(mode, depth)
if t.IsDDDArray() {
return "[...]" + tmodeString(t.Elem(), mode, depth)
}
return "[" + strconv.FormatInt(t.NumElem(), 10) + "]" + t.Elem().modeString(mode, depth)
return "[" + strconv.FormatInt(t.NumElem(), 10) + "]" + tmodeString(t.Elem(), mode, depth)
case TSLICE:
return "[]" + t.Elem().modeString(mode, depth)
return "[]" + tmodeString(t.Elem(), mode, depth)
case TCHAN:
switch t.ChanDir() {
case Crecv:
return "<-chan " + t.Elem().modeString(mode, depth)
case types.Crecv:
return "<-chan " + tmodeString(t.Elem(), mode, depth)
case Csend:
return "chan<- " + t.Elem().modeString(mode, depth)
case types.Csend:
return "chan<- " + tmodeString(t.Elem(), mode, depth)
}
if t.Elem() != nil && t.Elem().IsChan() && t.Elem().Sym == nil && t.Elem().ChanDir() == Crecv {
return "chan (" + t.Elem().modeString(mode, depth) + ")"
if t.Elem() != nil && t.Elem().IsChan() && t.Elem().Sym == nil && t.Elem().ChanDir() == types.Crecv {
return "chan (" + tmodeString(t.Elem(), mode, depth) + ")"
}
return "chan " + t.Elem().modeString(mode, depth)
return "chan " + tmodeString(t.Elem(), mode, depth)
case TMAP:
return "map[" + t.Key().modeString(mode, depth) + "]" + t.Val().modeString(mode, depth)
return "map[" + tmodeString(t.Key(), mode, depth) + "]" + tmodeString(t.Val(), mode, depth)
case TINTER:
if t.IsEmptyInterface() {
@ -826,11 +788,11 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
// Wrong interface definitions may have types lacking a symbol.
break
case exportname(f.Sym.Name):
buf = append(buf, f.Sym.sconv(FmtShort, mode)...)
buf = append(buf, sconv(f.Sym, FmtShort, mode)...)
default:
buf = append(buf, f.Sym.sconv(FmtUnsigned, mode)...)
buf = append(buf, sconv(f.Sym, FmtUnsigned, mode)...)
}
buf = append(buf, f.Type.tconv(FmtShort, mode, depth)...)
buf = append(buf, tconv(f.Type, FmtShort, mode, depth)...)
}
if t.NumFields() != 0 {
buf = append(buf, ' ')
@ -845,12 +807,12 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
} else {
if t.Recv() != nil {
buf = append(buf, "method"...)
buf = append(buf, t.Recvs().modeString(mode, depth)...)
buf = append(buf, tmodeString(t.Recvs(), mode, depth)...)
buf = append(buf, ' ')
}
buf = append(buf, "func"...)
}
buf = append(buf, t.Params().modeString(mode, depth)...)
buf = append(buf, tmodeString(t.Params(), mode, depth)...)
switch t.Results().NumFields() {
case 0:
@ -858,11 +820,11 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
case 1:
buf = append(buf, ' ')
buf = append(buf, t.Results().Field(0).Type.modeString(mode, depth)...) // struct->field->field's type
buf = append(buf, tmodeString(t.Results().Field(0).Type, mode, depth)...) // struct->field->field's type
default:
buf = append(buf, ' ')
buf = append(buf, t.Results().modeString(mode, depth)...)
buf = append(buf, tmodeString(t.Results(), mode, depth)...)
}
return string(buf)
@ -872,15 +834,15 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
// Format the bucket struct for map[x]y as map.bucket[x]y.
// This avoids a recursive print that generates very long names.
if mt.Bucket == t {
return "map.bucket[" + m.Key().modeString(mode, depth) + "]" + m.Val().modeString(mode, depth)
return "map.bucket[" + tmodeString(m.Key(), mode, depth) + "]" + tmodeString(m.Val(), mode, depth)
}
if mt.Hmap == t {
return "map.hdr[" + m.Key().modeString(mode, depth) + "]" + m.Val().modeString(mode, depth)
return "map.hdr[" + tmodeString(m.Key(), mode, depth) + "]" + tmodeString(m.Val(), mode, depth)
}
if mt.Hiter == t {
return "map.iter[" + m.Key().modeString(mode, depth) + "]" + m.Val().modeString(mode, depth)
return "map.iter[" + tmodeString(m.Key(), mode, depth) + "]" + tmodeString(m.Val(), mode, depth)
}
Fatalf("unknown internal map type")
@ -920,7 +882,7 @@ func (t *Type) typefmt(flag FmtFlag, mode fmtMode, depth int) string {
case TFORW:
if t.Sym != nil {
return "undefined " + t.Sym.modeString(mode)
return "undefined " + smodeString(t.Sym, mode)
}
return "undefined"
@ -1296,7 +1258,7 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
return
}
if n.Sym != nil {
fmt.Fprint(s, n.Sym.modeString(mode))
fmt.Fprint(s, smodeString(n.Sym, mode))
return
}
}
@ -1304,10 +1266,10 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
n.Orig.exprfmt(s, prec, mode)
return
}
if n.Type != nil && n.Type.Etype != TIDEAL && n.Type.Etype != TNIL && n.Type != idealbool && n.Type != idealstring {
if n.Type != nil && n.Type.Etype != TIDEAL && n.Type.Etype != TNIL && n.Type != types.Idealbool && n.Type != types.Idealstring {
// Need parens when type begins with what might
// be misinterpreted as a unary operator: * or <-.
if n.Type.IsPtr() || (n.Type.IsChan() && n.Type.ChanDir() == Crecv) {
if n.Type.IsPtr() || (n.Type.IsChan() && n.Type.ChanDir() == types.Crecv) {
mode.Fprintf(s, "(%v)(%v)", n.Type, n.Val())
return
} else {
@ -1327,11 +1289,11 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
}
fallthrough
case OPACK, ONONAME:
fmt.Fprint(s, n.Sym.modeString(mode))
fmt.Fprint(s, smodeString(n.Sym, mode))
case OTYPE:
if n.Type == nil && n.Sym != nil {
fmt.Fprint(s, n.Sym.modeString(mode))
fmt.Fprint(s, smodeString(n.Sym, mode))
return
}
mode.Fprintf(s, "%v", n.Type)
@ -1347,15 +1309,15 @@ func (n *Node) exprfmt(s fmt.State, prec int, mode fmtMode) {
mode.Fprintf(s, "map[%v]%v", n.Left, n.Right)
case OTCHAN:
switch ChanDir(n.Etype) {
case Crecv:
switch types.ChanDir(n.Etype) {
case types.Crecv:
mode.Fprintf(s, "<-chan %v", n.Left)
case Csend:
case types.Csend:
mode.Fprintf(s, "chan<- %v", n.Left)
default:
if n.Left != nil && n.Left.Op == OTCHAN && n.Left.Sym == nil && ChanDir(n.Left.Etype) == Crecv {
if n.Left != nil && n.Left.Op == OTCHAN && n.Left.Sym == nil && types.ChanDir(n.Left.Etype) == types.Crecv {
mode.Fprintf(s, "chan (%v)", n.Left)
} else {
mode.Fprintf(s, "chan %v", n.Left)
@ -1717,21 +1679,20 @@ func (n *Node) nodedump(s fmt.State, flag FmtFlag, mode fmtMode) {
}
// "%S" suppresses qualifying with package
func (s *Sym) format(f fmt.State, verb rune, mode fmtMode) {
func symFormat(s *types.Sym, f fmt.State, verb rune, mode fmtMode) {
switch verb {
case 'v', 'S':
fmt.Fprint(f, s.sconv(fmtFlag(f, verb), mode))
fmt.Fprint(f, sconv(s, fmtFlag(f, verb), mode))
default:
fmt.Fprintf(f, "%%!%c(*Sym=%p)", verb, s)
fmt.Fprintf(f, "%%!%c(*types.Sym=%p)", verb, s)
}
}
func (s *Sym) String() string { return s.sconv(0, FErr) }
func (s *Sym) modeString(mode fmtMode) string { return s.sconv(0, mode) }
func smodeString(s *types.Sym, mode fmtMode) string { return sconv(s, 0, mode) }
// See #16897 before changing the implementation of sconv.
func (s *Sym) sconv(flag FmtFlag, mode fmtMode) string {
func sconv(s *types.Sym, flag FmtFlag, mode fmtMode) string {
if flag&FmtLong != 0 {
panic("linksymfmt")
}
@ -1745,36 +1706,14 @@ func (s *Sym) sconv(flag FmtFlag, mode fmtMode) string {
}
flag, mode = flag.update(mode)
return s.symfmt(flag, mode)
return symfmt(s, flag, mode)
}
func (t *Type) String() string {
// This is an external entry point, so we pass depth 0 to tconv.
// The implementation of tconv (including typefmt and fldconv)
// must take care not to use a type in a formatting string
// to avoid resetting the recursion counter.
return t.tconv(0, FErr, 0)
func tmodeString(t *types.Type, mode fmtMode, depth int) string {
return tconv(t, 0, mode, depth)
}
func (t *Type) modeString(mode fmtMode, depth int) string {
return t.tconv(0, mode, depth)
}
// ShortString generates a short description of t.
// It is used in autogenerated method names, reflection,
// and itab names.
func (t *Type) ShortString() string {
return t.tconv(FmtLeft, FErr, 0)
}
// LongString generates a complete description of t.
// It is useful for reflection,
// or when a unique fingerprint or hash of a type is required.
func (t *Type) LongString() string {
return t.tconv(FmtLeft|FmtUnsigned, FErr, 0)
}
func fldconv(f *Field, flag FmtFlag, mode fmtMode, depth int) string {
func fldconv(f *types.Field, flag FmtFlag, mode fmtMode, depth int) string {
if f == nil {
return "<T>"
}
@ -1790,9 +1729,9 @@ func fldconv(f *Field, flag FmtFlag, mode fmtMode, depth int) string {
// Take the name from the original, lest we substituted it with ~r%d or ~b%d.
// ~r%d is a (formerly) unnamed result.
if mode == FErr && f.Nname != nil {
if f.Nname.Orig != nil {
s = f.Nname.Orig.Sym
if mode == FErr && asNode(f.Nname) != nil {
if asNode(f.Nname).Orig != nil {
s = asNode(f.Nname).Orig.Sym
if s != nil && s.Name[0] == '~' {
if s.Name[1] == 'r' { // originally an unnamed result
s = nil
@ -1806,24 +1745,24 @@ func fldconv(f *Field, flag FmtFlag, mode fmtMode, depth int) string {
}
if s != nil && f.Embedded == 0 {
if f.Funarg != FunargNone {
name = f.Nname.modeString(mode)
if f.Funarg != types.FunargNone {
name = asNode(f.Nname).modeString(mode)
} else if flag&FmtLong != 0 {
name = mode.Sprintf("%0S", s)
if !exportname(name) && flag&FmtUnsigned == 0 {
name = s.modeString(mode) // qualify non-exported names (used on structs, not on funarg)
name = smodeString(s, mode) // qualify non-exported names (used on structs, not on funarg)
}
} else {
name = s.modeString(mode)
name = smodeString(s, mode)
}
}
}
var typ string
if f.Isddd() {
typ = "..." + f.Type.Elem().modeString(mode, depth)
typ = "..." + tmodeString(f.Type.Elem(), mode, depth)
} else {
typ = f.Type.modeString(mode, depth)
typ = tmodeString(f.Type, mode, depth)
}
str := typ
@ -1831,7 +1770,7 @@ func fldconv(f *Field, flag FmtFlag, mode fmtMode, depth int) string {
str = name + " " + typ
}
if flag&FmtShort == 0 && f.Funarg == FunargNone && f.Note != "" {
if flag&FmtShort == 0 && f.Funarg == types.FunargNone && f.Note != "" {
str += " " + strconv.Quote(f.Note)
}
@ -1840,12 +1779,12 @@ func fldconv(f *Field, flag FmtFlag, mode fmtMode, depth int) string {
// "%L" print definition, not name
// "%S" omit 'func' and receiver from function types, short type names
func (t *Type) format(s fmt.State, verb rune, mode fmtMode) {
func typeFormat(t *types.Type, s fmt.State, verb rune, mode fmtMode) {
switch verb {
case 'v', 'S', 'L':
// This is an external entry point, so we pass depth 0 to tconv.
// See comments in Type.String.
fmt.Fprint(s, t.tconv(fmtFlag(s, verb), mode, 0))
fmt.Fprint(s, tconv(t, fmtFlag(s, verb), mode, 0))
default:
fmt.Fprintf(s, "%%!%c(*Type=%p)", verb, t)
@ -1853,7 +1792,7 @@ func (t *Type) format(s fmt.State, verb rune, mode fmtMode) {
}
// See #16897 before changing the implementation of tconv.
func (t *Type) tconv(flag FmtFlag, mode fmtMode, depth int) string {
func tconv(t *types.Type, flag FmtFlag, mode fmtMode, depth int) string {
if t == nil {
return "<T>"
}
@ -1867,7 +1806,7 @@ func (t *Type) tconv(flag FmtFlag, mode fmtMode, depth int) string {
flag |= FmtUnsigned
}
str := t.typefmt(flag, mode, depth+1)
str := typefmt(t, flag, mode, depth+1)
return str
}

View file

@ -7,6 +7,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
@ -114,7 +115,7 @@ func moveToHeap(n *Node) {
// Allocate a local stack variable to hold the pointer to the heap copy.
// temp will add it to the function declaration list automatically.
heapaddr := temp(typPtr(n.Type))
heapaddr := temp(types.NewPtr(n.Type))
heapaddr.Sym = lookup("&" + n.Sym.Name)
heapaddr.Orig.Sym = heapaddr.Sym
@ -191,11 +192,12 @@ func autotmpname(n int) string {
// Start with a buffer big enough to hold a large n.
b := []byte(prefix + " ")[:len(prefix)]
b = strconv.AppendInt(b, int64(n), 10)
return internString(b)
_ = b
return types.InternString(b)
}
// make a new Node off the books
func tempnamel(pos src.XPos, curfn *Node, nn *Node, t *Type) {
func tempnamel(pos src.XPos, curfn *Node, nn *Node, t *types.Type) {
if curfn == nil {
Fatalf("no curfn for tempname")
}
@ -207,12 +209,12 @@ func tempnamel(pos src.XPos, curfn *Node, nn *Node, t *Type) {
Fatalf("tempname called with nil type")
}
s := &Sym{
s := &types.Sym{
Name: autotmpname(len(curfn.Func.Dcl)),
Pkg: localpkg,
}
n := newnamel(pos, s)
s.Def = n
s.Def = asTypesNode(n)
n.Type = t
n.Class = PAUTO
n.Esc = EscNever
@ -224,16 +226,16 @@ func tempnamel(pos src.XPos, curfn *Node, nn *Node, t *Type) {
*nn = *n
}
func temp(t *Type) *Node {
func temp(t *types.Type) *Node {
var n Node
tempnamel(lineno, Curfn, &n, t)
n.Sym.Def.SetUsed(true)
asNode(n.Sym.Def).SetUsed(true)
return n.Orig
}
func tempAt(pos src.XPos, curfn *Node, t *Type) *Node {
func tempAt(pos src.XPos, curfn *Node, t *types.Type) *Node {
var n Node
tempnamel(pos, curfn, &n, t)
n.Sym.Def.SetUsed(true)
asNode(n.Sym.Def).SetUsed(true)
return n.Orig
}
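
autotmpname appends the counter to a shared prefix and interns the result through types.InternString; interning pays off because the same temporary names recur in every function compiled. A minimal sketch of such an interner (the real types.InternString may differ in detail):

package main

import "fmt"

var interned = map[string]string{}

// internString returns a single shared string for equal byte contents.
// The m[string(b)] lookup form is optimized by the compiler, so the probe
// itself does not allocate a temporary string.
func internString(b []byte) string {
	s, ok := interned[string(b)]
	if !ok {
		s = string(b)
		interned[s] = s
	}
	return s
}

func main() {
	a := internString([]byte("autotmp_0"))
	b := internString([]byte("autotmp_0"))
	fmt.Println(a == b, len(interned)) // true 1
}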

View file

@ -6,90 +6,25 @@ package gc
import (
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/obj"
"cmd/internal/src"
)
const (
BADWIDTH = -1000000000
BADWIDTH = types.BADWIDTH
MaxStackVarSize = 10 * 1024 * 1024
)
type Pkg struct {
Name string // package name, e.g. "sys"
Path string // string literal used in import statement, e.g. "runtime/internal/sys"
Pathsym *obj.LSym
Prefix string // escaped path for use in symbol table
Imported bool // export data of this package was parsed
Direct bool // imported directly
Syms map[string]*Sym
}
// isRuntime reports whether p is package runtime.
func (p *Pkg) isRuntime() bool {
// isRuntimePkg reports whether p is package runtime.
func isRuntimePkg(p *types.Pkg) bool {
if compiling_runtime && p == localpkg {
return true
}
return p.Path == "runtime"
}
// Sym represents an object name. Most commonly, this is a Go identifier naming
// an object declared within a package, but Syms are also used to name internal
// synthesized objects.
//
// As an exception, field and method names that are exported use the Sym
// associated with localpkg instead of the package that declared them. This
// allows using Sym pointer equality to test for Go identifier uniqueness when
// handling selector expressions.
type Sym struct {
Link *Sym
Importdef *Pkg // where imported definition was found
Linkname string // link name
// saved and restored by dcopy
Pkg *Pkg
Name string // object name
Def *Node // definition: ONAME OTYPE OPACK or OLITERAL
Lastlineno src.XPos // last declaration for diagnostic
Block int32 // blocknumber to catch redeclaration
flags bitset8
Label *Node // corresponding label (ephemeral)
Origpkg *Pkg // original package for . import
Lsym *obj.LSym
}
const (
symExport = 1 << iota // added to exportlist (no need to add again)
symPackage
symExported // already written out by export
symUniq
symSiggen
symAsm
symAlgGen
)
func (sym *Sym) Export() bool { return sym.flags&symExport != 0 }
func (sym *Sym) Package() bool { return sym.flags&symPackage != 0 }
func (sym *Sym) Exported() bool { return sym.flags&symExported != 0 }
func (sym *Sym) Uniq() bool { return sym.flags&symUniq != 0 }
func (sym *Sym) Siggen() bool { return sym.flags&symSiggen != 0 }
func (sym *Sym) Asm() bool { return sym.flags&symAsm != 0 }
func (sym *Sym) AlgGen() bool { return sym.flags&symAlgGen != 0 }
func (sym *Sym) SetExport(b bool) { sym.flags.set(symExport, b) }
func (sym *Sym) SetPackage(b bool) { sym.flags.set(symPackage, b) }
func (sym *Sym) SetExported(b bool) { sym.flags.set(symExported, b) }
func (sym *Sym) SetUniq(b bool) { sym.flags.set(symUniq, b) }
func (sym *Sym) SetSiggen(b bool) { sym.flags.set(symSiggen, b) }
func (sym *Sym) SetAsm(b bool) { sym.flags.set(symAsm, b) }
func (sym *Sym) SetAlgGen(b bool) { sym.flags.set(symAlgGen, b) }
func (sym *Sym) isAlias() bool {
return sym.Def != nil && sym.Def.Sym != sym
}
// The Class of a variable/function describes the "storage class"
// of a variable or function. During parsing, storage classes are
// called declaration contexts.
@ -165,38 +100,36 @@ var debugstr string
var Debug_checknil int
var Debug_typeassert int
var localpkg *Pkg // package being compiled
var localpkg *types.Pkg // package being compiled
var inimport bool // set during import
var itabpkg *Pkg // fake pkg for itab entries
var itabpkg *types.Pkg // fake pkg for itab entries
var itablinkpkg *Pkg // fake package for runtime itab entries
var itablinkpkg *types.Pkg // fake package for runtime itab entries
var Runtimepkg *Pkg // fake package runtime
var Runtimepkg *types.Pkg // fake package runtime
var racepkg *Pkg // package runtime/race
var racepkg *types.Pkg // package runtime/race
var msanpkg *Pkg // package runtime/msan
var msanpkg *types.Pkg // package runtime/msan
var typepkg *Pkg // fake package for runtime type info (headers)
var typepkg *types.Pkg // fake package for runtime type info (headers)
var unsafepkg *Pkg // package unsafe
var unsafepkg *types.Pkg // package unsafe
var trackpkg *Pkg // fake package for field tracking
var trackpkg *types.Pkg // fake package for field tracking
var mappkg *Pkg // fake package for map zero value
var mappkg *types.Pkg // fake package for map zero value
var zerosize int64
var Tptr EType // either TPTR32 or TPTR64
var myimportpath string
var localimport string
var asmhdr string
var simtype [NTYPE]EType
var simtype [NTYPE]types.EType
var (
isforw [NTYPE]bool
@ -238,7 +171,7 @@ var exportlist []*Node
var importlist []*Node // imported functions and methods with inlinable bodies
var funcsyms []*Sym
var funcsyms []*types.Sym
var dclcontext Class // PEXTERN/PAUTO

View file

@ -31,6 +31,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
)
@ -194,13 +195,13 @@ func ggloblnod(nam *Node) {
if nam.Name.Readonly() {
flags = obj.RODATA
}
if nam.Type != nil && !haspointers(nam.Type) {
if nam.Type != nil && !types.Haspointers(nam.Type) {
flags |= obj.NOPTR
}
Ctxt.Globl(s, nam.Type.Width, flags)
}
func ggloblsym(s *Sym, width int32, flags int16) {
func ggloblsym(s *types.Sym, width int32, flags int16) {
ggloblLSym(Linksym(s), width, flags)
}
@ -212,7 +213,7 @@ func ggloblLSym(s *obj.LSym, width int32, flags int16) {
Ctxt.Globl(s, int64(width), int(flags))
}
func isfat(t *Type) bool {
func isfat(t *types.Type) bool {
if t != nil {
switch t.Etype {
case TSTRUCT, TARRAY, TSLICE, TSTRING,
@ -231,8 +232,8 @@ func Addrconst(a *obj.Addr, v int64) {
}
// nodarg returns a Node for the function argument denoted by t,
// which is either the entire function argument or result struct (t is a struct *Type)
// or a specific argument (t is a *Field within a struct *Type).
// which is either the entire function argument or result struct (t is a struct *types.Type)
// or a specific argument (t is a *types.Field within a struct *types.Type).
//
// If fp is 0, the node is for use by a caller invoking the given
// function, preparing the arguments before the call
@ -247,12 +248,12 @@ func Addrconst(a *obj.Addr, v int64) {
func nodarg(t interface{}, fp int) *Node {
var n *Node
var funarg Funarg
var funarg types.Funarg
switch t := t.(type) {
default:
Fatalf("bad nodarg %T(%v)", t, t)
case *Type:
case *types.Type:
// Entire argument struct, not just one arg
if !t.IsFuncArgStruct() {
Fatalf("nodarg: bad type %v", t)
@ -271,7 +272,7 @@ func nodarg(t interface{}, fp int) *Node {
}
n.Xoffset = first.Offset
case *Field:
case *types.Field:
funarg = t.Funarg
if fp == 1 {
// NOTE(rsc): This should be using t.Nname directly,
@ -285,7 +286,7 @@ func nodarg(t interface{}, fp int) *Node {
// toward time for the Go 1.7 beta).
// At some quieter time (assuming we've never seen these Fatalfs happen)
// we could change this code to use "expect" directly.
expect := t.Nname
expect := asNode(t.Nname)
if expect.isParamHeapCopy() {
expect = expect.Name.Param.Stackcopy
}
@ -293,7 +294,7 @@ func nodarg(t interface{}, fp int) *Node {
for _, n := range Curfn.Func.Dcl {
if (n.Class == PPARAM || n.Class == PPARAMOUT) && !isblanksym(t.Sym) && n.Sym == t.Sym {
if n != expect {
Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, t.Nname, t.Nname, t.Nname.Op)
Fatalf("nodarg: unexpected node: %v (%p %v) vs %v (%p %v)", n, n, n.Op, asNode(t.Nname), asNode(t.Nname), asNode(t.Nname).Op)
}
return n
}
@ -313,7 +314,7 @@ func nodarg(t interface{}, fp int) *Node {
Fatalf("nodarg: offset not computed for %v", t)
}
n.Xoffset = t.Offset
n.Orig = t.Nname
n.Orig = asNode(t.Nname)
}
// Rewrite argument named _ to __,
@ -333,7 +334,7 @@ func nodarg(t interface{}, fp int) *Node {
case 1: // reading arguments inside call
n.Class = PPARAM
if funarg == FunargResults {
if funarg == types.FunargResults {
n.Class = PPARAMOUT
}
}
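
The hunks above change nodarg's signature cases but keep its shape: a type switch over an interface{} that holds either the whole argument struct or one field. A self-contained sketch of that dispatch shape, with stand-in Type and Field structs rather than the compiler's:

package main

import "fmt"

type Type struct{ name string }

type Field struct {
	name   string
	offset int64
}

// describe mirrors nodarg's switch: accept either the entire argument
// struct or a single field, and fail loudly on anything else.
func describe(t interface{}) string {
	switch t := t.(type) {
	case *Type:
		return "entire argument struct " + t.name
	case *Field:
		return fmt.Sprintf("field %s at offset %d", t.name, t.offset)
	default:
		panic(fmt.Sprintf("bad argument %T", t))
	}
}

func main() {
	fmt.Println(describe(&Type{name: "args"}))
	fmt.Println(describe(&Field{name: "x", offset: 8}))
}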

View file

@ -4,6 +4,8 @@
package gc
import "cmd/compile/internal/types"
// a function named init is a special case.
// it is called by the initialization before
// main is run. to make it unique within a
@ -12,7 +14,7 @@ package gc
var renameinit_initgen int
func renameinit() *Sym {
func renameinit() *types.Sym {
renameinit_initgen++
return lookupN("init.", renameinit_initgen)
}
@ -42,7 +44,7 @@ func anyinit(n []*Node) bool {
}
// are there any imported init functions
for _, s := range initSyms {
for _, s := range types.InitSyms {
if s.Def != nil {
return true
}
@ -81,7 +83,7 @@ func fninit(n []*Node) {
// (1)
gatevar := newname(lookup("initdone·"))
addvar(gatevar, Types[TUINT8], PEXTERN)
addvar(gatevar, types.Types[TUINT8], PEXTERN)
// (2)
fn := nod(ODCLFUNC, nil, nil)
@ -116,10 +118,10 @@ func fninit(n []*Node) {
r = append(r, a)
// (6)
for _, s := range initSyms {
for _, s := range types.InitSyms {
if s.Def != nil && s != initsym {
// could check that it is fn of no args/returns
a = nod(OCALL, s.Def, nil)
a = nod(OCALL, asNode(s.Def), nil)
r = append(r, a)
}
}
@ -134,7 +136,7 @@ func fninit(n []*Node) {
if s.Def == nil {
break
}
a = nod(OCALL, s.Def, nil)
a = nod(OCALL, asNode(s.Def), nil)
r = append(r, a)
}

View file

@ -28,13 +28,14 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
)
// Get the function's package. For ordinary functions it's on the ->sym, but for imported methods
// the ->sym can be re-used in the local package, so peel it off the receiver's type.
func fnpkg(fn *Node) *Pkg {
func fnpkg(fn *Node) *types.Pkg {
if fn.IsMethod() {
// method
rcvr := fn.Type.Recv().Type
@ -171,7 +172,7 @@ func caninl(fn *Node) {
// hack, TODO, check for better way to link method nodes back to the thing with the ->inl
// this is so export can find the body of a method
fn.Type.SetNname(n)
fn.Type.FuncType().Nname = asTypesNode(n)
if Debug['m'] > 1 {
fmt.Printf("%v: can inline %#v as: %#v { %#v }\n", fn.Line(), n, fn.Type, n.Func.Inl)
@ -210,7 +211,7 @@ func ishairy(n *Node, budget *int32, reason *string) bool {
}
if n.isMethodCalledAsFunction() {
if d := n.Left.Sym.Def; d != nil && d.Func.Inl.Len() != 0 {
if d := asNode(n.Left.Sym.Def); d != nil && d.Func.Inl.Len() != 0 {
*budget -= d.Func.InlCost
break
}
@ -229,7 +230,7 @@ func ishairy(n *Node, budget *int32, reason *string) bool {
if t.Nname() == nil {
Fatalf("no function definition for [%p] %+v\n", t, t)
}
if inlfn := t.Nname().Func; inlfn.Inl.Len() != 0 {
if inlfn := asNode(t.FuncType().Nname).Func; inlfn.Inl.Len() != 0 {
*budget -= inlfn.InlCost
break
}
@ -507,8 +508,8 @@ func inlnode(n *Node) *Node {
}
if n.Left.Func != nil && n.Left.Func.Inl.Len() != 0 && !isIntrinsicCall(n) { // normal case
n = mkinlcall(n, n.Left, n.Isddd())
} else if n.isMethodCalledAsFunction() && n.Left.Sym.Def != nil {
n = mkinlcall(n, n.Left.Sym.Def, n.Isddd())
} else if n.isMethodCalledAsFunction() && asNode(n.Left.Sym.Def) != nil {
n = mkinlcall(n, asNode(n.Left.Sym.Def), n.Isddd())
}
case OCALLMETH:
@ -525,7 +526,7 @@ func inlnode(n *Node) *Node {
Fatalf("no function definition for [%p] %+v\n", n.Left.Type, n.Left.Type)
}
n = mkinlcall(n, n.Left.Type.Nname(), n.Isddd())
n = mkinlcall(n, asNode(n.Left.Type.FuncType().Nname), n.Isddd())
}
lineno = lno
@ -549,11 +550,11 @@ func mkinlcall(n *Node, fn *Node, isddd bool) *Node {
return n
}
func tinlvar(t *Field, inlvars map[*Node]*Node) *Node {
if t.Nname != nil && !isblank(t.Nname) {
inlvar := inlvars[t.Nname]
func tinlvar(t *types.Field, inlvars map[*Node]*Node) *Node {
if asNode(t.Nname) != nil && !isblank(asNode(t.Nname)) {
inlvar := inlvars[asNode(t.Nname)]
if inlvar == nil {
Fatalf("missing inlvar for %v\n", t.Nname)
Fatalf("missing inlvar for %v\n", asNode(t.Nname))
}
return inlvar
}
@ -631,10 +632,10 @@ func mkinlcall1(n *Node, fn *Node, isddd bool) *Node {
// temporaries for return values.
var m *Node
for _, t := range fn.Type.Results().Fields().Slice() {
if t != nil && t.Nname != nil && !isblank(t.Nname) {
m = inlvar(t.Nname)
if t != nil && asNode(t.Nname) != nil && !isblank(asNode(t.Nname)) {
m = inlvar(asNode(t.Nname))
m = typecheck(m, Erv)
inlvars[t.Nname] = m
inlvars[asNode(t.Nname)] = m
} else {
// anonymous return values, synthesize names for use in assignment that replaces return
m = retvar(t, i)
@ -650,8 +651,8 @@ func mkinlcall1(n *Node, fn *Node, isddd bool) *Node {
// method call with a receiver.
t := fn.Type.Recv()
if t != nil && t.Nname != nil && !isblank(t.Nname) && inlvars[t.Nname] == nil {
Fatalf("missing inlvar for %v\n", t.Nname)
if t != nil && t.Nname != nil && !isblank(asNode(t.Nname)) && inlvars[asNode(t.Nname)] == nil {
Fatalf("missing inlvar for %v\n", asNode(t.Nname))
}
if n.Left.Left == nil {
Fatalf("method call without receiver: %+v", n)
@ -669,7 +670,7 @@ func mkinlcall1(n *Node, fn *Node, isddd bool) *Node {
// check if inlined function is variadic.
variadic := false
var varargtype *Type
var varargtype *types.Type
varargcount := 0
for _, t := range fn.Type.Params().Fields().Slice() {
if t.Isddd() {
@ -719,8 +720,8 @@ func mkinlcall1(n *Node, fn *Node, isddd bool) *Node {
// append receiver inlvar to LHS.
t := fn.Type.Recv()
if t != nil && t.Nname != nil && !isblank(t.Nname) && inlvars[t.Nname] == nil {
Fatalf("missing inlvar for %v\n", t.Nname)
if t != nil && t.Nname != nil && !isblank(asNode(t.Nname)) && inlvars[asNode(t.Nname)] == nil {
Fatalf("missing inlvar for %v\n", asNode(t.Nname))
}
if t == nil {
Fatalf("method call unknown receiver type: %+v", n)
@ -753,7 +754,7 @@ func mkinlcall1(n *Node, fn *Node, isddd bool) *Node {
}
} else {
// match arguments except final variadic (unless the call is dotted itself)
t, it := iterFields(fn.Type.Params())
t, it := types.IterFields(fn.Type.Params())
for t != nil {
if li >= n.List.Len() {
break
@ -799,7 +800,7 @@ func mkinlcall1(n *Node, fn *Node, isddd bool) *Node {
as.Right = nodnil()
as.Right.Type = varargtype
} else {
varslicetype := typSlice(varargtype.Elem())
varslicetype := types.NewSlice(varargtype.Elem())
as.Right = nod(OCOMPLIT, nil, typenod(varslicetype))
as.Right.List.Set(varargs)
}
@ -911,7 +912,7 @@ func inlvar(var_ *Node) *Node {
}
// Synthesize a variable to store the inlined function's results in.
func retvar(t *Field, i int) *Node {
func retvar(t *types.Field, i int) *Node {
n := newname(lookupN("~r", i))
n.Type = t.Type
n.Class = PAUTO
@ -923,7 +924,7 @@ func retvar(t *Field, i int) *Node {
// Synthesize a variable to store the inlined function's arguments
// when they come from a multiple return call.
func argvar(t *Type, i int) *Node {
func argvar(t *types.Type, i int) *Node {
n := newname(lookupN("~arg", i))
n.Type = t.Elem()
n.Class = PAUTO

View file

@ -104,17 +104,6 @@ func pragmaValue(verb string) syntax.Pragma {
return 0
}
var internedStrings = map[string]string{}
func internString(b []byte) string {
s, ok := internedStrings[string(b)] // string(b) here doesn't allocate
if !ok {
s = string(b)
internedStrings[s] = s
}
return s
}
// pragcgo is called concurrently if files are parsed concurrently.
func (p *noder) pragcgo(pos src.Pos, text string) string {
f := pragmaFields(text)

View file

@ -10,6 +10,7 @@ import (
"bufio"
"bytes"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"cmd/internal/sys"
@ -347,6 +348,35 @@ func Main(archInit func(*Arch)) {
Widthptr = thearch.LinkArch.PtrSize
Widthreg = thearch.LinkArch.RegSize
// initialize types package
// (we need to do this to break dependencies that otherwise
// would lead to import cycles)
types.Widthptr = Widthptr
types.Dowidth = dowidth
types.Fatalf = Fatalf
types.Sconv = func(s *types.Sym, flag, mode int) string {
return sconv(s, FmtFlag(flag), fmtMode(mode))
}
types.Tconv = func(t *types.Type, flag, mode, depth int) string {
return tconv(t, FmtFlag(flag), fmtMode(mode), depth)
}
types.FormatSym = func(sym *types.Sym, s fmt.State, verb rune, mode int) {
symFormat(sym, s, verb, fmtMode(mode))
}
types.FormatType = func(t *types.Type, s fmt.State, verb rune, mode int) {
typeFormat(t, s, verb, fmtMode(mode))
}
types.Cmptyp = cmptyp
types.FieldName = func(f *types.Field) string {
return f.Sym.Name
}
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return Linksym(typenamesym(t))
}
types.FmtLeft = int(FmtLeft)
types.FmtUnsigned = int(FmtUnsigned)
types.FErr = FErr
initUniverse()
blockgen = 1
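
This hunk is the heart of the cycle-breaking: package types declares function variables, and the gc frontend installs implementations before any use, so types never has to import gc. A minimal single-file sketch of that dependency inversion; the hook names below are placeholders, not the real ones.

package main

import "fmt"

// Hooks a lower-level package would declare so a higher-level package can
// fill them in at startup instead of being imported by it.
var (
	Fatalf func(format string, args ...interface{})
	Sconv  func(name string, flag, mode int) string
)

func formatSym(name string) string {
	if Sconv == nil {
		Fatalf("Sconv hook not installed")
	}
	return Sconv(name, 0, 0)
}

func main() {
	// The frontend installs its implementations early, as Main does above.
	Fatalf = func(format string, args ...interface{}) { panic(fmt.Sprintf(format, args...)) }
	Sconv = func(name string, flag, mode int) string { return "sym " + name }

	fmt.Println(formatSym("init"))
}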
@ -752,7 +782,7 @@ func loadsys() {
inimport = false
}
func importfile(f *Val) *Pkg {
func importfile(f *Val) *types.Pkg {
path_, ok := f.U.(string)
if !ok {
yyerror("import path must be a string")
@ -964,28 +994,28 @@ func mkpackage(pkgname string) {
func clearImports() {
for _, s := range localpkg.Syms {
if s.Def == nil {
if asNode(s.Def) == nil {
continue
}
if s.Def.Op == OPACK {
if asNode(s.Def).Op == OPACK {
// throw away top-level package name leftover
// from previous file.
// leave s->block set to cause redeclaration
// errors if a conflicting top-level name is
// introduced by a different file.
if !s.Def.Used() && nsyntaxerrors == 0 {
pkgnotused(s.Def.Pos, s.Def.Name.Pkg.Path, s.Name)
if !asNode(s.Def).Used() && nsyntaxerrors == 0 {
pkgnotused(asNode(s.Def).Pos, asNode(s.Def).Name.Pkg.Path, s.Name)
}
s.Def = nil
continue
}
if s.isAlias() {
if IsAlias(s) {
// throw away top-level name left over
// from previous import . "x"
if s.Def.Name != nil && s.Def.Name.Pack != nil && !s.Def.Name.Pack.Used() && nsyntaxerrors == 0 {
pkgnotused(s.Def.Name.Pack.Pos, s.Def.Name.Pack.Name.Pkg.Path, "")
s.Def.Name.Pack.SetUsed(true)
if asNode(s.Def).Name != nil && asNode(s.Def).Name.Pack != nil && !asNode(s.Def).Name.Pack.Used() && nsyntaxerrors == 0 {
pkgnotused(asNode(s.Def).Name.Pack.Pos, asNode(s.Def).Name.Pack.Name.Pkg.Path, "")
asNode(s.Def).Name.Pack.SetUsed(true)
}
s.Def = nil
@ -993,3 +1023,7 @@ func clearImports() {
}
}
}
func IsAlias(sym *types.Sym) bool {
return sym.Def != nil && asNode(sym.Def).Sym != sym
}
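
The asNode/asTypesNode calls sprinkled through these hunks bridge *gc.Node and the opaque placeholder node type in package types. Their definitions are not shown in this diff; one plausible implementation, assuming they are plain unsafe.Pointer casts, looks like this (the struct definitions below are stand-ins):

package main

import (
	"fmt"
	"unsafe"
)

type Node struct{ Op string } // stand-in for gc.Node

type typesNode struct{ _ int } // stand-in for the opaque placeholder type

func asNode(n *typesNode) *Node      { return (*Node)(unsafe.Pointer(n)) }
func asTypesNode(n *Node) *typesNode { return (*typesNode)(unsafe.Pointer(n)) }

func main() {
	n := &Node{Op: "OPACK"}
	round := asNode(asTypesNode(n)) // round-trips to the same pointer
	fmt.Println(round == n, round.Op)
}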

View file

@ -34,6 +34,8 @@ func main() {
fmt.Fprintln(&b, "// Code generated by mkbuiltin.go. DO NOT EDIT.")
fmt.Fprintln(&b)
fmt.Fprintln(&b, "package gc")
fmt.Fprintln(&b)
fmt.Fprintln(&b, `import "cmd/compile/internal/types"`)
mkbuiltin(&b, "runtime")
@ -98,8 +100,8 @@ func mkbuiltin(w io.Writer, name string) {
fmt.Fprintln(w, "}")
fmt.Fprintln(w)
fmt.Fprintf(w, "func %sTypes() []*Type {\n", name)
fmt.Fprintf(w, "var typs [%d]*Type\n", len(interner.typs))
fmt.Fprintf(w, "func %sTypes() []*types.Type {\n", name)
fmt.Fprintf(w, "var typs [%d]*types.Type\n", len(interner.typs))
for i, typ := range interner.typs {
fmt.Fprintf(w, "typs[%d] = %s\n", i, typ)
}
@ -138,42 +140,42 @@ func (i *typeInterner) mktype(t ast.Expr) string {
case *ast.Ident:
switch t.Name {
case "byte":
return "bytetype"
return "types.Bytetype"
case "rune":
return "runetype"
return "types.Runetype"
}
return fmt.Sprintf("Types[T%s]", strings.ToUpper(t.Name))
return fmt.Sprintf("types.Types[T%s]", strings.ToUpper(t.Name))
case *ast.SelectorExpr:
if t.X.(*ast.Ident).Name != "unsafe" || t.Sel.Name != "Pointer" {
log.Fatalf("unhandled type: %#v", t)
}
return "Types[TUNSAFEPTR]"
return "types.Types[TUNSAFEPTR]"
case *ast.ArrayType:
if t.Len == nil {
return fmt.Sprintf("typSlice(%s)", i.subtype(t.Elt))
return fmt.Sprintf("types.NewSlice(%s)", i.subtype(t.Elt))
}
return fmt.Sprintf("typArray(%s, %d)", i.subtype(t.Elt), intconst(t.Len))
return fmt.Sprintf("types.NewArray(%s, %d)", i.subtype(t.Elt), intconst(t.Len))
case *ast.ChanType:
dir := "Cboth"
dir := "types.Cboth"
switch t.Dir {
case ast.SEND:
dir = "Csend"
dir = "types.Csend"
case ast.RECV:
dir = "Crecv"
dir = "types.Crecv"
}
return fmt.Sprintf("typChan(%s, %s)", i.subtype(t.Value), dir)
return fmt.Sprintf("types.NewChan(%s, %s)", i.subtype(t.Value), dir)
case *ast.FuncType:
return fmt.Sprintf("functype(nil, %s, %s)", i.fields(t.Params, false), i.fields(t.Results, false))
case *ast.InterfaceType:
if len(t.Methods.List) != 0 {
log.Fatal("non-empty interfaces unsupported")
}
return "Types[TINTER]"
return "types.Types[TINTER]"
case *ast.MapType:
return fmt.Sprintf("typMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
return fmt.Sprintf("types.NewMap(%s, %s)", i.subtype(t.Key), i.subtype(t.Value))
case *ast.StarExpr:
return fmt.Sprintf("typPtr(%s)", i.subtype(t.X))
return fmt.Sprintf("types.NewPtr(%s)", i.subtype(t.X))
case *ast.StructType:
return fmt.Sprintf("tostruct(%s)", i.fields(t.Fields, true))

View file

@ -12,6 +12,7 @@ import (
"unicode/utf8"
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
)
@ -147,7 +148,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
ipkg.Direct = true
var my *Sym
var my *types.Sym
if imp.LocalPkgName != nil {
my = p.name(imp.LocalPkgName)
} else {
@ -173,7 +174,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
lineno = pack.Pos
redeclare(my, "as imported package name")
}
my.Def = pack
my.Def = asTypesNode(pack)
my.Lastlineno = pack.Pos
my.Block = 1 // at top level
}
@ -521,7 +522,7 @@ func (p *noder) expr(expr syntax.Expr) *Node {
return p.nod(expr, OTMAP, p.typeExpr(expr.Key), p.typeExpr(expr.Value))
case *syntax.ChanType:
n := p.nod(expr, OTCHAN, p.typeExpr(expr.Elem), nil)
n.Etype = EType(p.chanDir(expr.Dir))
n.Etype = types.EType(p.chanDir(expr.Dir))
return n
case *syntax.TypeSwitchGuard:
@ -549,14 +550,14 @@ func (p *noder) typeExprOrNil(typ syntax.Expr) *Node {
return nil
}
func (p *noder) chanDir(dir syntax.ChanDir) ChanDir {
func (p *noder) chanDir(dir syntax.ChanDir) types.ChanDir {
switch dir {
case 0:
return Cboth
return types.Cboth
case syntax.SendOnly:
return Csend
return types.Csend
case syntax.RecvOnly:
return Crecv
return types.Crecv
}
panic("unhandled ChanDir")
}
@ -605,7 +606,7 @@ func (p *noder) interfaceType(expr *syntax.InterfaceType) *Node {
return n
}
func (p *noder) packname(expr syntax.Expr) *Sym {
func (p *noder) packname(expr syntax.Expr) *types.Sym {
switch expr := expr.(type) {
case *syntax.Name:
name := p.name(expr)
@ -615,13 +616,13 @@ func (p *noder) packname(expr syntax.Expr) *Sym {
return name
case *syntax.SelectorExpr:
name := p.name(expr.X.(*syntax.Name))
var pkg *Pkg
if name.Def == nil || name.Def.Op != OPACK {
var pkg *types.Pkg
if asNode(name.Def) == nil || asNode(name.Def).Op != OPACK {
yyerror("%v is not a package", name)
pkg = localpkg
} else {
name.Def.SetUsed(true)
pkg = name.Def.Name.Pkg
asNode(name.Def).SetUsed(true)
pkg = asNode(name.Def).Name.Pkg
}
return restrictlookup(expr.Sel.Value, pkg)
}
@ -681,7 +682,7 @@ func (p *noder) stmt(stmt syntax.Stmt) *Node {
if stmt.Op != 0 && stmt.Op != syntax.Def {
n := p.nod(stmt, OASOP, p.expr(stmt.Lhs), p.expr(stmt.Rhs))
n.SetImplicit(stmt.Rhs == syntax.ImplicitOne)
n.Etype = EType(p.binOp(stmt.Op))
n.Etype = types.EType(p.binOp(stmt.Op))
return n
}
@ -757,7 +758,7 @@ func (p *noder) stmt(stmt syntax.Stmt) *Node {
if ln.Class != PPARAMOUT {
break
}
if ln.Sym.Def != ln {
if asNode(ln.Sym.Def) != ln {
yyerror("%s is shadowed during return", ln.Sym.Name)
}
}
@ -1030,7 +1031,7 @@ func (p *noder) basicLit(lit *syntax.BasicLit) Val {
}
}
func (p *noder) name(name *syntax.Name) *Sym {
func (p *noder) name(name *syntax.Name) *types.Sym {
return lookup(name.Value)
}
@ -1125,7 +1126,7 @@ func (p *noder) pragma(pos src.Pos, text string) syntax.Pragma {
return 0
}
func mkname(sym *Sym) *Node {
func mkname(sym *types.Sym) *Node {
n := oldname(sym)
if n.Name != nil && n.Name.Pack != nil {
n.Name.Pack.SetUsed(true)

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/obj"
"crypto/sha256"
@ -173,7 +174,7 @@ func dumpptabs() {
}
for _, exportn := range exportlist {
s := exportn.Sym
n := s.Def
n := asNode(s.Def)
if n == nil {
continue
}
@ -188,10 +189,10 @@ func dumpptabs() {
}
if n.Type.Etype == TFUNC && n.Class == PFUNC {
// function
ptabs = append(ptabs, ptabEntry{s: s, t: s.Def.Type})
ptabs = append(ptabs, ptabEntry{s: s, t: asNode(s.Def).Type})
} else {
// variable
ptabs = append(ptabs, ptabEntry{s: s, t: typPtr(s.Def.Type)})
ptabs = append(ptabs, ptabEntry{s: s, t: types.NewPtr(asNode(s.Def).Type)})
}
}
}
@ -217,7 +218,7 @@ func dumpglobls() {
}
for _, s := range funcsyms {
sf := s.Pkg.Lookup(s.funcsymname())
sf := s.Pkg.Lookup(funcsymname(s))
dsymptr(sf, 0, s, 0)
ggloblsym(sf, int32(Widthptr), obj.DUPOK|obj.RODATA)
}
@ -226,7 +227,7 @@ func dumpglobls() {
funcsyms = nil
}
func Linksym(s *Sym) *obj.LSym {
func Linksym(s *types.Sym) *obj.LSym {
if s == nil {
return nil
}
@ -247,7 +248,7 @@ func Linksym(s *Sym) *obj.LSym {
return ls
}
func duintxx(s *Sym, off int, v uint64, wid int) int {
func duintxx(s *types.Sym, off int, v uint64, wid int) int {
return duintxxLSym(Linksym(s), off, v, wid)
}
@ -260,23 +261,23 @@ func duintxxLSym(s *obj.LSym, off int, v uint64, wid int) int {
return int(obj.Setuintxx(Ctxt, s, int64(off), v, int64(wid)))
}
func duint8(s *Sym, off int, v uint8) int {
func duint8(s *types.Sym, off int, v uint8) int {
return duintxx(s, off, uint64(v), 1)
}
func duint16(s *Sym, off int, v uint16) int {
func duint16(s *types.Sym, off int, v uint16) int {
return duintxx(s, off, uint64(v), 2)
}
func duint32(s *Sym, off int, v uint32) int {
func duint32(s *types.Sym, off int, v uint32) int {
return duintxx(s, off, uint64(v), 4)
}
func duintptr(s *Sym, off int, v uint64) int {
func duintptr(s *types.Sym, off int, v uint64) int {
return duintxx(s, off, v, Widthptr)
}
func dbvec(s *Sym, off int, bv bvec) int {
func dbvec(s *types.Sym, off int, bv bvec) int {
// Runtime reads the bitmaps as byte arrays. Oblige.
for j := 0; int32(j) < bv.n; j += 8 {
word := bv.b[j/32]
@ -319,7 +320,7 @@ func slicebytes(nam *Node, s string, len int) {
slicebytes_gen++
symname := fmt.Sprintf(".gobytes.%d", slicebytes_gen)
sym := localpkg.Lookup(symname)
sym.Def = newname(sym)
sym.Def = asTypesNode(newname(sym))
off := dsname(sym, 0, s)
ggloblsym(sym, int32(off), obj.NOPTR|obj.LOCAL)
@ -333,7 +334,7 @@ func slicebytes(nam *Node, s string, len int) {
duintxx(nam.Sym, off, uint64(len), Widthint)
}
func dsname(s *Sym, off int, t string) int {
func dsname(s *types.Sym, off int, t string) int {
return dsnameLSym(Linksym(s), off, t)
}
@ -342,7 +343,7 @@ func dsnameLSym(s *obj.LSym, off int, t string) int {
return off + len(t)
}
func dsymptr(s *Sym, off int, x *Sym, xoff int) int {
func dsymptr(s *types.Sym, off int, x *types.Sym, xoff int) int {
return dsymptrLSym(Linksym(s), off, Linksym(x), xoff)
}
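
The duint8/duint16/duint32/duintptr helpers above differ only in the width they hand to duintxx. A toy version of the same layering, writing into a byte slice instead of an obj.LSym; little-endian byte order and an 8-byte pointer width are assumptions of this sketch.

package main

import (
	"encoding/binary"
	"fmt"
)

const widthptr = 8 // assumed 64-bit target

// duintxx writes v at off using wid bytes and returns the next offset.
func duintxx(buf []byte, off int, v uint64, wid int) int {
	switch wid {
	case 1:
		buf[off] = byte(v)
	case 2:
		binary.LittleEndian.PutUint16(buf[off:], uint16(v))
	case 4:
		binary.LittleEndian.PutUint32(buf[off:], uint32(v))
	case 8:
		binary.LittleEndian.PutUint64(buf[off:], v)
	}
	return off + wid
}

func duint8(buf []byte, off int, v uint8) int    { return duintxx(buf, off, uint64(v), 1) }
func duint32(buf []byte, off int, v uint32) int  { return duintxx(buf, off, uint64(v), 4) }
func duintptr(buf []byte, off int, v uint64) int { return duintxx(buf, off, v, widthptr) }

func main() {
	buf := make([]byte, 16)
	off := duint8(buf, 0, 0x7f)
	off = duint32(buf, off, 0xdeadbeef)
	off = duintptr(buf, off, 1)
	fmt.Println(off, buf[:off]) // 13 and the raw little-endian bytes
}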

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
)
@ -59,7 +60,7 @@ func order(fn *Node) {
// Ordertemp allocates a new temporary with the given type,
// pushes it onto the temp stack, and returns it.
// If clear is true, ordertemp emits code to zero the temporary.
func ordertemp(t *Type, order *Order, clear bool) *Node {
func ordertemp(t *types.Type, order *Order, clear bool) *Node {
var_ := temp(t)
if clear {
a := nod(OAS, var_, nil)
@ -83,7 +84,7 @@ func ordertemp(t *Type, order *Order, clear bool) *Node {
// (The other candidate would be map access, but map access
// returns a pointer to the result data instead of taking a pointer
// to be filled in.)
func ordercopyexpr(n *Node, t *Type, order *Order, clear int) *Node {
func ordercopyexpr(n *Node, t *types.Type, order *Order, clear int) *Node {
var_ := ordertemp(t, order, clear != 0)
a := nod(OAS, var_, n)
a = typecheck(a, Etop)
@ -208,7 +209,7 @@ func orderaddrtemp(n *Node, order *Order) *Node {
// ordermapkeytemp prepares n to be a key in a map runtime call and returns n.
// It should only be used for map runtime calls which have *_fast* versions.
func ordermapkeytemp(t *Type, n *Node, order *Order) *Node {
func ordermapkeytemp(t *types.Type, n *Node, order *Order) *Node {
// Most map calls need to take the address of the key.
// Exception: map*_fast* calls. See golang.org/issue/19015.
if mapfast(t) == mapslow {
@ -595,8 +596,8 @@ func orderstmt(n *Node, order *Order) {
orderexprlist(n.List, order)
n.Rlist.First().Left = orderexpr(n.Rlist.First().Left, order, nil) // arg to recv
ch := n.Rlist.First().Left.Type
tmp1 := ordertemp(ch.Elem(), order, haspointers(ch.Elem()))
tmp2 := ordertemp(Types[TBOOL], order, false)
tmp1 := ordertemp(ch.Elem(), order, types.Haspointers(ch.Elem()))
tmp2 := ordertemp(types.Types[TBOOL], order, false)
order.out = append(order.out, n)
r := nod(OAS, n.List.First(), tmp1)
r = typecheck(r, Etop)
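
In source terms, the OAS2RECV case above arranges for the two results of a receive to land in fresh temporaries before being assigned to the real destinations:

package main

import "fmt"

func main() {
	ch := make(chan int, 1)
	ch <- 7

	var x int
	var ok bool

	// The statement `x, ok = <-ch` is ordered roughly like this:
	tmp1, tmp2 := <-ch // tmp1 has the channel's element type, tmp2 is TBOOL
	x = tmp1
	ok = tmp2

	fmt.Println(x, ok) // 7 true
}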
@ -745,9 +746,9 @@ func orderstmt(n *Node, order *Order) {
// make copy.
r := n.Right
if r.Type.IsString() && r.Type != Types[TSTRING] {
if r.Type.IsString() && r.Type != types.Types[TSTRING] {
r = nod(OCONV, r, nil)
r.Type = Types[TSTRING]
r.Type = types.Types[TSTRING]
r = typecheck(r, Erv)
}
@ -761,7 +762,7 @@ func orderstmt(n *Node, order *Order) {
n.Right = ordercopyexpr(r, r.Type, order, 0)
// n->alloc is the temp for the iterator.
prealloc[n] = ordertemp(Types[TUINT8], order, true)
prealloc[n] = ordertemp(types.Types[TUINT8], order, true)
}
for i := range n.List.Slice() {
n.List.SetIndex(i, orderexprinplace(n.List.Index(i), order))
@ -862,7 +863,7 @@ func orderstmt(n *Node, order *Order) {
n2.Ninit.Append(tmp2)
}
r.Left = ordertemp(r.Right.Left.Type.Elem(), order, haspointers(r.Right.Left.Type.Elem()))
r.Left = ordertemp(r.Right.Left.Type.Elem(), order, types.Haspointers(r.Right.Left.Type.Elem()))
tmp2 = nod(OAS, tmp1, r.Left)
tmp2 = typecheck(tmp2, Etop)
n2.Ninit.Append(tmp2)
@ -879,7 +880,7 @@ func orderstmt(n *Node, order *Order) {
n2.Ninit.Append(tmp2)
}
r.List.Set1(ordertemp(Types[TBOOL], order, false))
r.List.Set1(ordertemp(types.Types[TBOOL], order, false))
tmp2 = okas(tmp1, r.List.First())
tmp2 = typecheck(tmp2, Etop)
n2.Ninit.Append(tmp2)
@ -1012,7 +1013,7 @@ func orderexpr(n *Node, order *Order, lhs *Node) *Node {
orderexprlist(n.List, order)
if n.List.Len() > 5 {
t := typArray(Types[TSTRING], int64(n.List.Len()))
t := types.NewArray(types.Types[TSTRING], int64(n.List.Len()))
prealloc[n] = ordertemp(t, order, false)
}
@ -1165,7 +1166,7 @@ func orderexpr(n *Node, order *Order, lhs *Node) *Node {
case OCLOSURE:
if n.Noescape() && n.Func.Cvars.Len() > 0 {
prealloc[n] = ordertemp(Types[TUINT8], order, false) // walk will fill in correct type
prealloc[n] = ordertemp(types.Types[TUINT8], order, false) // walk will fill in correct type
}
case OARRAYLIT, OSLICELIT, OCALLPART:
@ -1174,7 +1175,7 @@ func orderexpr(n *Node, order *Order, lhs *Node) *Node {
orderexprlist(n.List, order)
orderexprlist(n.Rlist, order)
if n.Noescape() {
prealloc[n] = ordertemp(Types[TUINT8], order, false) // walk will fill in correct type
prealloc[n] = ordertemp(types.Types[TUINT8], order, false) // walk will fill in correct type
}
case ODDDARG:
@ -1237,7 +1238,7 @@ func orderas2(n *Node, order *Order) {
left := []*Node{}
for _, l := range n.List.Slice() {
if !isblank(l) {
tmp := ordertemp(l.Type, order, haspointers(l.Type))
tmp := ordertemp(l.Type, order, types.Haspointers(l.Type))
tmplist = append(tmplist, tmp)
left = append(left, l)
}
@ -1266,11 +1267,11 @@ func orderokas2(n *Node, order *Order) {
var tmp1, tmp2 *Node
if !isblank(n.List.First()) {
typ := n.Rlist.First().Type
tmp1 = ordertemp(typ, order, haspointers(typ))
tmp1 = ordertemp(typ, order, types.Haspointers(typ))
}
if !isblank(n.List.Second()) {
tmp2 = ordertemp(Types[TBOOL], order, false)
tmp2 = ordertemp(types.Types[TBOOL], order, false)
}
order.out = append(order.out, n)

View file

@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/dwarf"
"cmd/internal/obj"
"cmd/internal/src"
@ -16,7 +17,7 @@ import (
// "Portable" code generation.
func makefuncdatasym(pp *Progs, nameprefix string, funcdatakind int64, curfn *Node) *Sym {
func makefuncdatasym(pp *Progs, nameprefix string, funcdatakind int64, curfn *Node) *types.Sym {
// This symbol requires a unique, reproducible name;
// unique to avoid duplicate symbols,
// and reproducible for reproducible builds and toolstash.
@ -165,8 +166,8 @@ func cmpstackvarlt(a, b *Node) bool {
return a.Used()
}
ap := haspointers(a.Type)
bp := haspointers(b.Type)
ap := types.Haspointers(a.Type)
bp := types.Haspointers(b.Type)
if ap != bp {
return ap
}
@ -230,7 +231,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
}
if f.Config.NeedsFpScratch && scratchUsed {
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, Types[TUINT64])
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[TUINT64])
}
sort.Sort(byStackVar(fn.Dcl))
@ -252,7 +253,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
}
s.stksize += w
s.stksize = Rnd(s.stksize, int64(n.Type.Align))
if haspointers(n.Type) {
if types.Haspointers(n.Type) {
s.stkptrsize = s.stksize
}
if thearch.LinkArch.InFamily(sys.MIPS, sys.MIPS64, sys.ARM, sys.ARM64, sys.PPC64, sys.S390X) {
@ -379,7 +380,7 @@ func debuginfo(fnsym *obj.LSym, curfn interface{}) []*dwarf.Var {
// fieldtrack adds R_USEFIELD relocations to fnsym to record any
// struct fields that it used.
func fieldtrack(fnsym *obj.LSym, tracked map[*Sym]struct{}) {
func fieldtrack(fnsym *obj.LSym, tracked map[*types.Sym]struct{}) {
if fnsym == nil {
return
}
@ -387,7 +388,7 @@ func fieldtrack(fnsym *obj.LSym, tracked map[*Sym]struct{}) {
return
}
trackSyms := make([]*Sym, 0, len(tracked))
trackSyms := make([]*types.Sym, 0, len(tracked))
for sym := range tracked {
trackSyms = append(trackSyms, sym)
}
@ -399,7 +400,7 @@ func fieldtrack(fnsym *obj.LSym, tracked map[*Sym]struct{}) {
}
}
type symByName []*Sym
type symByName []*types.Sym
func (a symByName) Len() int { return len(a) }
func (a symByName) Less(i, j int) bool { return a[i].Name < a[j].Name }

View file

@ -5,22 +5,23 @@
package gc
import (
"cmd/compile/internal/types"
"reflect"
"sort"
"testing"
)
func typeWithoutPointers() *Type {
t := typ(TSTRUCT)
f := &Field{Type: typ(TINT)}
t.SetFields([]*Field{f})
func typeWithoutPointers() *types.Type {
t := types.New(TSTRUCT)
f := &types.Field{Type: types.New(TINT)}
t.SetFields([]*types.Field{f})
return t
}
func typeWithPointers() *Type {
t := typ(TSTRUCT)
f := &Field{Type: typ(TPTR64)}
t.SetFields([]*Field{f})
func typeWithPointers() *types.Type {
t := types.New(TSTRUCT)
f := &types.Field{Type: types.New(TPTR64)}
t.SetFields([]*types.Field{f})
return t
}
@ -86,38 +87,38 @@ func TestCmpstackvar(t *testing.T) {
true,
},
{
Node{Class: PAUTO, Type: &Type{}, Name: &Name{flags: nameNeedzero}},
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{flags: nameNeedzero}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}},
true,
},
{
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}},
Node{Class: PAUTO, Type: &Type{}, Name: &Name{flags: nameNeedzero}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{flags: nameNeedzero}},
false,
},
{
Node{Class: PAUTO, Type: &Type{Width: 1}, Name: &Name{}},
Node{Class: PAUTO, Type: &Type{Width: 2}, Name: &Name{}},
Node{Class: PAUTO, Type: &types.Type{Width: 1}, Name: &Name{}},
Node{Class: PAUTO, Type: &types.Type{Width: 2}, Name: &Name{}},
false,
},
{
Node{Class: PAUTO, Type: &Type{Width: 2}, Name: &Name{}},
Node{Class: PAUTO, Type: &Type{Width: 1}, Name: &Name{}},
Node{Class: PAUTO, Type: &types.Type{Width: 2}, Name: &Name{}},
Node{Class: PAUTO, Type: &types.Type{Width: 1}, Name: &Name{}},
true,
},
{
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "abc"}},
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "xyz"}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "abc"}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "xyz"}},
true,
},
{
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "abc"}},
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "abc"}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "abc"}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "abc"}},
false,
},
{
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "xyz"}},
Node{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "abc"}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "xyz"}},
Node{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "abc"}},
false,
},
}
@ -135,41 +136,41 @@ func TestCmpstackvar(t *testing.T) {
func TestStackvarSort(t *testing.T) {
inp := []*Node{
{Class: PFUNC, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Xoffset: 0, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Xoffset: 10, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Xoffset: 20, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, flags: nodeUsed, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: typeWithoutPointers(), Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{flags: nameNeedzero}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{Width: 1}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{Width: 2}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "abc"}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "xyz"}},
{Class: PFUNC, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PFUNC, Xoffset: 0, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PFUNC, Xoffset: 10, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PFUNC, Xoffset: 20, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, flags: nodeUsed, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: typeWithoutPointers(), Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{flags: nameNeedzero}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{Width: 1}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{Width: 2}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "abc"}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "xyz"}},
}
want := []*Node{
{Class: PFUNC, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Xoffset: 0, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Xoffset: 10, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Xoffset: 20, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, flags: nodeUsed, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{flags: nameNeedzero}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{Width: 2}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{Width: 1}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "abc"}},
{Class: PAUTO, Type: &Type{}, Name: &Name{}, Sym: &Sym{Name: "xyz"}},
{Class: PAUTO, Type: typeWithoutPointers(), Name: &Name{}, Sym: &Sym{}},
{Class: PFUNC, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PFUNC, Xoffset: 0, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PFUNC, Xoffset: 10, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PFUNC, Xoffset: 20, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, flags: nodeUsed, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{flags: nameNeedzero}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{Width: 2}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{Width: 1}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "abc"}},
{Class: PAUTO, Type: &types.Type{}, Name: &Name{}, Sym: &types.Sym{Name: "xyz"}},
{Class: PAUTO, Type: typeWithoutPointers(), Name: &Name{}, Sym: &types.Sym{}},
}
// haspointers updates Type.Haspointers as a side effect, so
// exercise this function on all inputs so that reflect.DeepEqual
// doesn't produce false positives.
for i := range want {
haspointers(want[i].Type)
haspointers(inp[i].Type)
types.Haspointers(want[i].Type)
types.Haspointers(inp[i].Type)
}
sort.Sort(byStackVar(inp))

View file

@ -16,6 +16,7 @@ package gc
import (
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
"crypto/md5"
"fmt"
@ -84,7 +85,7 @@ type progeffectscache struct {
// nor do we care about empty structs (handled by the pointer check),
// nor do we care about the fake PAUTOHEAP variables.
func livenessShouldTrack(n *Node) bool {
return n.Op == ONAME && (n.Class == PAUTO || n.Class == PPARAM || n.Class == PPARAMOUT) && haspointers(n.Type)
return n.Op == ONAME && (n.Class == PAUTO || n.Class == PPARAM || n.Class == PPARAMOUT) && types.Haspointers(n.Type)
}
// getvariables returns the list of on-stack variables that we need to track.
@ -320,7 +321,7 @@ func (lv *Liveness) blockEffects(b *ssa.Block) *BlockEffects {
// and then simply copied into bv at the correct offset on future calls with
// the same type t. On https://rsc.googlecode.com/hg/testdata/slow.go, onebitwalktype1
// accounts for 40% of the 6g execution time.
func onebitwalktype1(t *Type, xoffset *int64, bv bvec) {
func onebitwalktype1(t *types.Type, xoffset *int64, bv bvec) {
if t.Align > 0 && *xoffset&int64(t.Align-1) != 0 {
Fatalf("onebitwalktype1: invalid initial alignment, %v", t)
}
@ -1050,7 +1051,7 @@ func livenessprintdebug(lv *Liveness) {
fmt.Printf("\n")
}
func finishgclocals(sym *Sym) {
func finishgclocals(sym *types.Sym) {
ls := Linksym(sym)
ls.Name = fmt.Sprintf("gclocals·%x", md5.Sum(ls.P))
ls.Set(obj.AttrDuplicateOK, true)
@ -1068,7 +1069,7 @@ func finishgclocals(sym *Sym) {
// first word dumped is the total number of bitmaps. The second word is the
// length of the bitmaps. All bitmaps are assumed to be of equal length. The
// remaining bytes are the raw bitmaps.
func livenessemit(lv *Liveness, argssym, livesym *Sym) {
func livenessemit(lv *Liveness, argssym, livesym *types.Sym) {
args := bvalloc(argswords(lv))
aoff := duint32(argssym, 0, uint32(len(lv.livevars))) // number of bitmaps
aoff = duint32(argssym, aoff, uint32(args.n)) // number of bits in each bitmap
@ -1095,7 +1096,7 @@ func livenessemit(lv *Liveness, argssym, livesym *Sym) {
// pointer variables in the function and emits a runtime data
// structure read by the garbage collector.
// Returns a map from GC safe points to their corresponding stack map index.
func liveness(e *ssafn, f *ssa.Func, argssym, livesym *Sym) map[*ssa.Value]int {
func liveness(e *ssafn, f *ssa.Func, argssym, livesym *types.Sym) map[*ssa.Value]int {
// Construct the global liveness state.
vars := getvariables(e.curfn)
lv := newliveness(e.curfn, f, vars, e.stkptrsize)

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
"strings"
@ -67,7 +68,7 @@ func instrument(fn *Node) {
// getcallerpc. We use -widthptr(FP) for x86.
// BUG: this will not work on arm.
nodpc := *nodfp
nodpc.Type = Types[TUINTPTR]
nodpc.Type = types.Types[TUINTPTR]
nodpc.Xoffset = int64(-Widthptr)
nd := mkcall("racefuncenter", nil, nil, &nodpc)
fn.Func.Enter.Prepend(nd)
@ -216,7 +217,7 @@ func instrumentnode(np **Node, init *Nodes, wr int, skip int) {
instrumentnode(&n.Left, init, 0, 0)
if n.Left.Type.IsMap() {
n1 := nod(OCONVNOP, n.Left, nil)
n1.Type = typPtr(Types[TUINT8])
n1.Type = types.NewPtr(types.Types[TUINT8])
n1 = nod(OIND, n1, nil)
n1 = typecheck(n1, Erv)
callinstr(&n1, init, 0, skip)
@ -561,14 +562,14 @@ func makeaddable(n *Node) {
func uintptraddr(n *Node) *Node {
r := nod(OADDR, n, nil)
r.SetBounded(true)
r = conv(r, Types[TUNSAFEPTR])
r = conv(r, Types[TUINTPTR])
r = conv(r, types.Types[TUNSAFEPTR])
r = conv(r, types.Types[TUINTPTR])
return r
}
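
uintptraddr builds at the IR level what the following source expresses directly; the SetBounded(true) step, which suppresses a nil/bounds check, has no source-level equivalent.

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	var x int
	// conv through TUNSAFEPTR, then TUINTPTR:
	p := uintptr(unsafe.Pointer(&x))
	fmt.Printf("%#x\n", p)
}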
func detachexpr(n *Node, init *Nodes) *Node {
addr := nod(OADDR, n, nil)
l := temp(typPtr(n.Type))
l := temp(types.NewPtr(n.Type))
as := nod(OAS, l, addr)
as = typecheck(as, Etop)
as = walkexpr(as, init)

View file

@ -4,14 +4,17 @@
package gc
import "unicode/utf8"
import (
"cmd/compile/internal/types"
"unicode/utf8"
)
// range
func typecheckrange(n *Node) {
var toomany int
var why string
var t1 *Type
var t2 *Type
var t1 *types.Type
var t2 *types.Type
var v1 *Node
var v2 *Node
var ls []*Node
@ -52,7 +55,7 @@ func typecheckrange(n *Node) {
goto out
case TARRAY, TSLICE:
t1 = Types[TINT]
t1 = types.Types[TINT]
t2 = t.Elem()
case TMAP:
@ -72,8 +75,8 @@ func typecheckrange(n *Node) {
}
case TSTRING:
t1 = Types[TINT]
t2 = runetype
t1 = types.Types[TINT]
t2 = types.Runetype
}
if n.List.Len() > 2 || toomany != 0 {
@ -187,15 +190,15 @@ func walkrange(n *Node) *Node {
// orderstmt arranged for a copy of the array/slice variable if needed.
ha := a
hv1 := temp(Types[TINT])
hn := temp(Types[TINT])
hv1 := temp(types.Types[TINT])
hn := temp(types.Types[TINT])
var hp *Node
init = append(init, nod(OAS, hv1, nil))
init = append(init, nod(OAS, hn, nod(OLEN, ha, nil)))
if v2 != nil {
hp = temp(typPtr(n.Type.Elem()))
hp = temp(types.NewPtr(n.Type.Elem()))
tmp := nod(OINDEX, ha, nodintconst(0))
tmp.SetBounded(true)
init = append(init, nod(OAS, hp, nod(OADDR, tmp, nil)))
@ -229,7 +232,7 @@ func walkrange(n *Node) *Node {
tmp.Type = hp.Type
tmp.Typecheck = 1
tmp.Right.Type = Types[Tptr]
tmp.Right.Type = types.Types[types.Tptr]
tmp.Right.Typecheck = 1
a = nod(OAS, hp, tmp)
a = typecheck(a, Etop)
@ -281,10 +284,10 @@ func walkrange(n *Node) *Node {
hv1 := temp(t.Elem())
hv1.Typecheck = 1
if haspointers(t.Elem()) {
if types.Haspointers(t.Elem()) {
init = append(init, nod(OAS, hv1, nil))
}
hb := temp(Types[TBOOL])
hb := temp(types.Types[TBOOL])
n.Left = nod(ONE, hb, nodbool(false))
a := nod(OAS2RECV, nil, nil)
@ -321,9 +324,9 @@ func walkrange(n *Node) *Node {
// orderstmt arranged for a copy of the string variable.
ha := a
hv1 := temp(Types[TINT])
hv1t := temp(Types[TINT])
hv2 := temp(runetype)
hv1 := temp(types.Types[TINT])
hv1t := temp(types.Types[TINT])
hv2 := temp(types.Runetype)
// hv1 := 0
init = append(init, nod(OAS, hv1, nil))
@ -339,7 +342,7 @@ func walkrange(n *Node) *Node {
// hv2 := rune(ha[hv1])
nind := nod(OINDEX, ha, hv1)
nind.SetBounded(true)
body = append(body, nod(OAS, hv2, conv(nind, runetype)))
body = append(body, nod(OAS, hv2, conv(nind, types.Runetype)))
// if hv2 < utf8.RuneSelf
nif := nod(OIF, nil, nil)
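
Written back as ordinary Go, the string case of walkrange lowers `for i, r := range s` roughly as below. The compiler's slow path calls a runtime decoder; utf8.DecodeRuneInString stands in for it in this sketch.

package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	s := "héllo"
	for hv1 := 0; hv1 < len(s); {
		hv1t := hv1         // index handed to the loop body
		hv2 := rune(s[hv1]) // fast path: assume a single-byte rune
		if hv2 < utf8.RuneSelf {
			hv1++
		} else {
			var size int
			hv2, size = utf8.DecodeRuneInString(s[hv1:]) // slow path
			hv1 += size
		}
		fmt.Println(hv1t, string(hv2)) // the body sees (index, rune)
	}
}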
@ -448,25 +451,25 @@ func memclrrange(n, v1, v2, a *Node) bool {
n.Left = nod(ONE, nod(OLEN, a, nil), nodintconst(0))
// hp = &a[0]
hp := temp(Types[TUNSAFEPTR])
hp := temp(types.Types[TUNSAFEPTR])
tmp := nod(OINDEX, a, nodintconst(0))
tmp.SetBounded(true)
tmp = nod(OADDR, tmp, nil)
tmp = nod(OCONVNOP, tmp, nil)
tmp.Type = Types[TUNSAFEPTR]
tmp.Type = types.Types[TUNSAFEPTR]
n.Nbody.Append(nod(OAS, hp, tmp))
// hn = len(a) * sizeof(elem(a))
hn := temp(Types[TUINTPTR])
hn := temp(types.Types[TUINTPTR])
tmp = nod(OLEN, a, nil)
tmp = nod(OMUL, tmp, nodintconst(elemsize))
tmp = conv(tmp, Types[TUINTPTR])
tmp = conv(tmp, types.Types[TUINTPTR])
n.Nbody.Append(nod(OAS, hn, tmp))
var fn *Node
if haspointers(a.Type.Elem()) {
if types.Haspointers(a.Type.Elem()) {
// memclrHasPointers(hp, hn)
fn = mkcall("memclrHasPointers", nil, nil, hp, hn)
} else {

View file

@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/gcprog"
"cmd/internal/obj"
"cmd/internal/src"
@ -15,8 +16,8 @@ import (
)
type itabEntry struct {
t, itype *Type
sym *Sym
t, itype *types.Type
sym *types.Sym
// symbol of the itab itself;
// filled in lazily after typecheck
@ -29,22 +30,22 @@ type itabEntry struct {
}
type ptabEntry struct {
s *Sym
t *Type
s *types.Sym
t *types.Type
}
// runtime interface and reflection data structures
var signatlist []*Type
var signatlist []*types.Type
var itabs []itabEntry
var ptabs []ptabEntry
type Sig struct {
name string
pkg *Pkg
isym *Sym
tsym *Sym
type_ *Type
mtype *Type
pkg *types.Pkg
isym *types.Sym
tsym *types.Sym
type_ *types.Type
mtype *types.Type
offset int32
}
@ -87,48 +88,48 @@ const (
func structfieldSize() int { return 3 * Widthptr } // Sizeof(runtime.structfield{})
func imethodSize() int { return 4 + 4 } // Sizeof(runtime.imethod{})
func uncommonSize(t *Type) int { // Sizeof(runtime.uncommontype{})
func uncommonSize(t *types.Type) int { // Sizeof(runtime.uncommontype{})
if t.Sym == nil && len(methods(t)) == 0 {
return 0
}
return 4 + 2 + 2 + 4 + 4
}
func makefield(name string, t *Type) *Field {
f := newField()
func makefield(name string, t *types.Type) *types.Field {
f := types.NewField()
f.Type = t
f.Sym = nopkg.Lookup(name)
f.Sym = types.Nopkg.Lookup(name)
return f
}
func mapbucket(t *Type) *Type {
func mapbucket(t *types.Type) *types.Type {
if t.MapType().Bucket != nil {
return t.MapType().Bucket
}
bucket := typ(TSTRUCT)
bucket := types.New(TSTRUCT)
keytype := t.Key()
valtype := t.Val()
dowidth(keytype)
dowidth(valtype)
if keytype.Width > MAXKEYSIZE {
keytype = typPtr(keytype)
keytype = types.NewPtr(keytype)
}
if valtype.Width > MAXVALSIZE {
valtype = typPtr(valtype)
valtype = types.NewPtr(valtype)
}
field := make([]*Field, 0, 5)
field := make([]*types.Field, 0, 5)
// The first field is: uint8 topbits[BUCKETSIZE].
arr := typArray(Types[TUINT8], BUCKETSIZE)
arr := types.NewArray(types.Types[TUINT8], BUCKETSIZE)
field = append(field, makefield("topbits", arr))
arr = typArray(keytype, BUCKETSIZE)
arr = types.NewArray(keytype, BUCKETSIZE)
arr.SetNoalg(true)
field = append(field, makefield("keys", arr))
arr = typArray(valtype, BUCKETSIZE)
arr = types.NewArray(valtype, BUCKETSIZE)
arr.SetNoalg(true)
field = append(field, makefield("values", arr))
@ -150,7 +151,7 @@ func mapbucket(t *Type) *Type {
// then it would end with an extra 32-bit padding field.
// Preempt that by emitting the padding here.
if int(t.Val().Align) > Widthptr || int(t.Key().Align) > Widthptr {
field = append(field, makefield("pad", Types[TUINTPTR]))
field = append(field, makefield("pad", types.Types[TUINTPTR]))
}
// If keys and values have no pointers, the map implementation
@ -159,9 +160,9 @@ func mapbucket(t *Type) *Type {
// Arrange for the bucket to have no pointers by changing
// the type of the overflow field to uintptr in this case.
// See comment on hmap.overflow in ../../../../runtime/hashmap.go.
otyp := typPtr(bucket)
if !haspointers(t.Val()) && !haspointers(t.Key()) && t.Val().Width <= MAXVALSIZE && t.Key().Width <= MAXKEYSIZE {
otyp = Types[TUINTPTR]
otyp := types.NewPtr(bucket)
if !types.Haspointers(t.Val()) && !types.Haspointers(t.Key()) && t.Val().Width <= MAXVALSIZE && t.Key().Width <= MAXKEYSIZE {
otyp = types.Types[TUINTPTR]
}
ovf := makefield("overflow", otyp)
field = append(field, ovf)
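
As a concrete picture of what mapbucket assembles, here is a hand-written stand-in for the bucket of a map[int64]string, assuming BUCKETSIZE is 8 and that both key and value fit under the inline-size limits; strings contain pointers, so the overflow field stays a real pointer.

package main

import (
	"fmt"
	"unsafe"
)

type bmap struct {
	topbits  [8]uint8
	keys     [8]int64
	values   [8]string
	overflow unsafe.Pointer // would be uintptr if neither keys nor values had pointers
}

func main() {
	fmt.Println(unsafe.Sizeof(bmap{}), unsafe.Alignof(bmap{}))
}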
@ -186,25 +187,25 @@ func mapbucket(t *Type) *Type {
// Builds a type representing a Hmap structure for the given map type.
// Make sure this stays in sync with ../../../../runtime/hashmap.go!
func hmap(t *Type) *Type {
func hmap(t *types.Type) *types.Type {
if t.MapType().Hmap != nil {
return t.MapType().Hmap
}
bucket := mapbucket(t)
fields := []*Field{
makefield("count", Types[TINT]),
makefield("flags", Types[TUINT8]),
makefield("B", Types[TUINT8]),
makefield("noverflow", Types[TUINT16]),
makefield("hash0", Types[TUINT32]),
makefield("buckets", typPtr(bucket)),
makefield("oldbuckets", typPtr(bucket)),
makefield("nevacuate", Types[TUINTPTR]),
makefield("overflow", Types[TUNSAFEPTR]),
fields := []*types.Field{
makefield("count", types.Types[TINT]),
makefield("flags", types.Types[TUINT8]),
makefield("B", types.Types[TUINT8]),
makefield("noverflow", types.Types[TUINT16]),
makefield("hash0", types.Types[TUINT32]),
makefield("buckets", types.NewPtr(bucket)),
makefield("oldbuckets", types.NewPtr(bucket)),
makefield("nevacuate", types.Types[TUINTPTR]),
makefield("overflow", types.Types[TUNSAFEPTR]),
}
h := typ(TSTRUCT)
h := types.New(TSTRUCT)
h.SetNoalg(true)
h.SetLocal(t.Local())
h.SetFields(fields)
@ -214,7 +215,7 @@ func hmap(t *Type) *Type {
return h
}
func hiter(t *Type) *Type {
func hiter(t *types.Type) *types.Type {
if t.MapType().Hiter != nil {
return t.MapType().Hiter
}
@ -235,22 +236,22 @@ func hiter(t *Type) *Type {
// checkBucket uintptr
// }
// must match ../../../../runtime/hashmap.go:hiter.
var field [12]*Field
field[0] = makefield("key", typPtr(t.Key()))
field[1] = makefield("val", typPtr(t.Val()))
field[2] = makefield("t", typPtr(Types[TUINT8]))
field[3] = makefield("h", typPtr(hmap(t)))
field[4] = makefield("buckets", typPtr(mapbucket(t)))
field[5] = makefield("bptr", typPtr(mapbucket(t)))
field[6] = makefield("overflow0", Types[TUNSAFEPTR])
field[7] = makefield("overflow1", Types[TUNSAFEPTR])
field[8] = makefield("startBucket", Types[TUINTPTR])
field[9] = makefield("stuff", Types[TUINTPTR]) // offset+wrapped+B+I
field[10] = makefield("bucket", Types[TUINTPTR])
field[11] = makefield("checkBucket", Types[TUINTPTR])
var field [12]*types.Field
field[0] = makefield("key", types.NewPtr(t.Key()))
field[1] = makefield("val", types.NewPtr(t.Val()))
field[2] = makefield("t", types.NewPtr(types.Types[TUINT8]))
field[3] = makefield("h", types.NewPtr(hmap(t)))
field[4] = makefield("buckets", types.NewPtr(mapbucket(t)))
field[5] = makefield("bptr", types.NewPtr(mapbucket(t)))
field[6] = makefield("overflow0", types.Types[TUNSAFEPTR])
field[7] = makefield("overflow1", types.Types[TUNSAFEPTR])
field[8] = makefield("startBucket", types.Types[TUINTPTR])
field[9] = makefield("stuff", types.Types[TUINTPTR]) // offset+wrapped+B+I
field[10] = makefield("bucket", types.Types[TUINTPTR])
field[11] = makefield("checkBucket", types.Types[TUINTPTR])
// build iterator struct holding the above fields
i := typ(TSTRUCT)
i := types.New(TSTRUCT)
i.SetNoalg(true)
i.SetFields(field[:])
dowidth(i)
@ -264,7 +265,7 @@ func hiter(t *Type) *Type {
// f is method type, with receiver.
// return function type, receiver as first argument (or not).
func methodfunc(f *Type, receiver *Type) *Type {
func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
var in []*Node
if receiver != nil {
d := nod(ODCLFIELD, nil, nil)
@ -298,7 +299,7 @@ func methodfunc(f *Type, receiver *Type) *Type {
// methods returns the methods of the non-interface type t, sorted by name.
// Generates stub functions as needed.
func methods(t *Type) []*Sig {
func methods(t *types.Type) []*Sig {
// method type
mt := methtype(t)
@ -311,7 +312,7 @@ func methods(t *Type) []*Sig {
it := t
if !isdirectiface(it) {
it = typPtr(t)
it = types.NewPtr(t)
}
// make list of methods for t,
@ -386,7 +387,7 @@ func methods(t *Type) []*Sig {
}
// imethods returns the methods of the interface type t, sorted by name.
func imethods(t *Type) []*Sig {
func imethods(t *types.Type) []*Sig {
var methods []*Sig
for _, f := range t.Fields().Slice() {
if f.Type.Etype != TFUNC || f.Sym == nil {
@ -434,7 +435,7 @@ func imethods(t *Type) []*Sig {
return methods
}
func dimportpath(p *Pkg) {
func dimportpath(p *types.Pkg) {
if p.Pathsym != nil {
return
}
@ -460,11 +461,11 @@ func dimportpath(p *Pkg) {
p.Pathsym = s
}
func dgopkgpath(s *Sym, ot int, pkg *Pkg) int {
func dgopkgpath(s *types.Sym, ot int, pkg *types.Pkg) int {
return dgopkgpathLSym(Linksym(s), ot, pkg)
}
func dgopkgpathLSym(s *obj.LSym, ot int, pkg *Pkg) int {
func dgopkgpathLSym(s *obj.LSym, ot int, pkg *types.Pkg) int {
if pkg == nil {
return duintxxLSym(s, ot, 0, Widthptr)
}
@ -484,7 +485,7 @@ func dgopkgpathLSym(s *obj.LSym, ot int, pkg *Pkg) int {
}
// dgopkgpathOffLSym writes an offset relocation in s at offset ot to the pkg path symbol.
func dgopkgpathOffLSym(s *obj.LSym, ot int, pkg *Pkg) int {
func dgopkgpathOffLSym(s *obj.LSym, ot int, pkg *types.Pkg) int {
if pkg == nil {
return duintxxLSym(s, ot, 0, 4)
}
@ -504,7 +505,7 @@ func dgopkgpathOffLSym(s *obj.LSym, ot int, pkg *Pkg) int {
// isExportedField reports whether a struct field is exported.
// It also returns the package to use for PkgPath for an unexported field.
func isExportedField(ft *Field) (bool, *Pkg) {
func isExportedField(ft *types.Field) (bool, *types.Pkg) {
if ft.Sym != nil && ft.Embedded == 0 {
return exportname(ft.Sym.Name), ft.Sym.Pkg
} else {
@ -518,7 +519,7 @@ func isExportedField(ft *Field) (bool, *Pkg) {
}
// dnameField dumps a reflect.name for a struct field.
func dnameField(s *Sym, ot int, spkg *Pkg, ft *Field) int {
func dnameField(s *types.Sym, ot int, spkg *types.Pkg, ft *types.Field) int {
var name string
if ft.Sym != nil {
name = ft.Sym.Name
@ -532,7 +533,7 @@ func dnameField(s *Sym, ot int, spkg *Pkg, ft *Field) int {
}
// dnameData writes the contents of a reflect.name into s at offset ot.
func dnameData(s *obj.LSym, ot int, name, tag string, pkg *Pkg, exported bool) int {
func dnameData(s *obj.LSym, ot int, name, tag string, pkg *types.Pkg, exported bool) int {
if len(name) > 1<<16-1 {
Fatalf("name too long: %s", name)
}
@ -577,7 +578,7 @@ func dnameData(s *obj.LSym, ot int, name, tag string, pkg *Pkg, exported bool) i
var dnameCount int
// dname creates a reflect.name for a struct field or method.
func dname(name, tag string, pkg *Pkg, exported bool) *obj.LSym {
func dname(name, tag string, pkg *types.Pkg, exported bool) *obj.LSym {
// Write out data as "type.." to signal two things to the
// linker, first that when dynamically linking, the symbol
// should be moved to a relro section, and second that the
@ -610,7 +611,7 @@ func dname(name, tag string, pkg *Pkg, exported bool) *obj.LSym {
// dextratype dumps the fields of a runtime.uncommontype.
// dataAdd is the offset in bytes after the header where the
// backing array of the []method field is written (by dextratypeData).
func dextratype(s *Sym, ot int, t *Type, dataAdd int) int {
func dextratype(s *types.Sym, ot int, t *types.Type, dataAdd int) int {
m := methods(t)
if t.Sym == nil && len(m) == 0 {
return ot
@ -642,7 +643,7 @@ func dextratype(s *Sym, ot int, t *Type, dataAdd int) int {
return ot
}
func typePkg(t *Type) *Pkg {
func typePkg(t *types.Type) *types.Pkg {
tsym := t.Sym
if tsym == nil {
switch t.Etype {
@ -652,7 +653,7 @@ func typePkg(t *Type) *Pkg {
}
}
}
if tsym != nil && t != Types[t.Etype] && t != errortype {
if tsym != nil && t != types.Types[t.Etype] && t != types.Errortype {
return tsym.Pkg
}
return nil
@ -660,12 +661,12 @@ func typePkg(t *Type) *Pkg {
// dextratypeData dumps the backing array for the []method field of
// runtime.uncommontype.
func dextratypeData(s *Sym, ot int, t *Type) int {
func dextratypeData(s *types.Sym, ot int, t *types.Type) int {
lsym := Linksym(s)
for _, a := range methods(t) {
// ../../../../runtime/type.go:/method
exported := exportname(a.name)
var pkg *Pkg
var pkg *types.Pkg
if !exported && a.pkg != typePkg(t) {
pkg = a.pkg
}
@ -721,8 +722,8 @@ var kinds = []int{
// typeptrdata returns the length in bytes of the prefix of t
// containing pointer data. Anything after this offset is scalar data.
func typeptrdata(t *Type) int64 {
if !haspointers(t) {
func typeptrdata(t *types.Type) int64 {
if !types.Haspointers(t) {
return 0
}
@ -754,9 +755,9 @@ func typeptrdata(t *Type) int64 {
case TSTRUCT:
// Find the last field that has pointers.
var lastPtrField *Field
var lastPtrField *types.Field
for _, t1 := range t.Fields().Slice() {
if haspointers(t1.Type) {
if types.Haspointers(t1.Type) {
lastPtrField = t1
}
}
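
A quick worked example of the struct rule in typeptrdata: only the prefix up to and including the last pointer-bearing field counts, and trailing scalar fields are ignored.

package main

import (
	"fmt"
	"unsafe"
)

type example struct {
	a int64    // scalar prefix
	p *int     // last field that contains pointers
	b [16]byte // trailing scalar data, not counted
}

func main() {
	// prefix length = offset of p + size of a pointer
	ptrdata := unsafe.Offsetof(example{}.p) + unsafe.Sizeof((*int)(nil))
	fmt.Println(ptrdata, "of", unsafe.Sizeof(example{}), "bytes carry pointers")
}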
@ -781,10 +782,10 @@ const (
tflagNamed = 1 << 2
)
var dcommontype_algarray *Sym
var dcommontype_algarray *types.Sym
// dcommontype dumps the contents of a reflect.rtype (runtime._type).
func dcommontype(s *Sym, ot int, t *Type) int {
func dcommontype(s *types.Sym, ot int, t *types.Type) int {
if ot != 0 {
Fatalf("dcommontype %d", ot)
}
@ -795,15 +796,15 @@ func dcommontype(s *Sym, ot int, t *Type) int {
}
dowidth(t)
alg := algtype(t)
var algsym *Sym
var algsym *types.Sym
if alg == ASPECIAL || alg == AMEM {
algsym = dalgsym(t)
}
sptrWeak := true
var sptr *Sym
if !t.IsPtr() || t.ptrTo != nil {
tptr := typPtr(t)
var sptr *types.Sym
if !t.IsPtr() || t.PtrBase != nil {
tptr := types.NewPtr(t)
if t.Sym != nil || methods(tptr) != nil {
sptrWeak = false
}
@ -874,7 +875,7 @@ func dcommontype(s *Sym, ot int, t *Type) int {
ot = duint8(s, ot, t.Align) // fieldAlign
i = kinds[t.Etype]
if !haspointers(t) {
if !types.Haspointers(t) {
i |= obj.KindNoPointers
}
if isdirectiface(t) {
@ -905,7 +906,7 @@ func dcommontype(s *Sym, ot int, t *Type) int {
return ot
}
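
The header that dcommontype emits (size, alignment, kind, and so on) is the same information that package reflect later reads back out of the binary; a quick way to inspect a few of those fields from the reflect side:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	t := reflect.TypeOf((map[string][]byte)(nil))
	fmt.Println(t.Kind(), t.Size(), t.Align()) // map, pointer-sized, pointer-aligned
}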
func typesym(t *Type) *Sym {
func typesym(t *types.Type) *types.Sym {
name := t.ShortString()
// Use a separate symbol name for Noalg types for #17752.
@ -918,11 +919,11 @@ func typesym(t *Type) *Sym {
// tracksym returns the symbol for tracking use of field/method f, assumed
// to be a member of struct/interface type t.
func tracksym(t *Type, f *Field) *Sym {
func tracksym(t *types.Type, f *types.Field) *types.Sym {
return trackpkg.Lookup(t.ShortString() + "." + f.Sym.Name)
}
func typesymprefix(prefix string, t *Type) *Sym {
func typesymprefix(prefix string, t *types.Type) *types.Sym {
p := prefix + "." + t.ShortString()
s := typepkg.Lookup(p)
@ -931,50 +932,50 @@ func typesymprefix(prefix string, t *Type) *Sym {
return s
}
func typenamesym(t *Type) *Sym {
func typenamesym(t *types.Type) *types.Sym {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() {
Fatalf("typename %v", t)
}
s := typesym(t)
if s.Def == nil {
n := newnamel(src.NoXPos, s)
n.Type = Types[TUINT8]
n.Type = types.Types[TUINT8]
n.Class = PEXTERN
n.Typecheck = 1
s.Def = n
s.Def = asTypesNode(n)
signatlist = append(signatlist, t)
}
return s.Def.Sym
return asNode(s.Def).Sym
}
func typename(t *Type) *Node {
func typename(t *types.Type) *Node {
s := typenamesym(t)
n := nod(OADDR, s.Def, nil)
n.Type = typPtr(s.Def.Type)
n := nod(OADDR, asNode(s.Def), nil)
n.Type = types.NewPtr(asNode(s.Def).Type)
n.SetAddable(true)
n.Typecheck = 1
return n
}
func itabname(t, itype *Type) *Node {
func itabname(t, itype *types.Type) *Node {
if t == nil || (t.IsPtr() && t.Elem() == nil) || t.IsUntyped() || !itype.IsInterface() || itype.IsEmptyInterface() {
Fatalf("itabname(%v, %v)", t, itype)
}
s := itabpkg.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
n := newname(s)
n.Type = Types[TUINT8]
n.Type = types.Types[TUINT8]
n.Class = PEXTERN
n.Typecheck = 1
s.Def = n
s.Def = asTypesNode(n)
itabs = append(itabs, itabEntry{t: t, itype: itype, sym: s})
}
n := nod(OADDR, s.Def, nil)
n.Type = typPtr(s.Def.Type)
n := nod(OADDR, asNode(s.Def), nil)
n.Type = types.NewPtr(asNode(s.Def).Type)
n.SetAddable(true)
n.Typecheck = 1
return n
@ -982,7 +983,7 @@ func itabname(t, itype *Type) *Node {
// isreflexive reports whether t has a reflexive equality operator.
// That is, if x==x for all x of type t.
func isreflexive(t *Type) bool {
func isreflexive(t *types.Type) bool {
switch t.Etype {
case TBOOL,
TINT,
@ -1029,7 +1030,7 @@ func isreflexive(t *Type) bool {
// needkeyupdate reports whether map updates with t as a key
// need the key to be updated.
func needkeyupdate(t *Type) bool {
func needkeyupdate(t *types.Type) bool {
switch t.Etype {
case TBOOL,
TINT,
@ -1074,12 +1075,12 @@ func needkeyupdate(t *Type) bool {
}
}
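
Two quick checks behind the float and string special cases in isreflexive and needkeyupdate: NaN breaks x == x, and +0 and -0 compare equal while differing in representation, so a map update may need to overwrite the stored key.

package main

import (
	"fmt"
	"math"
)

func main() {
	nan := math.NaN()
	fmt.Println(nan == nan) // false: float64 keys are not reflexive

	negzero := math.Copysign(0, -1)
	fmt.Println(negzero == 0, math.Signbit(negzero)) // true true: equal, yet distinguishable
}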
func dtypesym(t *Type) *Sym {
func dtypesym(t *types.Type) *types.Sym {
// Replace byte, rune aliases with real type.
// They've been separate internally to make error messages
// better, but we have to merge them in the reflect tables.
if t == bytetype || t == runetype {
t = Types[t.Etype]
if t == types.Bytetype || t == types.Runetype {
t = types.Types[t.Etype]
}
if t.IsUntyped() {
@ -1105,7 +1106,7 @@ func dtypesym(t *Type) *Sym {
dupok = obj.DUPOK
}
if myimportpath == "runtime" && (tbase == Types[tbase.Etype] || tbase == bytetype || tbase == runetype || tbase == errortype) { // int, float, etc
if myimportpath == "runtime" && (tbase == types.Types[tbase.Etype] || tbase == types.Bytetype || tbase == types.Runetype || tbase == types.Errortype) { // int, float, etc
goto ok
}
@ -1127,7 +1128,7 @@ ok:
case TARRAY:
// ../../../../runtime/type.go:/arrayType
s1 := dtypesym(t.Elem())
t2 := typSlice(t.Elem())
t2 := types.NewSlice(t.Elem())
s2 := dtypesym(t2)
ot = dcommontype(s, ot, t)
ot = dsymptr(s, ot, s1, 0)
@ -1199,8 +1200,8 @@ ok:
// ../../../../runtime/type.go:/interfaceType
ot = dcommontype(s, ot, t)
var tpkg *Pkg
if t.Sym != nil && t != Types[t.Etype] && t != errortype {
var tpkg *types.Pkg
if t.Sym != nil && t != types.Types[t.Etype] && t != types.Errortype {
tpkg = t.Sym.Pkg
}
ot = dgopkgpath(s, ot, tpkg)
@ -1215,7 +1216,7 @@ ok:
for _, a := range m {
// ../../../../runtime/type.go:/imethod
exported := exportname(a.name)
var pkg *Pkg
var pkg *types.Pkg
if !exported && a.pkg != tpkg {
pkg = a.pkg
}
@ -1362,7 +1363,7 @@ func peekitabs() {
// for the given concrete type and interface
// type, return the (sorted) set of methods
// on the concrete type that implement the interface
func genfun(t, it *Type) []*obj.LSym {
func genfun(t, it *types.Type) []*obj.LSym {
if t == nil || it == nil {
return nil
}
@ -1431,7 +1432,7 @@ func dumptypestructs() {
t := signatlist[i]
dtypesym(t)
if t.Sym != nil {
dtypesym(typPtr(t))
dtypesym(types.NewPtr(t))
}
}
@ -1502,17 +1503,17 @@ func dumptypestructs() {
// another possible choice would be package main,
// but using runtime means fewer copies in .6 files.
if myimportpath == "runtime" {
for i := EType(1); i <= TBOOL; i++ {
dtypesym(typPtr(Types[i]))
for i := types.EType(1); i <= TBOOL; i++ {
dtypesym(types.NewPtr(types.Types[i]))
}
dtypesym(typPtr(Types[TSTRING]))
dtypesym(typPtr(Types[TUNSAFEPTR]))
dtypesym(types.NewPtr(types.Types[TSTRING]))
dtypesym(types.NewPtr(types.Types[TUNSAFEPTR]))
// emit type structs for error and func(error) string.
// The latter is the type of an auto-generated wrapper.
dtypesym(typPtr(errortype))
dtypesym(types.NewPtr(types.Errortype))
dtypesym(functype(nil, []*Node{anonfield(errortype)}, []*Node{anonfield(Types[TSTRING])}))
dtypesym(functype(nil, []*Node{anonfield(types.Errortype)}, []*Node{anonfield(types.Types[TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(Runtimepkg)
@ -1527,16 +1528,16 @@ func dumptypestructs() {
}
}
type pkgByPath []*Pkg
type pkgByPath []*types.Pkg
func (a pkgByPath) Len() int { return len(a) }
func (a pkgByPath) Less(i, j int) bool { return a[i].Path < a[j].Path }
func (a pkgByPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
func dalgsym(t *Type) *Sym {
var s *Sym
var hashfunc *Sym
var eqfunc *Sym
func dalgsym(t *types.Type) *types.Sym {
var s *types.Sym
var hashfunc *types.Sym
var eqfunc *types.Sym
// dalgsym is only called for a type that needs an algorithm table,
// which implies that the type is comparable (or else it would use ANOEQ).
@ -1637,7 +1638,7 @@ const maxPtrmaskBytes = 2048
// dgcsym emits and returns a data symbol containing GC information for type t,
// along with a boolean reporting whether the UseGCProg bit should be set in
// the type kind, and the ptrdata field to record in the reflect type information.
func dgcsym(t *Type) (sym *Sym, useGCProg bool, ptrdata int64) {
func dgcsym(t *types.Type) (sym *types.Sym, useGCProg bool, ptrdata int64) {
ptrdata = typeptrdata(t)
if ptrdata/int64(Widthptr) <= maxPtrmaskBytes*8 {
sym = dgcptrmask(t)
@ -1650,7 +1651,7 @@ func dgcsym(t *Type) (sym *Sym, useGCProg bool, ptrdata int64) {
}
// dgcptrmask emits and returns the symbol containing a pointer mask for type t.
func dgcptrmask(t *Type) *Sym {
func dgcptrmask(t *types.Type) *types.Sym {
ptrmask := make([]byte, (typeptrdata(t)/int64(Widthptr)+7)/8)
fillptrmask(t, ptrmask)
p := fmt.Sprintf("gcbits.%x", ptrmask)
@ -1669,11 +1670,11 @@ func dgcptrmask(t *Type) *Sym {
// fillptrmask fills in ptrmask with 1s corresponding to the
// word offsets in t that hold pointers.
// ptrmask is assumed to fit at least typeptrdata(t)/Widthptr bits.
func fillptrmask(t *Type, ptrmask []byte) {
func fillptrmask(t *types.Type, ptrmask []byte) {
for i := range ptrmask {
ptrmask[i] = 0
}
if !haspointers(t) {
if !types.Haspointers(t) {
return
}
@ -1693,7 +1694,7 @@ func fillptrmask(t *Type, ptrmask []byte) {
// along with the size of the data described by the program (in the range [typeptrdata(t), t.Width]).
// In practice, the size is typeptrdata(t) except for non-trivial arrays.
// For non-trivial arrays, the program describes the full t.Width size.
func dgcprog(t *Type) (*Sym, int64) {
func dgcprog(t *types.Type) (*types.Sym, int64) {
dowidth(t)
if t.Width == BADWIDTH {
Fatalf("dgcprog: %v badwidth", t)
@ -1711,14 +1712,14 @@ func dgcprog(t *Type) (*Sym, int64) {
}
type GCProg struct {
sym *Sym
sym *types.Sym
symoff int
w gcprog.Writer
}
var Debug_gcprog int // set by -d gcprog
func (p *GCProg) init(sym *Sym) {
func (p *GCProg) init(sym *types.Sym) {
p.sym = sym
p.symoff = 4 // first 4 bytes hold program length
p.w.Init(p.writeByte)
@ -1741,9 +1742,9 @@ func (p *GCProg) end() {
}
}
func (p *GCProg) emit(t *Type, offset int64) {
func (p *GCProg) emit(t *types.Type, offset int64) {
dowidth(t)
if !haspointers(t) {
if !types.Haspointers(t) {
return
}
if t.Width == int64(Widthptr) {
@ -1808,13 +1809,13 @@ func zeroaddr(size int64) *Node {
s := mappkg.Lookup("zero")
if s.Def == nil {
x := newname(s)
x.Type = Types[TUINT8]
x.Type = types.Types[TUINT8]
x.Class = PEXTERN
x.Typecheck = 1
s.Def = x
s.Def = asTypesNode(x)
}
z := nod(OADDR, s.Def, nil)
z.Type = typPtr(Types[TUINT8])
z := nod(OADDR, asNode(s.Def), nil)
z.Type = types.NewPtr(types.Types[TUINT8])
z.SetAddable(true)
z.Typecheck = 1
return z
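
The hunks above store front-end nodes into Sym.Def through asTypesNode and read them back with asNode. Those helpers are not shown in this section; the sketch below is one plausible shape for them, assuming they are plain unsafe pointer casts over a placeholder types.Node type, and is not taken from the diff.

// Editorial sketch, not part of the diff: a possible implementation of the
// *Node <-> *types.Node conversion helpers used above, inside package gc.
package gc

import (
	"unsafe"

	"cmd/compile/internal/types"
)

// asNode recovers the front end's *Node from the opaque *types.Node
// stored in fields such as Sym.Def.
func asNode(n *types.Node) *Node { return (*Node)(unsafe.Pointer(n)) }

// asTypesNode wraps a front-end *Node as the placeholder *types.Node so
// it can be stored in fields owned by the types package.
func asTypesNode(n *Node) *types.Node { return (*types.Node)(unsafe.Pointer(n)) }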


@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"reflect"
"sort"
"testing"
@ -12,24 +13,24 @@ import (
func TestSortingByMethodNameAndPackagePath(t *testing.T) {
data := []*Sig{
&Sig{name: "b", pkg: &Pkg{Path: "abc"}},
&Sig{name: "b", pkg: &types.Pkg{Path: "abc"}},
&Sig{name: "b", pkg: nil},
&Sig{name: "c", pkg: nil},
&Sig{name: "c", pkg: &Pkg{Path: "uvw"}},
&Sig{name: "c", pkg: &types.Pkg{Path: "uvw"}},
&Sig{name: "c", pkg: nil},
&Sig{name: "b", pkg: &Pkg{Path: "xyz"}},
&Sig{name: "a", pkg: &Pkg{Path: "abc"}},
&Sig{name: "b", pkg: &types.Pkg{Path: "xyz"}},
&Sig{name: "a", pkg: &types.Pkg{Path: "abc"}},
&Sig{name: "b", pkg: nil},
}
want := []*Sig{
&Sig{name: "a", pkg: &Pkg{Path: "abc"}},
&Sig{name: "a", pkg: &types.Pkg{Path: "abc"}},
&Sig{name: "b", pkg: nil},
&Sig{name: "b", pkg: nil},
&Sig{name: "b", pkg: &Pkg{Path: "abc"}},
&Sig{name: "b", pkg: &Pkg{Path: "xyz"}},
&Sig{name: "b", pkg: &types.Pkg{Path: "abc"}},
&Sig{name: "b", pkg: &types.Pkg{Path: "xyz"}},
&Sig{name: "c", pkg: nil},
&Sig{name: "c", pkg: nil},
&Sig{name: "c", pkg: &Pkg{Path: "uvw"}},
&Sig{name: "c", pkg: &types.Pkg{Path: "uvw"}},
}
if len(data) != len(want) {
t.Fatal("want and data must match")
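
For reference, a comparator consistent with the ordering this test expects: sort by method name, then nil package before named packages, then by package path. This is an editorial sketch derived from the want slice above, not the comparator the gc package actually uses.

func sigLess(a, b *Sig) bool {
	if a.name != b.name {
		return a.name < b.name
	}
	if a.pkg == b.pkg {
		return false // includes the case where both are nil
	}
	if a.pkg == nil {
		return true // nil package sorts before any named package
	}
	if b.pkg == nil {
		return false
	}
	return a.pkg.Path < b.pkg.Path
}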


@ -4,6 +4,8 @@
package gc
import "cmd/compile/internal/types"
// select
func typecheckselect(sel *Node) {
var ncase *Node
@ -225,21 +227,21 @@ func walkselect(sel *Node) {
case OSEND:
// if selectnbsend(c, v) { body } else { default body }
ch := n.Left
r.Left = mkcall1(chanfn("selectnbsend", 2, ch.Type), Types[TBOOL], &r.Ninit, ch, n.Right)
r.Left = mkcall1(chanfn("selectnbsend", 2, ch.Type), types.Types[TBOOL], &r.Ninit, ch, n.Right)
case OSELRECV:
// if c != nil && selectnbrecv(&v, c) { body } else { default body }
r = nod(OIF, nil, nil)
r.Ninit.Set(cas.Ninit.Slice())
ch := n.Right.Left
r.Left = mkcall1(chanfn("selectnbrecv", 2, ch.Type), Types[TBOOL], &r.Ninit, n.Left, ch)
r.Left = mkcall1(chanfn("selectnbrecv", 2, ch.Type), types.Types[TBOOL], &r.Ninit, n.Left, ch)
case OSELRECV2:
// if c != nil && selectnbrecv2(&v, c) { body } else { default body }
r = nod(OIF, nil, nil)
r.Ninit.Set(cas.Ninit.Slice())
ch := n.Right.Left
r.Left = mkcall1(chanfn("selectnbrecv2", 2, ch.Type), Types[TBOOL], &r.Ninit, n.Left, n.List.First(), ch)
r.Left = mkcall1(chanfn("selectnbrecv2", 2, ch.Type), types.Types[TBOOL], &r.Ninit, n.Left, n.List.First(), ch)
}
r.Left = typecheck(r.Left, Erv)
@ -258,7 +260,7 @@ func walkselect(sel *Node) {
r = nod(OAS, selv, nil)
r = typecheck(r, Etop)
init = append(init, r)
var_ = conv(conv(nod(OADDR, selv, nil), Types[TUNSAFEPTR]), typPtr(Types[TUINT8]))
var_ = conv(conv(nod(OADDR, selv, nil), types.Types[TUNSAFEPTR]), types.NewPtr(types.Types[TUINT8]))
r = mkcall("newselect", nil, nil, var_, nodintconst(selv.Type.Width), nodintconst(sel.Xoffset))
r = typecheck(r, Etop)
init = append(init, r)
@ -297,8 +299,8 @@ func walkselect(sel *Node) {
// run the select
setlineno(sel)
chosen = temp(Types[TINT])
r = nod(OAS, chosen, mkcall("selectgo", Types[TINT], nil, var_))
chosen = temp(types.Types[TINT])
r = nod(OAS, chosen, mkcall("selectgo", types.Types[TINT], nil, var_))
r = typecheck(r, Etop)
init = append(init, r)
@ -327,29 +329,29 @@ out:
}
// Keep in sync with src/runtime/select.go.
func selecttype(size int64) *Type {
func selecttype(size int64) *types.Type {
// TODO(dvyukov): it's possible to generate Scase only once
// and then cache; and also cache Select per size.
scase := tostruct([]*Node{
namedfield("elem", typPtr(Types[TUINT8])),
namedfield("chan", typPtr(Types[TUINT8])),
namedfield("pc", Types[TUINTPTR]),
namedfield("kind", Types[TUINT16]),
namedfield("receivedp", typPtr(Types[TUINT8])),
namedfield("releasetime", Types[TUINT64]),
namedfield("elem", types.NewPtr(types.Types[TUINT8])),
namedfield("chan", types.NewPtr(types.Types[TUINT8])),
namedfield("pc", types.Types[TUINTPTR]),
namedfield("kind", types.Types[TUINT16]),
namedfield("receivedp", types.NewPtr(types.Types[TUINT8])),
namedfield("releasetime", types.Types[TUINT64]),
})
scase.SetNoalg(true)
scase.SetLocal(true)
sel := tostruct([]*Node{
namedfield("tcase", Types[TUINT16]),
namedfield("ncase", Types[TUINT16]),
namedfield("pollorder", typPtr(Types[TUINT8])),
namedfield("lockorder", typPtr(Types[TUINT8])),
namedfield("scase", typArray(scase, size)),
namedfield("lockorderarr", typArray(Types[TUINT16], size)),
namedfield("pollorderarr", typArray(Types[TUINT16], size)),
namedfield("tcase", types.Types[TUINT16]),
namedfield("ncase", types.Types[TUINT16]),
namedfield("pollorder", types.NewPtr(types.Types[TUINT8])),
namedfield("lockorder", types.NewPtr(types.Types[TUINT8])),
namedfield("scase", types.NewArray(scase, size)),
namedfield("lockorderarr", types.NewArray(types.Types[TUINT16], size)),
namedfield("pollorderarr", types.NewArray(types.Types[TUINT16], size)),
})
sel.SetNoalg(true)
sel.SetLocal(true)
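
The constructor renames in this file follow one pattern. The snippet below is illustrative only (it is not part of the commit) and lines up a few of the new types.New* calls with the old helpers they replace.

func constructorExamples() {
	elem := types.Types[TUINT8]
	ptr := types.NewPtr(elem)                    // was typPtr(Types[TUINT8])
	arr := types.NewArray(elem, 8)               // was typArray(Types[TUINT8], 8)
	sl := types.NewSlice(elem)                   // was typSlice(Types[TUINT8])
	ch := types.NewChan(elem, types.Cboth)       // was typChan(Types[TUINT8], Cboth)
	m := types.NewMap(types.Types[TSTRING], ptr) // was typMap(Types[TSTRING], typPtr(...))
	_, _, _, _ = arr, sl, ch, m
}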


@ -4,7 +4,10 @@
package gc
import "fmt"
import (
"cmd/compile/internal/types"
"fmt"
)
// static initialization
const (
@ -43,7 +46,7 @@ func init1(n *Node, out *[]*Node) {
if n.Left != nil && n.Type != nil && n.Left.Op == OTYPE && n.Class == PFUNC {
// Methods called as Type.Method(receiver, ...).
// Definitions for method expressions are stored in type->nname.
init1(n.Type.Nname(), out)
init1(asNode(n.Type.FuncType().Nname), out)
}
if n.Op != ONAME {
@ -214,7 +217,7 @@ func init2(n *Node, out *[]*Node) {
init2list(n.Func.Closure.Nbody, out)
}
if n.Op == ODOTMETH || n.Op == OCALLPART {
init2(n.Type.Nname(), out)
init2(asNode(n.Type.FuncType().Nname), out)
}
}
@ -424,7 +427,7 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
initplan(r)
// Init slice.
bound := r.Right.Int64()
ta := typArray(r.Type.Elem(), bound)
ta := types.NewArray(r.Type.Elem(), bound)
a := staticname(ta)
inittemps[r] = a
n := *l
@ -535,7 +538,7 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
*out = append(*out, nod(OAS, a, val))
}
ptr := nod(OADDR, a, nil)
n.Type = typPtr(val.Type)
n.Type = types.NewPtr(val.Type)
gdata(&n, ptr, Widthptr)
}
@ -574,7 +577,7 @@ var statuniqgen int // name generator for static temps
// staticname returns a name backed by a static data symbol.
// Callers should call n.Name.SetReadonly(true) on the
// returned node for readonly nodes.
func staticname(t *Type) *Node {
func staticname(t *types.Type) *Node {
// Don't use lookupN; it interns the resulting string, but these are all unique.
n := newname(lookup(fmt.Sprintf("statictmp_%d", statuniqgen)))
statuniqgen++
@ -768,7 +771,7 @@ func fixedlit(ctxt initContext, kind initKind, n *Node, var_ *Node, init *Nodes)
func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
	// make an array type corresponding to the number of elements we have
t := typArray(n.Type.Elem(), n.Right.Int64())
t := types.NewArray(n.Type.Elem(), n.Right.Int64())
dowidth(t)
if ctxt == inNonInitFunction {
@ -786,7 +789,7 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
}
var v Node
nodconst(&v, Types[TINT], t.NumElem())
nodconst(&v, types.Types[TINT], t.NumElem())
nam.Xoffset += int64(array_array)
gdata(&nam, nod(OADDR, vstat, nil), Widthptr)
@ -831,7 +834,7 @@ func slicelit(ctxt initContext, n *Node, var_ *Node, init *Nodes) {
}
// make new auto *array (3 declare)
vauto := temp(typPtr(t))
vauto := temp(types.NewPtr(t))
// set auto to point at new temp or heap (3 assign)
var a *Node
@ -946,8 +949,8 @@ func maplit(n *Node, m *Node, init *Nodes) {
// For a large number of static entries, put them in an array and loop.
// build types [count]Tindex and [count]Tvalue
tk := typArray(n.Type.Key(), int64(len(stat)))
tv := typArray(n.Type.Val(), int64(len(stat)))
tk := types.NewArray(n.Type.Key(), int64(len(stat)))
tv := types.NewArray(n.Type.Val(), int64(len(stat)))
// TODO(josharian): suppress alg generation for these types?
dowidth(tk)
@ -982,7 +985,7 @@ func maplit(n *Node, m *Node, init *Nodes) {
// for i = 0; i < len(vstatk); i++ {
// map[vstatk[i]] = vstatv[i]
// }
i := temp(Types[TINT])
i := temp(types.Types[TINT])
rhs := nod(OINDEX, vstatv, i)
rhs.SetBounded(true)
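
As a reading aid for the slicelit changes above, here is the rough source-level shape of the lowering for a fixed slice literal; the variable names are invented for illustration, and the real recipe in sinit.go handles more cases (static data, heap allocation, dynamic elements).

func slicelitShape() []int {
	// s := []int{1, 2, 3} becomes, roughly:
	var vstat = [3]int{1, 2, 3} // backing array; its type is built with types.NewArray(elem, 3)
	vauto := &vstat             // auto *[3]int via temp(types.NewPtr(t)); may point to heap instead
	return vauto[:]             // the slice variable is a full slice of the backing array
}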


@ -7,6 +7,7 @@
package gc
import (
"cmd/compile/internal/types"
"reflect"
"testing"
"unsafe"
@ -26,20 +27,21 @@ func TestSizeof(t *testing.T) {
{Name{}, 36, 56},
{Param{}, 28, 56},
{Node{}, 84, 136},
{Sym{}, 60, 104},
{Type{}, 52, 88},
{MapType{}, 20, 40},
{ForwardType{}, 20, 32},
{FuncType{}, 28, 48},
{StructType{}, 12, 24},
{InterType{}, 4, 8},
{ChanType{}, 8, 16},
{ArrayType{}, 12, 16},
{DDDFieldType{}, 4, 8},
{FuncArgsType{}, 4, 8},
{ChanArgsType{}, 4, 8},
{PtrType{}, 4, 8},
{SliceType{}, 4, 8},
// TODO(gri) test the ones below in the types package
{types.Sym{}, 60, 104},
{types.Type{}, 52, 88},
{types.MapType{}, 20, 40},
{types.ForwardType{}, 20, 32},
{types.FuncType{}, 28, 48},
{types.StructType{}, 12, 24},
{types.InterType{}, 4, 8},
{types.ChanType{}, 8, 16},
{types.ArrayType{}, 12, 16},
{types.DDDFieldType{}, 4, 8},
{types.FuncArgsType{}, 4, 8},
{types.ChanArgsType{}, 4, 8},
{types.PtrType{}, 4, 8},
{types.SliceType{}, 4, 8},
}
for _, tt := range tests {
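
The size table above is compared against expectations for 32- and 64-bit targets. The standalone program below (with a made-up struct and no expected values) only illustrates the reflect/unsafe technique such a check relies on.

package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

// example stands in for a compiler struct whose size is being pinned down.
type example struct {
	ptr  *int
	off  int64
	kind uint8
}

func main() {
	is64bit := unsafe.Sizeof(uintptr(0)) == 8 // pointer size distinguishes 32- from 64-bit
	got := reflect.TypeOf(example{}).Size()   // actual in-memory size, padding included
	fmt.Printf("64-bit: %v, size of example: %d bytes\n", is64bit, got)
}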

File diff suppressed because it is too large.


@ -6,6 +6,7 @@ package gc
import (
"bytes"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"crypto/md5"
@ -209,13 +210,13 @@ func setlineno(n *Node) src.XPos {
return lno
}
func lookup(name string) *Sym {
func lookup(name string) *types.Sym {
return localpkg.Lookup(name)
}
// lookupN looks up the symbol starting with prefix and ending with
// the decimal n. If prefix is too long, lookupN panics.
func lookupN(prefix string, n int) *Sym {
func lookupN(prefix string, n int) *types.Sym {
var buf [20]byte // plenty long enough for all current users
copy(buf[:], prefix)
b := strconv.AppendInt(buf[:len(prefix)], int64(n), 10)
@ -241,49 +242,7 @@ func autolabel(prefix string) *Node {
return newname(lookupN(prefix, int(n)))
}
var initSyms []*Sym
var nopkg = &Pkg{
Syms: make(map[string]*Sym),
}
func (pkg *Pkg) Lookup(name string) *Sym {
s, _ := pkg.LookupOK(name)
return s
}
// LookupOK looks up name in pkg and reports whether it previously existed.
func (pkg *Pkg) LookupOK(name string) (s *Sym, existed bool) {
if pkg == nil {
pkg = nopkg
}
if s := pkg.Syms[name]; s != nil {
return s, true
}
s = &Sym{
Name: name,
Pkg: pkg,
}
if name == "init" {
initSyms = append(initSyms, s)
}
pkg.Syms[name] = s
return s, false
}
func (pkg *Pkg) LookupBytes(name []byte) *Sym {
if pkg == nil {
pkg = nopkg
}
if s := pkg.Syms[string(name)]; s != nil {
return s
}
str := internString(name)
return pkg.Lookup(str)
}
func restrictlookup(name string, pkg *Pkg) *Sym {
func restrictlookup(name string, pkg *types.Pkg) *types.Sym {
if !exportname(name) && pkg != localpkg {
yyerror("cannot refer to unexported name %s.%s", pkg.Name, name)
}
@ -292,8 +251,8 @@ func restrictlookup(name string, pkg *Pkg) *Sym {
// find all the exported symbols in package opkg
// and make them available in the current package
func importdot(opkg *Pkg, pack *Node) {
var s1 *Sym
func importdot(opkg *types.Pkg, pack *Node) {
var s1 *types.Sym
var pkgerror string
n := 0
@ -313,11 +272,11 @@ func importdot(opkg *Pkg, pack *Node) {
s1.Def = s.Def
s1.Block = s.Block
if s1.Def.Name == nil {
Dump("s1def", s1.Def)
if asNode(s1.Def).Name == nil {
Dump("s1def", asNode(s1.Def))
Fatalf("missing Name")
}
s1.Def.Name.Pack = pack
asNode(s1.Def).Name.Pack = pack
s1.Origpkg = opkg
n++
}
@ -364,7 +323,7 @@ func nodl(pos src.XPos, op Op, nleft, nright *Node) *Node {
}
// newname returns a new ONAME Node associated with symbol s.
func newname(s *Sym) *Node {
func newname(s *types.Sym) *Node {
n := newnamel(lineno, s)
n.Name.Curfn = Curfn
return n
@ -372,7 +331,7 @@ func newname(s *Sym) *Node {
// newname returns a new ONAME Node associated with symbol s at position pos.
// The caller is responsible for setting n.Name.Curfn.
func newnamel(pos src.XPos, s *Sym) *Node {
func newnamel(pos src.XPos, s *types.Sym) *Node {
if s == nil {
Fatalf("newnamel nil")
}
@ -397,7 +356,7 @@ func newnamel(pos src.XPos, s *Sym) *Node {
// nodSym makes a Node with Op op and with the Left field set to left
// and the Sym field set to sym. This is for ODOT and friends.
func nodSym(op Op, left *Node, sym *Sym) *Node {
func nodSym(op Op, left *Node, sym *types.Sym) *Node {
n := nod(op, left, nil)
n.Sym = sym
return n
@ -413,7 +372,7 @@ func saveorignode(n *Node) {
}
// methcmp sorts by symbol, then by package path for unexported symbols.
type methcmp []*Field
type methcmp []*types.Field
func (x methcmp) Len() int { return len(x) }
func (x methcmp) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
@ -446,7 +405,7 @@ func nodintconst(v int64) *Node {
c.SetAddable(true)
c.SetVal(Val{new(Mpint)})
c.Val().U.(*Mpint).SetInt64(v)
c.Type = Types[TIDEAL]
c.Type = types.Types[TIDEAL]
return c
}
@ -455,11 +414,11 @@ func nodfltconst(v *Mpflt) *Node {
c.SetAddable(true)
c.SetVal(Val{newMpflt()})
c.Val().U.(*Mpflt).Set(v)
c.Type = Types[TIDEAL]
c.Type = types.Types[TIDEAL]
return c
}
func nodconst(n *Node, t *Type, v int64) {
func nodconst(n *Node, t *types.Type, v int64) {
*n = Node{}
n.Op = OLITERAL
n.SetAddable(true)
@ -475,14 +434,14 @@ func nodconst(n *Node, t *Type, v int64) {
func nodnil() *Node {
c := nodintconst(0)
c.SetVal(Val{new(NilVal)})
c.Type = Types[TNIL]
c.Type = types.Types[TNIL]
return c
}
func nodbool(b bool) *Node {
c := nodintconst(0)
c.SetVal(Val{b})
c.Type = idealbool
c.Type = types.Idealbool
return c
}
@ -531,7 +490,7 @@ func isnil(n *Node) bool {
return Isconst(n.Orig, CTNIL)
}
func isptrto(t *Type, et EType) bool {
func isptrto(t *types.Type, et types.EType) bool {
if t == nil {
return false
}
@ -555,14 +514,14 @@ func isblank(n *Node) bool {
return isblanksym(n.Sym)
}
func isblanksym(s *Sym) bool {
func isblanksym(s *types.Sym) bool {
return s != nil && s.Name == "_"
}
// methtype returns the underlying type, if any,
// that owns methods with receiver parameter t.
// The result is either a named type or an anonymous struct.
func methtype(t *Type) *Type {
func methtype(t *types.Type) *types.Type {
if t == nil {
return nil
}
@ -594,7 +553,7 @@ func methtype(t *Type) *Type {
return nil
}
func cplxsubtype(et EType) EType {
func cplxsubtype(et types.EType) types.EType {
switch et {
case TCOMPLEX64:
return TFLOAT32
@ -613,21 +572,21 @@ func cplxsubtype(et EType) EType {
// named, it is only identical to the other if they are the same
// pointer (t1 == t2), so there's no chance of chasing cycles
// ad infinitum, so no need for a depth counter.
func eqtype(t1, t2 *Type) bool {
func eqtype(t1, t2 *types.Type) bool {
return eqtype1(t1, t2, true, nil)
}
// eqtypeIgnoreTags is like eqtype but it ignores struct tags for struct identity.
func eqtypeIgnoreTags(t1, t2 *Type) bool {
func eqtypeIgnoreTags(t1, t2 *types.Type) bool {
return eqtype1(t1, t2, false, nil)
}
type typePair struct {
t1 *Type
t2 *Type
t1 *types.Type
t2 *types.Type
}
func eqtype1(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) bool {
func eqtype1(t1, t2 *types.Type, cmpTags bool, assumedEqual map[typePair]struct{}) bool {
if t1 == t2 {
return true
}
@ -639,9 +598,9 @@ func eqtype1(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) boo
// separate for error messages. Treat them as equal.
switch t1.Etype {
case TUINT8:
return (t1 == Types[TUINT8] || t1 == bytetype) && (t2 == Types[TUINT8] || t2 == bytetype)
return (t1 == types.Types[TUINT8] || t1 == types.Bytetype) && (t2 == types.Types[TUINT8] || t2 == types.Bytetype)
case TINT32:
return (t1 == Types[TINT32] || t1 == runetype) && (t2 == Types[TINT32] || t2 == runetype)
return (t1 == types.Types[TINT32] || t1 == types.Runetype) && (t2 == types.Types[TINT32] || t2 == types.Runetype)
default:
return false
}
@ -686,7 +645,7 @@ func eqtype1(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) boo
// Check parameters and result parameters for type equality.
// We intentionally ignore receiver parameters for type
// equality, because they're never relevant.
for _, f := range paramsResults {
for _, f := range types.ParamsResults {
// Loop over fields in structs, ignoring argument names.
fs1, fs2 := f(t1).FieldSlice(), f(t2).FieldSlice()
if len(fs1) != len(fs2) {
@ -724,7 +683,7 @@ func eqtype1(t1, t2 *Type, cmpTags bool, assumedEqual map[typePair]struct{}) boo
// Are t1 and t2 equal struct types when field names are ignored?
// For deciding whether the result struct from g can be copied
// directly when compiling f(g()).
func eqtypenoname(t1 *Type, t2 *Type) bool {
func eqtypenoname(t1 *types.Type, t2 *types.Type) bool {
if t1 == nil || t2 == nil || !t1.IsStruct() || !t2.IsStruct() {
return false
}
@ -744,7 +703,7 @@ func eqtypenoname(t1 *Type, t2 *Type) bool {
// Is type src assignment compatible to type dst?
// If so, return op code to use in conversion.
// If not, return 0.
func assignop(src *Type, dst *Type, why *string) Op {
func assignop(src *types.Type, dst *types.Type, why *string) Op {
if why != nil {
*why = ""
}
@ -791,7 +750,7 @@ func assignop(src *Type, dst *Type, why *string) Op {
// 3. dst is an interface type and src implements dst.
if dst.IsInterface() && src.Etype != TNIL {
var missing, have *Field
var missing, have *types.Field
var ptr int
if implements(src, dst, &missing, &have, &ptr) {
return OCONVIFACE
@ -831,7 +790,7 @@ func assignop(src *Type, dst *Type, why *string) Op {
}
if src.IsInterface() && dst.Etype != TBLANK {
var missing, have *Field
var missing, have *types.Field
var ptr int
if why != nil && implements(dst, src, &missing, &have, &ptr) {
*why = ": need type assertion"
@ -842,7 +801,7 @@ func assignop(src *Type, dst *Type, why *string) Op {
// 4. src is a bidirectional channel value, dst is a channel type,
// src and dst have identical element types, and
// either src or dst is not a named type.
if src.IsChan() && src.ChanDir() == Cboth && dst.IsChan() {
if src.IsChan() && src.ChanDir() == types.Cboth && dst.IsChan() {
if eqtype(src.Elem(), dst.Elem()) && (src.Sym == nil || dst.Sym == nil) {
return OCONVNOP
}
@ -875,7 +834,7 @@ func assignop(src *Type, dst *Type, why *string) Op {
// Can we convert a value of type src to a value of type dst?
// If so, return op code to use in conversion (maybe OCONVNOP).
// If not, return 0.
func convertop(src *Type, dst *Type, why *string) Op {
func convertop(src *types.Type, dst *types.Type, why *string) Op {
if why != nil {
*why = ""
}
@ -950,10 +909,10 @@ func convertop(src *Type, dst *Type, why *string) Op {
}
if src.IsSlice() && dst.IsString() {
if src.Elem().Etype == bytetype.Etype {
if src.Elem().Etype == types.Bytetype.Etype {
return OARRAYBYTESTR
}
if src.Elem().Etype == runetype.Etype {
if src.Elem().Etype == types.Runetype.Etype {
return OARRAYRUNESTR
}
}
@ -961,10 +920,10 @@ func convertop(src *Type, dst *Type, why *string) Op {
// 7. src is a string and dst is []byte or []rune.
// String to slice.
if src.IsString() && dst.IsSlice() {
if dst.Elem().Etype == bytetype.Etype {
if dst.Elem().Etype == types.Bytetype.Etype {
return OSTRARRAYBYTE
}
if dst.Elem().Etype == runetype.Etype {
if dst.Elem().Etype == types.Runetype.Etype {
return OSTRARRAYRUNE
}
}
@ -982,12 +941,12 @@ func convertop(src *Type, dst *Type, why *string) Op {
return 0
}
func assignconv(n *Node, t *Type, context string) *Node {
func assignconv(n *Node, t *types.Type, context string) *Node {
return assignconvfn(n, t, func() string { return context })
}
// Convert node n for assignment to type t.
func assignconvfn(n *Node, t *Type, context func() string) *Node {
func assignconvfn(n *Node, t *types.Type, context func() string) *Node {
if n == nil || n.Type == nil || n.Type.Broke() {
return n
}
@ -1007,10 +966,10 @@ func assignconvfn(n *Node, t *Type, context func() string) *Node {
// Convert ideal bool from comparison to plain bool
// if the next step is non-bool (like interface{}).
if n.Type == idealbool && !t.IsBoolean() {
if n.Type == types.Idealbool && !t.IsBoolean() {
if n.Op == ONAME || n.Op == OLITERAL {
r := nod(OCONVNOP, n, nil)
r.Type = Types[TBOOL]
r.Type = types.Types[TBOOL]
r.Typecheck = 1
r.SetImplicit(true)
n = r
@ -1132,11 +1091,11 @@ func syslook(name string) *Node {
if s == nil || s.Def == nil {
Fatalf("syslook: can't find runtime.%s", name)
}
return s.Def
return asNode(s.Def)
}
// typehash computes a hash value for type t to use in type switch statements.
func typehash(t *Type) uint32 {
func typehash(t *types.Type) uint32 {
p := t.LongString()
// Using MD5 is overkill, but reduces accidental collisions.
@ -1229,7 +1188,7 @@ out:
n.SetHasCall(b)
}
func badtype(op Op, tl *Type, tr *Type) {
func badtype(op Op, tl *types.Type, tr *types.Type) {
fmt_ := ""
if tl != nil {
fmt_ += fmt.Sprintf("\n\t%v", tl)
@ -1358,7 +1317,7 @@ func safeexpr(n *Node, init *Nodes) *Node {
return cheapexpr(n, init)
}
func copyexpr(n *Node, t *Type, init *Nodes) *Node {
func copyexpr(n *Node, t *types.Type, init *Nodes) *Node {
l := temp(t)
a := nod(OAS, l, n)
a = typecheck(a, Etop)
@ -1383,7 +1342,7 @@ func cheapexpr(n *Node, init *Nodes) *Node {
// A Dlist stores a pointer to a TFIELD Type embedded within
// a TSTRUCT or TINTER Type.
type Dlist struct {
field *Field
field *types.Field
}
// dotlist is used by adddot1 to record the path of embedded fields
@ -1394,7 +1353,7 @@ var dotlist = make([]Dlist, 10)
// lookdot0 returns the number of fields or methods named s associated
// with Type t. If exactly one exists, it will be returned in *save
// (if save is not nil).
func lookdot0(s *Sym, t *Type, save **Field, ignorecase bool) int {
func lookdot0(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) int {
u := t
if u.IsPtr() {
u = u.Elem()
@ -1433,13 +1392,13 @@ func lookdot0(s *Sym, t *Type, save **Field, ignorecase bool) int {
// in reverse order. If none exist, more will indicate whether t contains any
// embedded fields at depth d, so callers can decide whether to retry at
// a greater depth.
func adddot1(s *Sym, t *Type, d int, save **Field, ignorecase bool) (c int, more bool) {
func adddot1(s *types.Sym, t *types.Type, d int, save **types.Field, ignorecase bool) (c int, more bool) {
if t.Recur() {
return
}
t.SetRecur(true)
var u *Type
var u *types.Type
d--
if d < 0 {
// We've reached our target depth. If t has any fields/methods
@ -1487,7 +1446,7 @@ out:
// a selection expression x.f, where x is of type t and f is the symbol s.
// If no such path exists, dotpath returns nil.
// If there are multiple shortest paths to the same depth, ambig is true.
func dotpath(s *Sym, t *Type, save **Field, ignorecase bool) (path []Dlist, ambig bool) {
func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (path []Dlist, ambig bool) {
// The embedding of types within structs imposes a tree structure onto
// types: structs parent the types they embed, and types parent their
// fields or methods. Our goal here is to find the shortest path to
@ -1555,13 +1514,13 @@ func adddot(n *Node) *Node {
// with unique tasks and they return
// the actual methods.
type Symlink struct {
field *Field
field *types.Field
followptr bool
}
var slist []Symlink
func expand0(t *Type, followptr bool) {
func expand0(t *types.Type, followptr bool) {
u := t
if u.IsPtr() {
followptr = true
@ -1592,7 +1551,7 @@ func expand0(t *Type, followptr bool) {
}
}
func expand1(t *Type, top, followptr bool) {
func expand1(t *types.Type, top, followptr bool) {
if t.Recur() {
return
}
@ -1626,7 +1585,7 @@ out:
t.SetRecur(false)
}
func expandmeth(t *Type) {
func expandmeth(t *types.Type) {
if t == nil || t.AllMethods().Len() != 0 {
return
}
@ -1642,12 +1601,12 @@ func expandmeth(t *Type) {
expand1(t, true, false)
// check each method to be uniquely reachable
var ms []*Field
var ms []*types.Field
for i, sl := range slist {
slist[i].field = nil
sl.field.Sym.SetUniq(false)
var f *Field
var f *types.Field
if path, _ := dotpath(sl.field.Sym, t, &f, false); path == nil {
continue
}
@ -1675,7 +1634,7 @@ func expandmeth(t *Type) {
}
// Given funarg struct list, return list of ODCLFIELD Node fn args.
func structargs(tl *Type, mustname bool) []*Node {
func structargs(tl *types.Type, mustname bool) []*Node {
var args []*Node
gen := 0
for _, t := range tl.Fields().Slice() {
@ -1720,7 +1679,7 @@ func structargs(tl *Type, mustname bool) []*Node {
// rcvr - U
// method - M func (t T)(), a TFIELD type struct
// newnam - the eventual mangled name of this function
func genwrapper(rcvr *Type, method *Field, newnam *Sym, iface int) {
func genwrapper(rcvr *types.Type, method *types.Field, newnam *types.Sym, iface int) {
if false && Debug['r'] != 0 {
fmt.Printf("genwrapper rcvrtype=%v method=%v newnam=%v\n", rcvr, method, newnam)
}
@ -1743,7 +1702,7 @@ func genwrapper(rcvr *Type, method *Field, newnam *Sym, iface int) {
// that the interface call will pass in.
// Add a dummy padding argument after the
// receiver to make up the difference.
tpad := typArray(Types[TUINT8], int64(Widthptr)-rcvr.Width)
tpad := types.NewArray(types.Types[TUINT8], int64(Widthptr)-rcvr.Width)
pad := namedfield(".pad", tpad)
l = append(l, pad)
}
@ -1844,29 +1803,29 @@ func genwrapper(rcvr *Type, method *Field, newnam *Sym, iface int) {
funccompile(fn)
}
func hashmem(t *Type) *Node {
func hashmem(t *types.Type) *Node {
sym := Runtimepkg.Lookup("memhash")
n := newname(sym)
n.Class = PFUNC
tfn := nod(OTFUNC, nil, nil)
tfn.List.Append(anonfield(typPtr(t)))
tfn.List.Append(anonfield(Types[TUINTPTR]))
tfn.List.Append(anonfield(Types[TUINTPTR]))
tfn.Rlist.Append(anonfield(Types[TUINTPTR]))
tfn.List.Append(anonfield(types.NewPtr(t)))
tfn.List.Append(anonfield(types.Types[TUINTPTR]))
tfn.List.Append(anonfield(types.Types[TUINTPTR]))
tfn.Rlist.Append(anonfield(types.Types[TUINTPTR]))
tfn = typecheck(tfn, Etype)
n.Type = tfn.Type
return n
}
func ifacelookdot(s *Sym, t *Type, followptr *bool, ignorecase bool) *Field {
func ifacelookdot(s *types.Sym, t *types.Type, followptr *bool, ignorecase bool) *types.Field {
*followptr = false
if t == nil {
return nil
}
var m *Field
var m *types.Field
path, ambig := dotpath(s, t, &m, ignorecase)
if path == nil {
if ambig {
@ -1890,7 +1849,7 @@ func ifacelookdot(s *Sym, t *Type, followptr *bool, ignorecase bool) *Field {
return m
}
func implements(t, iface *Type, m, samename **Field, ptr *int) bool {
func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool {
t0 := t
if t == nil {
return false
@ -1997,7 +1956,7 @@ func (l Nodes) asblock() *Node {
return n
}
func ngotype(n *Node) *Sym {
func ngotype(n *Node) *types.Sym {
if n.Type != nil {
return typenamesym(n.Type)
}
@ -2031,18 +1990,18 @@ func pathtoprefix(s string) string {
return s
}
var pkgMap = make(map[string]*Pkg)
var pkgs []*Pkg
var pkgMap = make(map[string]*types.Pkg)
var pkgs []*types.Pkg
func mkpkg(path string) *Pkg {
func mkpkg(path string) *types.Pkg {
if p := pkgMap[path]; p != nil {
return p
}
p := new(Pkg)
p := new(types.Pkg)
p.Path = path
p.Prefix = pathtoprefix(path)
p.Syms = make(map[string]*Sym)
p.Syms = make(map[string]*types.Sym)
pkgMap[path] = p
pkgs = append(pkgs, p)
return p
@ -2128,7 +2087,7 @@ func checknil(x *Node, init *Nodes) {
// Can this type be stored directly in an interface word?
// Yes, if the representation is a single pointer.
func isdirectiface(t *Type) bool {
func isdirectiface(t *types.Type) bool {
switch t.Etype {
case TPTR32,
TPTR64,
@ -2153,7 +2112,7 @@ func isdirectiface(t *Type) bool {
// itabType loads the _type field from a runtime.itab struct.
func itabType(itab *Node) *Node {
typ := nodSym(ODOTPTR, itab, nil)
typ.Type = typPtr(Types[TUINT8])
typ.Type = types.NewPtr(types.Types[TUINT8])
typ.Typecheck = 1
typ.Xoffset = int64(Widthptr) // offset of _type in runtime.itab
typ.SetBounded(true) // guaranteed not to fault
@ -2163,14 +2122,14 @@ func itabType(itab *Node) *Node {
// ifaceData loads the data field from an interface.
// The concrete type must be known to have type t.
// It follows the pointer if !isdirectiface(t).
func ifaceData(n *Node, t *Type) *Node {
func ifaceData(n *Node, t *types.Type) *Node {
ptr := nodSym(OIDATA, n, nil)
if isdirectiface(t) {
ptr.Type = t
ptr.Typecheck = 1
return ptr
}
ptr.Type = typPtr(t)
ptr.Type = types.NewPtr(t)
ptr.SetBounded(true)
ptr.Typecheck = 1
ind := nod(OIND, ptr, nil)
@ -2178,16 +2137,3 @@ func ifaceData(n *Node, t *Type) *Node {
ind.Typecheck = 1
return ind
}
// iet returns 'T' if t is a concrete type,
// 'I' if t is an interface type, and 'E' if t is an empty interface type.
// It is used to build calls to the conv* and assert* runtime routines.
func (t *Type) iet() byte {
if t.IsEmptyInterface() {
return 'E'
}
if t.IsInterface() {
return 'I'
}
return 'T'
}
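
To tie the subr.go changes together: package and symbol interning now happens on *types.Pkg. The snippet below is illustrative only; the import path and symbol name are made up.

func pkgLookupExample() {
	p := mkpkg("example.com/demo") // hypothetical path; returns the cached *types.Pkg for it
	s := p.Lookup("Answer")        // *types.Sym, interned in p.Syms on first use
	fmt.Println(s.Pkg.Path, s.Name)
}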


@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"sort"
)
@ -58,7 +59,7 @@ func typecheckswitch(n *Node) {
var nilonly string
var top int
var t *Type
var t *types.Type
if n.Left != nil && n.Left.Op == OTYPESW {
// type switch
@ -76,7 +77,7 @@ func typecheckswitch(n *Node) {
n.Left = defaultlit(n.Left, nil)
t = n.Left.Type
} else {
t = Types[TBOOL]
t = types.Types[TBOOL]
}
if t != nil {
switch {
@ -84,10 +85,10 @@ func typecheckswitch(n *Node) {
yyerrorl(n.Pos, "cannot switch on %L", n.Left)
case t.IsSlice():
nilonly = "slice"
case t.IsArray() && !t.IsComparable():
case t.IsArray() && !IsComparable(t):
yyerrorl(n.Pos, "cannot switch on %L", n.Left)
case t.IsStruct():
if f := t.IncomparableField(); f != nil {
if f := IncomparableField(t); f != nil {
yyerrorl(n.Pos, "cannot switch on %L (struct containing %v cannot be compared)", n.Left, f.Type)
}
case t.Etype == TFUNC:
@ -137,13 +138,13 @@ func typecheckswitch(n *Node) {
}
case nilonly != "" && !isnil(n1):
yyerrorl(ncase.Pos, "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Left)
case t.IsInterface() && !n1.Type.IsInterface() && !n1.Type.IsComparable():
case t.IsInterface() && !n1.Type.IsInterface() && !IsComparable(n1.Type):
yyerrorl(ncase.Pos, "invalid case %L in switch (incomparable type)", n1)
}
// type switch
case Etype:
var missing, have *Field
var missing, have *types.Field
var ptr int
switch {
case n1.Op == OLITERAL && n1.Type.IsKind(TNIL):
@ -705,10 +706,10 @@ func (s *typeSwitch) walk(sw *Node) {
a = typecheck(a, Etop)
cas = append(cas, a)
s.okname = temp(Types[TBOOL])
s.okname = temp(types.Types[TBOOL])
s.okname = typecheck(s.okname, Erv)
s.hashname = temp(Types[TUINT32])
s.hashname = temp(types.Types[TUINT32])
s.hashname = typecheck(s.hashname, Erv)
// set up labels and jumps
@ -750,7 +751,7 @@ func (s *typeSwitch) walk(sw *Node) {
// Load hash from type or itab.
h := nodSym(ODOTPTR, itab, nil)
h.Type = Types[TUINT32]
h.Type = types.Types[TUINT32]
h.Typecheck = 1
if cond.Right.Type.IsEmptyInterface() {
h.Xoffset = int64(2 * Widthptr) // offset of hash in runtime._type


@ -8,6 +8,7 @@ package gc
import (
"cmd/compile/internal/syntax"
"cmd/compile/internal/types"
"cmd/internal/src"
)
@ -26,7 +27,7 @@ type Node struct {
Rlist Nodes
// most nodes
Type *Type
Type *types.Type
Orig *Node // original form, for printing, and tracking copies of ONAMEs
// func
@ -35,7 +36,7 @@ type Node struct {
// ONAME, OTYPE, OPACK, OLABEL, some OLITERAL
Name *Name
Sym *Sym // various
Sym *types.Sym // various
E interface{} // Opt or Val, see methods below
// Various. Usually an offset into a struct. For example:
@ -54,7 +55,7 @@ type Node struct {
Esc uint16 // EscXXX
Op Op
Etype EType // op for OASOP, etype for OTYPE, exclam for export, 6g saved reg, ChanDir for OTCHAN, for OINDEXMAP 1=LHS,0=RHS
Etype types.EType // op for OASOP, etype for OTYPE, exclam for export, 6g saved reg, ChanDir for OTCHAN, for OINDEXMAP 1=LHS,0=RHS
Class Class // PPARAM, PAUTO, PEXTERN, etc
Embedded uint8 // ODCLFIELD embedded type
Walkdef uint8 // tracks state during typecheckdef; 2 == loop detected
@ -192,7 +193,7 @@ func (n *Node) mayBeShared() bool {
// Name holds Node fields used only by named nodes (ONAME, OTYPE, OPACK, OLABEL, some OLITERAL).
type Name struct {
Pack *Node // real package for import . names
Pkg *Pkg // pkg for OPACK nodes
Pkg *types.Pkg // pkg for OPACK nodes
Defn *Node // initializing assignment
Curfn *Node // function for local variables
Param *Param // additional fields for ONAME, OTYPE
@ -234,7 +235,7 @@ type Param struct {
Stackcopy *Node // the PPARAM/PPARAMOUT on-stack slot (moved func params only)
// ONAME PPARAM
Field *Field // TFIELD in arg struct
Field *types.Field // TFIELD in arg struct
// ONAME closure linkage
// Consider:
@ -317,7 +318,7 @@ type Param struct {
// Func holds Node fields used only with function-like nodes.
type Func struct {
Shortname *Sym
Shortname *types.Sym
Enter Nodes // for example, allocate and initialize memory for escaping parameters
Exit Nodes
Cvars Nodes // closure params
@ -325,7 +326,7 @@ type Func struct {
Inldcl Nodes // copy of dcl for use in inlining
Closgen int
Outerfunc *Node // outer function (for closure)
FieldTrack map[*Sym]struct{}
FieldTrack map[*types.Sym]struct{}
Ntype *Node // signature
Top int // top context (Ecall, Eproc, etc)
Closure *Node // OCLOSURE <-> ODCLFUNC


@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
@ -32,7 +33,7 @@ var typecheckdefstack []*Node
// resolve ONONAME to definition, if any.
func resolve(n *Node) *Node {
if n != nil && n.Op == ONONAME && n.Sym != nil {
r := n.Sym.Def
r := asNode(n.Sym.Def)
if r != nil {
if r.Op != OIOTA {
n = r
@ -86,7 +87,7 @@ var _typekind = []string{
TIDEAL: "untyped number",
}
func typekind(t *Type) string {
func typekind(t *types.Type) string {
if t.IsSlice() {
return "slice"
}
@ -253,7 +254,7 @@ func indexlit(n *Node) *Node {
}
switch consttype(n) {
case CTINT, CTRUNE, CTFLT, CTCPLX:
n = defaultlit(n, Types[TINT])
n = defaultlit(n, types.Types[TINT])
}
n = defaultlit(n, nil)
@ -296,7 +297,7 @@ OpSwitch:
ok |= Erv
if n.Type == nil && n.Val().Ctype() == CTSTR {
n.Type = idealstring
n.Type = types.Idealstring
}
break OpSwitch
@ -351,9 +352,9 @@ OpSwitch:
return n
}
var t *Type
var t *types.Type
if n.Left == nil {
t = typSlice(r.Type)
t = types.NewSlice(r.Type)
} else if n.Left.Op == ODDD {
if top&Ecomplit == 0 {
if !n.Diag() {
@ -363,7 +364,7 @@ OpSwitch:
n.Type = nil
return n
}
t = typDDDArray(r.Type)
t = types.NewDDDArray(r.Type)
} else {
n.Left = indexlit(typecheck(n.Left, Erv))
l := n.Left
@ -378,7 +379,7 @@ OpSwitch:
}
v := l.Val()
if doesoverflow(v, Types[TINT]) {
if doesoverflow(v, types.Types[TINT]) {
yyerror("array bound is too large")
n.Type = nil
return n
@ -390,14 +391,14 @@ OpSwitch:
n.Type = nil
return n
}
t = typArray(r.Type, bound)
t = types.NewArray(r.Type, bound)
}
n.Op = OTYPE
n.Type = t
n.Left = nil
n.Right = nil
if !t.isDDDArray() {
if !t.IsDDDArray() {
checkwidth(t)
}
@ -418,7 +419,7 @@ OpSwitch:
yyerror("go:notinheap map value not allowed")
}
n.Op = OTYPE
n.Type = typMap(l.Type, r.Type)
n.Type = types.NewMap(l.Type, r.Type)
// map key validation
alg, bad := algtype1(l.Type)
@ -445,7 +446,7 @@ OpSwitch:
if l.Type.NotInHeap() {
yyerror("chan of go:notinheap type not allowed")
}
t := typChan(l.Type, ChanDir(n.Etype)) // TODO(marvin): Fix Node.EType type union.
t := types.NewChan(l.Type, types.ChanDir(n.Etype)) // TODO(marvin): Fix Node.EType type union.
n.Op = OTYPE
n.Type = t
n.Left = nil
@ -492,7 +493,7 @@ OpSwitch:
if l.Op == OTYPE {
ok |= Etype
n.Op = OTYPE
n.Type = typPtr(l.Type)
n.Type = types.NewPtr(l.Type)
n.Left = nil
break OpSwitch
}
@ -566,7 +567,7 @@ OpSwitch:
op = n.Op
}
if op == OLSH || op == ORSH {
r = defaultlit(r, Types[TUINT])
r = defaultlit(r, types.Types[TUINT])
n.Right = r
t := r.Type
if !t.IsInteger() || t.IsSigned() {
@ -618,7 +619,7 @@ OpSwitch:
if r.Type.Etype != TBLANK {
aop = assignop(l.Type, r.Type, nil)
if aop != 0 {
if r.Type.IsInterface() && !l.Type.IsInterface() && !l.Type.IsComparable() {
if r.Type.IsInterface() && !l.Type.IsInterface() && !IsComparable(l.Type) {
yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(l.Type))
n.Type = nil
return n
@ -640,7 +641,7 @@ OpSwitch:
if l.Type.Etype != TBLANK {
aop = assignop(r.Type, l.Type, nil)
if aop != 0 {
if l.Type.IsInterface() && !r.Type.IsInterface() && !r.Type.IsComparable() {
if l.Type.IsInterface() && !r.Type.IsInterface() && !IsComparable(r.Type) {
yyerror("invalid operation: %v (operator %v not defined on %s)", n, op, typekind(r.Type))
n.Type = nil
return n
@ -679,7 +680,7 @@ OpSwitch:
// okfor allows any array == array, map == map, func == func.
// restrict to slice/map/func == nil and nil == slice/map/func.
if l.Type.IsArray() && !l.Type.IsComparable() {
if l.Type.IsArray() && !IsComparable(l.Type) {
yyerror("invalid operation: %v (%v cannot be compared)", n, l.Type)
n.Type = nil
return n
@ -704,7 +705,7 @@ OpSwitch:
}
if l.Type.IsStruct() {
if f := l.Type.IncomparableField(); f != nil {
if f := IncomparableField(l.Type); f != nil {
yyerror("invalid operation: %v (struct containing %v cannot be compared)", n, f.Type)
n.Type = nil
return n
@ -714,7 +715,7 @@ OpSwitch:
t = l.Type
if iscmp[n.Op] {
evconst(n)
t = idealbool
t = types.Idealbool
if n.Op != OLITERAL {
l, r = defaultlit2(l, r, true)
n.Left = l
@ -725,7 +726,7 @@ OpSwitch:
if et == TSTRING {
if iscmp[n.Op] {
// TODO(marvin): Fix Node.EType type union.
n.Etype = EType(n.Op)
n.Etype = types.EType(n.Op)
n.Op = OCMPSTR
} else if n.Op == OADD {
// create OADDSTR node with list of strings in x + y + z + (w + v) + ...
@ -756,7 +757,7 @@ OpSwitch:
} else // leave alone for back end
if r.Type.IsInterface() == l.Type.IsInterface() {
// TODO(marvin): Fix Node.EType type union.
n.Etype = EType(n.Op)
n.Etype = types.EType(n.Op)
n.Op = OCMPIFACE
}
}
@ -823,7 +824,7 @@ OpSwitch:
n.Type = nil
return n
}
n.Type = typPtr(t)
n.Type = types.NewPtr(t)
break OpSwitch
case OCOMPLIT:
@ -968,7 +969,7 @@ OpSwitch:
}
if n.Type != nil && !n.Type.IsInterface() {
var missing, have *Field
var missing, have *types.Field
var ptr int
if !implements(n.Type, t, &missing, &have, &ptr) {
if have != nil && have.Sym == missing.Sym {
@ -1011,7 +1012,7 @@ OpSwitch:
case TSTRING, TARRAY, TSLICE:
n.Right = indexlit(n.Right)
if t.IsString() {
n.Type = bytetype
n.Type = types.Bytetype
} else {
n.Type = t.Elem()
}
@ -1146,7 +1147,7 @@ OpSwitch:
l = n.Left
}
t := l.Type
var tp *Type
var tp *types.Type
if t.IsString() {
if hasmax {
yyerror("invalid operation %v (3-index slice of string)", n)
@ -1157,7 +1158,7 @@ OpSwitch:
n.Op = OSLICESTR
} else if t.IsPtr() && t.Elem().IsArray() {
tp = t.Elem()
n.Type = typSlice(tp.Elem())
n.Type = types.NewSlice(tp.Elem())
dowidth(n.Type)
if hasmax {
n.Op = OSLICE3ARR
@ -1217,7 +1218,7 @@ OpSwitch:
n.Left = defaultlit(n.Left, nil)
l = n.Left
if l.Op == OTYPE {
if n.Isddd() || l.Type.isDDDArray() {
if n.Isddd() || l.Type.IsDDDArray() {
if !l.Type.Broke() {
yyerror("invalid use of ... in type conversion to %v", l.Type)
}
@ -1287,7 +1288,7 @@ OpSwitch:
if t.Results().NumFields() == 1 {
n.Type = l.Type.Results().Field(0).Type
if n.Op == OCALLFUNC && n.Left.Op == ONAME && n.Left.Sym.Pkg.isRuntime() && n.Left.Sym.Name == "getg" {
if n.Op == OCALLFUNC && n.Left.Op == ONAME && isRuntimePkg(n.Left.Sym.Pkg) && n.Left.Sym.Name == "getg" {
// Emit code for runtime.getg() directly instead of calling function.
// Most such rewrites (for example the similar one for math.Sqrt) should be done in walk,
// so that the ordering pass can make sure to preserve the semantics of the original code
@ -1319,7 +1320,7 @@ OpSwitch:
// any side effects disappear; ignore init
var r Node
nodconst(&r, Types[TUINTPTR], evalunsafe(n))
nodconst(&r, types.Types[TUINTPTR], evalunsafe(n))
r.Orig = n
n = &r
@ -1365,7 +1366,7 @@ OpSwitch:
n.Orig = r
}
n.Type = Types[cplxsubtype(t.Etype)]
n.Type = types.Types[cplxsubtype(t.Etype)]
break OpSwitch
}
@ -1374,7 +1375,7 @@ OpSwitch:
case TSTRING:
if Isconst(l, CTSTR) {
var r Node
nodconst(&r, Types[TINT], int64(len(l.Val().U.(string))))
nodconst(&r, types.Types[TINT], int64(len(l.Val().U.(string))))
r.Orig = n
n = &r
}
@ -1384,12 +1385,12 @@ OpSwitch:
break
}
var r Node
nodconst(&r, Types[TINT], t.NumElem())
nodconst(&r, types.Types[TINT], t.NumElem())
r.Orig = n
n = &r
}
n.Type = Types[TINT]
n.Type = types.Types[TINT]
break OpSwitch
badcall1:
@ -1421,8 +1422,8 @@ OpSwitch:
}
t = n.List.First().Type
l = t.Field(0).Nname
r = t.Field(1).Nname
l = asNode(t.Field(0).Nname)
r = asNode(t.Field(1).Nname)
} else {
if !twoarg(n) {
n.Type = nil
@ -1451,7 +1452,7 @@ OpSwitch:
return n
}
var t *Type
var t *types.Type
switch l.Type.Etype {
default:
yyerror("invalid operation: %v (arguments have type %v, expected floating-point)", n, l.Type)
@ -1459,13 +1460,13 @@ OpSwitch:
return n
case TIDEAL:
t = Types[TIDEAL]
t = types.Types[TIDEAL]
case TFLOAT32:
t = Types[TCOMPLEX64]
t = types.Types[TCOMPLEX64]
case TFLOAT64:
t = Types[TCOMPLEX128]
t = types.Types[TCOMPLEX128]
}
if l.Op == OLITERAL && r.Op == OLITERAL {
@ -1562,7 +1563,7 @@ OpSwitch:
}
// Unpack multiple-return result before type-checking.
var funarg *Type
var funarg *types.Type
if t.IsFuncArgStruct() {
funarg = t
t = t.Field(0).Type
@ -1595,7 +1596,7 @@ OpSwitch:
}
if t.Elem().IsKind(TUINT8) && args.Second().Type.IsString() {
args.SetSecond(defaultlit(args.Second(), Types[TSTRING]))
args.SetSecond(defaultlit(args.Second(), types.Types[TSTRING]))
break OpSwitch
}
@ -1639,7 +1640,7 @@ OpSwitch:
n.Left = args.First()
n.Right = args.Second()
n.List.Set(nil)
n.Type = Types[TINT]
n.Type = types.Types[TINT]
n.Left = typecheck(n.Left, Erv)
n.Right = typecheck(n.Right, Erv)
if n.Left.Type == nil || n.Right.Type == nil {
@ -1655,7 +1656,7 @@ OpSwitch:
// copy([]byte, string)
if n.Left.Type.IsSlice() && n.Right.Type.IsString() {
if eqtype(n.Left.Type.Elem(), bytetype) {
if eqtype(n.Left.Type.Elem(), types.Bytetype) {
break OpSwitch
}
yyerror("arguments to copy have different element types: %L and string", n.Left.Type)
@ -1800,7 +1801,7 @@ OpSwitch:
l = args[i]
i++
l = typecheck(l, Erv)
l = defaultlit(l, Types[TINT])
l = defaultlit(l, types.Types[TINT])
if l.Type == nil {
n.Type = nil
return n
@ -1821,7 +1822,7 @@ OpSwitch:
l = args[i]
i++
l = typecheck(l, Erv)
l = defaultlit(l, Types[TINT])
l = defaultlit(l, types.Types[TINT])
if l.Type == nil {
n.Type = nil
return n
@ -1870,7 +1871,7 @@ OpSwitch:
}
n.Left = l
n.Type = typPtr(t)
n.Type = types.NewPtr(t)
break OpSwitch
case OPRINT, OPRINTN:
@ -1880,7 +1881,7 @@ OpSwitch:
for i1, n1 := range ls {
// Special case for print: int constant is int64, not int.
if Isconst(n1, CTINT) {
ls[i1] = defaultlit(ls[i1], Types[TINT64])
ls[i1] = defaultlit(ls[i1], types.Types[TINT64])
} else {
ls[i1] = defaultlit(ls[i1], nil)
}
@ -1895,7 +1896,7 @@ OpSwitch:
return n
}
n.Left = typecheck(n.Left, Erv)
n.Left = defaultlit(n.Left, Types[TINTER])
n.Left = defaultlit(n.Left, types.Types[TINTER])
if n.Left.Type == nil {
n.Type = nil
return n
@ -1910,7 +1911,7 @@ OpSwitch:
return n
}
n.Type = Types[TINTER]
n.Type = types.Types[TINTER]
break OpSwitch
case OCLOSURE:
@ -1932,7 +1933,7 @@ OpSwitch:
if !t.IsInterface() {
Fatalf("OITAB of %v", t)
}
n.Type = typPtr(Types[TUINTPTR])
n.Type = types.NewPtr(types.Types[TUINTPTR])
break OpSwitch
case OIDATA:
@ -1953,9 +1954,9 @@ OpSwitch:
Fatalf("OSPTR of %v", t)
}
if t.IsString() {
n.Type = typPtr(Types[TUINT8])
n.Type = types.NewPtr(types.Types[TUINT8])
} else {
n.Type = typPtr(t.Elem())
n.Type = types.NewPtr(t.Elem())
}
break OpSwitch
@ -1966,7 +1967,7 @@ OpSwitch:
case OCFUNC:
ok |= Erv
n.Left = typecheck(n.Left, Erv)
n.Type = Types[TUINTPTR]
n.Type = types.Types[TUINTPTR]
break OpSwitch
case OCONVNOP:
@ -2183,7 +2184,7 @@ OpSwitch:
return n
}
func checksliceindex(l *Node, r *Node, tp *Type) bool {
func checksliceindex(l *Node, r *Node, tp *types.Type) bool {
t := r.Type
if t == nil {
return false
@ -2345,8 +2346,8 @@ func twoarg(n *Node) bool {
return true
}
func lookdot1(errnode *Node, s *Sym, t *Type, fs *Fields, dostrcmp int) *Field {
var r *Field
func lookdot1(errnode *Node, s *types.Sym, t *types.Type, fs *types.Fields, dostrcmp int) *types.Field {
var r *types.Field
for _, f := range fs.Slice() {
if dostrcmp != 0 && f.Sym.Name == s.Name {
return f
@ -2374,7 +2375,7 @@ func lookdot1(errnode *Node, s *Sym, t *Type, fs *Fields, dostrcmp int) *Field {
return r
}
func looktypedot(n *Node, t *Type, dostrcmp int) bool {
func looktypedot(n *Node, t *types.Type, dostrcmp int) bool {
s := n.Sym
if t.IsInterface() {
@ -2416,32 +2417,32 @@ func looktypedot(n *Node, t *Type, dostrcmp int) bool {
return true
}
func derefall(t *Type) *Type {
for t != nil && t.Etype == Tptr {
func derefall(t *types.Type) *types.Type {
for t != nil && t.Etype == types.Tptr {
t = t.Elem()
}
return t
}
type typeSym struct {
t *Type
s *Sym
type typeSymKey struct {
t *types.Type
s *types.Sym
}
// dotField maps (*Type, *Sym) pairs to the corresponding struct field (*Type with Etype==TFIELD).
// dotField maps (*types.Type, *types.Sym) pairs to the corresponding struct field (*types.Type with Etype==TFIELD).
// It is a cache for use during usefield in walk.go, and is only filled in when field tracking is enabled.
var dotField = map[typeSym]*Field{}
var dotField = map[typeSymKey]*types.Field{}
func lookdot(n *Node, t *Type, dostrcmp int) *Field {
func lookdot(n *Node, t *types.Type, dostrcmp int) *types.Field {
s := n.Sym
dowidth(t)
var f1 *Field
var f1 *types.Field
if t.IsStruct() || t.IsInterface() {
f1 = lookdot1(n, s, t, t.Fields(), dostrcmp)
}
var f2 *Field
var f2 *types.Field
if n.Left.Type == t || n.Left.Type.Sym == nil {
mt := methtype(t)
if mt != nil {
@ -2465,7 +2466,7 @@ func lookdot(n *Node, t *Type, dostrcmp int) *Field {
n.Xoffset = f1.Offset
n.Type = f1.Type
if obj.Fieldtrack_enabled > 0 {
dotField[typeSym{t.Orig, s}] = f1
dotField[typeSymKey{t.Orig, s}] = f1
}
if t.IsInterface() {
if n.Left.Type.IsPtr() {
@ -2489,20 +2490,20 @@ func lookdot(n *Node, t *Type, dostrcmp int) *Field {
dowidth(tt)
rcvr := f2.Type.Recv().Type
if !eqtype(rcvr, tt) {
if rcvr.Etype == Tptr && eqtype(rcvr.Elem(), tt) {
if rcvr.Etype == types.Tptr && eqtype(rcvr.Elem(), tt) {
checklvalue(n.Left, "call pointer method on")
n.Left = nod(OADDR, n.Left, nil)
n.Left.SetImplicit(true)
n.Left = typecheck(n.Left, Etype|Erv)
} else if tt.Etype == Tptr && rcvr.Etype != Tptr && eqtype(tt.Elem(), rcvr) {
} else if tt.Etype == types.Tptr && rcvr.Etype != types.Tptr && eqtype(tt.Elem(), rcvr) {
n.Left = nod(OIND, n.Left, nil)
n.Left.SetImplicit(true)
n.Left = typecheck(n.Left, Etype|Erv)
} else if tt.Etype == Tptr && tt.Elem().Etype == Tptr && eqtype(derefall(tt), derefall(rcvr)) {
} else if tt.Etype == types.Tptr && tt.Elem().Etype == types.Tptr && eqtype(derefall(tt), derefall(rcvr)) {
yyerror("calling method %v with receiver %L requires explicit dereference", n.Sym, n.Left)
for tt.Etype == Tptr {
for tt.Etype == types.Tptr {
// Stop one level early for method with pointer receiver.
if rcvr.Etype == Tptr && tt.Elem().Etype != Tptr {
if rcvr.Etype == types.Tptr && tt.Elem().Etype != types.Tptr {
break
}
n.Left = nod(OIND, n.Left, nil)
@ -2521,7 +2522,7 @@ func lookdot(n *Node, t *Type, dostrcmp int) *Field {
pll = ll
ll = ll.Left
}
if pll.Implicit() && ll.Type.IsPtr() && ll.Type.Sym != nil && ll.Type.Sym.Def != nil && ll.Type.Sym.Def.Op == OTYPE {
if pll.Implicit() && ll.Type.IsPtr() && ll.Type.Sym != nil && asNode(ll.Type.Sym.Def) != nil && asNode(ll.Type.Sym.Def).Op == OTYPE {
// It is invalid to automatically dereference a named pointer type when selecting a method.
// Make n->left == ll to clarify error message.
n.Left = ll
@ -2550,7 +2551,7 @@ func nokeys(l Nodes) bool {
return true
}
func hasddd(t *Type) bool {
func hasddd(t *types.Type) bool {
for _, tl := range t.Fields().Slice() {
if tl.Isddd() {
return true
@ -2561,8 +2562,8 @@ func hasddd(t *Type) bool {
}
// typecheck assignment: type list = expression list
func typecheckaste(op Op, call *Node, isddd bool, tstruct *Type, nl Nodes, desc func() string) {
var t *Type
func typecheckaste(op Op, call *Node, isddd bool, tstruct *types.Type, nl Nodes, desc func() string) {
var t *types.Type
var n *Node
var n1 int
var n2 int
@ -2740,7 +2741,7 @@ toomany:
goto out
}
func errorDetails(nl Nodes, tstruct *Type, isddd bool) string {
func errorDetails(nl Nodes, tstruct *types.Type, isddd bool) string {
// If we don't know any type at a call site, let's suppress any return
// message signatures. See Issue https://golang.org/issues/19012.
if tstruct == nil {
@ -2758,20 +2759,20 @@ func errorDetails(nl Nodes, tstruct *Type, isddd bool) string {
// sigrepr is a type's representation to the outside world,
// in string representations of return signatures
// e.g. in error messages about wrong arguments to return.
func (t *Type) sigrepr() string {
func sigrepr(t *types.Type) string {
switch t {
default:
return t.String()
case Types[TIDEAL]:
case types.Types[TIDEAL]:
// "untyped number" is not commonly used
// outside of the compiler, so let's use "number".
return "number"
case idealstring:
case types.Idealstring:
return "string"
case idealbool:
case types.Idealbool:
return "bool"
}
}
@ -2786,11 +2787,11 @@ func (nl Nodes) retsigerr(isddd bool) string {
var typeStrings []string
if nl.Len() == 1 && nl.First().Type != nil && nl.First().Type.IsFuncArgStruct() {
for _, f := range nl.First().Type.Fields().Slice() {
typeStrings = append(typeStrings, f.Type.sigrepr())
typeStrings = append(typeStrings, sigrepr(f.Type))
}
} else {
for _, n := range nl.Slice() {
typeStrings = append(typeStrings, n.Type.sigrepr())
typeStrings = append(typeStrings, sigrepr(n.Type))
}
}
@ -2870,7 +2871,7 @@ func keydup(n *Node, hash map[uint32][]*Node) {
// iscomptype reports whether type t is a composite literal type
// or a pointer to one.
func iscomptype(t *Type) bool {
func iscomptype(t *types.Type) bool {
if t.IsPtr() {
t = t.Elem()
}
@ -2883,7 +2884,7 @@ func iscomptype(t *Type) bool {
}
}
func pushtype(n *Node, t *Type) {
func pushtype(n *Node, t *types.Type) {
if n == nil || n.Op != OCOMPLIT || !iscomptype(t) {
return
}
@ -2966,7 +2967,7 @@ func typecheckcomplit(n *Node) *Node {
}
var length, i int64
checkBounds := t.IsArray() && !t.isDDDArray()
checkBounds := t.IsArray() && !t.IsDDDArray()
nl := n.List.Slice()
for i2, l := range nl {
setlineno(l)
@ -3008,7 +3009,7 @@ func typecheckcomplit(n *Node) *Node {
}
}
if t.isDDDArray() {
if t.IsDDDArray() {
t.SetNumElem(length)
}
if t.IsSlice() {
@ -3311,7 +3312,7 @@ func typecheckas(n *Node) {
}
}
func checkassignto(src *Type, dst *Node) {
func checkassignto(src *types.Type, dst *Node) {
var why string
if assignop(src, dst.Type, &why) == 0 {
@ -3419,10 +3420,10 @@ func typecheckas2(n *Node) {
}
l := n.List.Second()
if l.Type != nil && !l.Type.IsBoolean() {
checkassignto(Types[TBOOL], l)
checkassignto(types.Types[TBOOL], l)
}
if l.Name != nil && l.Name.Defn == n && l.Name.Param.Ntype == nil {
l.Type = Types[TBOOL]
l.Type = types.Types[TBOOL]
}
goto out
}
@ -3456,7 +3457,7 @@ func typecheckfunc(n *Node) {
return
}
n.Type = t
t.SetNname(n.Func.Nname)
t.FuncType().Nname = asTypesNode(n.Func.Nname)
rcvr := t.Recv()
if rcvr != nil && n.Func.Shortname != nil {
n.Func.Nname.Sym = methodname(n.Func.Shortname, rcvr.Type)
@ -3504,13 +3505,13 @@ var ntypecheckdeftype int
var methodqueue []*Node
func domethod(n *Node) {
nt := n.Type.Nname()
nt := asNode(n.Type.FuncType().Nname)
nt = typecheck(nt, Etype)
if nt.Type == nil {
// type check failed; leave empty func
// TODO(mdempsky): Fix Type rekinding.
n.Type.Etype = TFUNC
n.Type.nod = nil
n.Type.Nod = nil
return
}
@ -3530,7 +3531,7 @@ func domethod(n *Node) {
// TODO(mdempsky): Fix Type rekinding.
*n.Type = *nt.Type
n.Type.nod = nil
n.Type.Nod = nil
checkwidth(n.Type)
}
@ -3542,18 +3543,18 @@ type mapqueueval struct {
// tracks the line numbers at which forward types are first used as map keys
var mapqueue []mapqueueval
func copytype(n *Node, t *Type) {
func copytype(n *Node, t *types.Type) {
if t.Etype == TFORW {
// This type isn't computed yet; when it is, update n.
t.ForwardType().Copyto = append(t.ForwardType().Copyto, n)
t.ForwardType().Copyto = append(t.ForwardType().Copyto, asTypesNode(n))
return
}
embedlineno := n.Type.ForwardType().Embedlineno
l := n.Type.ForwardType().Copyto
ptrTo := n.Type.ptrTo
sliceOf := n.Type.sliceOf
ptrBase := n.Type.PtrBase
sliceOf := n.Type.SliceOf
// TODO(mdempsky): Fix Type rekinding.
*n.Type = *t
@ -3569,14 +3570,14 @@ func copytype(n *Node, t *Type) {
// to the existing type, but the method set of an interface
// type [...] remains unchanged."
if !t.IsInterface() {
t.methods = Fields{}
t.allMethods = Fields{}
*t.Methods() = types.Fields{}
*t.AllMethods() = types.Fields{}
}
t.nod = n
t.Nod = asTypesNode(n)
t.SetDeferwidth(false)
t.ptrTo = ptrTo
t.sliceOf = sliceOf
t.PtrBase = ptrBase
t.SliceOf = sliceOf
// Propagate go:notinheap pragma from the Name to the Type.
if n.Name != nil && n.Name.Param != nil && n.Name.Param.Pragma&NotInHeap != 0 {
@ -3585,7 +3586,7 @@ func copytype(n *Node, t *Type) {
// Update nodes waiting on this type.
for _, n := range l {
copytype(n, t)
copytype(asNode(n), t)
}
// Double-check use of type as embedded type.
@ -3643,7 +3644,7 @@ ret:
}
for _, e := range mapqueue {
lineno = e.lno
if !e.n.Type.IsComparable() {
if !IsComparable(e.n.Type) {
yyerror("invalid map key type %v", e.n.Type)
}
}
@ -3811,7 +3812,7 @@ func typecheckdef(n *Node) *Node {
n.SetDiag(true)
goto ret
}
n.Sym.Def = p.Ntype
n.Sym.Def = asTypesNode(p.Ntype)
}
break
}
@ -3821,8 +3822,8 @@ func typecheckdef(n *Node) *Node {
defercheckwidth()
}
n.Walkdef = 1
n.Type = typ(TFORW)
n.Type.nod = n
n.Type = types.New(TFORW)
n.Type.Nod = asTypesNode(n)
n.Type.Sym = n.Sym // TODO(gri) this also happens in typecheckdeftype(n) - where should it happen?
nerrors0 := nerrors
typecheckdeftype(n)
@ -3852,7 +3853,7 @@ ret:
return n
}
func checkmake(t *Type, arg string, n *Node) bool {
func checkmake(t *types.Type, arg string, n *Node) bool {
if !n.Type.IsInteger() && n.Type.Etype != TIDEAL {
yyerror("non-integer %s argument in make(%v) - %v", arg, t, n.Type)
return false
@ -3874,7 +3875,7 @@ func checkmake(t *Type, arg string, n *Node) bool {
}
// defaultlit is necessary for non-constants too: n might be 1.1<<k.
n = defaultlit(n, Types[TINT])
n = defaultlit(n, types.Types[TINT])
return true
}
@ -3891,7 +3892,7 @@ func markbreak(n *Node, implicit *Node) {
implicit.SetHasBreak(true)
}
} else {
lab := n.Left.Sym.Label
lab := asNode(n.Left.Sym.Label)
if lab != nil {
lab.SetHasBreak(true)
}
@ -3925,7 +3926,7 @@ func markbreaklist(l Nodes, implicit *Node) {
if n.Op == OLABEL && i+1 < len(s) && n.Name.Defn == s[i+1] {
switch n.Name.Defn.Op {
case OFOR, OFORUNTIL, OSWITCH, OTYPESW, OSELECT, ORANGE:
n.Left.Sym.Label = n.Name.Defn
n.Left.Sym.Label = asTypesNode(n.Name.Defn)
markbreak(n.Name.Defn, n.Name.Defn)
n.Left.Sym.Label = nil
i++


@ -0,0 +1,249 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
import (
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"fmt"
)
// convenience constants
const (
Txxx = types.Txxx
TINT8 = types.TINT8
TUINT8 = types.TUINT8
TINT16 = types.TINT16
TUINT16 = types.TUINT16
TINT32 = types.TINT32
TUINT32 = types.TUINT32
TINT64 = types.TINT64
TUINT64 = types.TUINT64
TINT = types.TINT
TUINT = types.TUINT
TUINTPTR = types.TUINTPTR
TCOMPLEX64 = types.TCOMPLEX64
TCOMPLEX128 = types.TCOMPLEX128
TFLOAT32 = types.TFLOAT32
TFLOAT64 = types.TFLOAT64
TBOOL = types.TBOOL
TPTR32 = types.TPTR32
TPTR64 = types.TPTR64
TFUNC = types.TFUNC
TSLICE = types.TSLICE
TARRAY = types.TARRAY
TSTRUCT = types.TSTRUCT
TCHAN = types.TCHAN
TMAP = types.TMAP
TINTER = types.TINTER
TFORW = types.TFORW
TANY = types.TANY
TSTRING = types.TSTRING
TUNSAFEPTR = types.TUNSAFEPTR
// pseudo-types for literals
TIDEAL = types.TIDEAL
TNIL = types.TNIL
TBLANK = types.TBLANK
// pseudo-types for frame layout
TFUNCARGS = types.TFUNCARGS
TCHANARGS = types.TCHANARGS
// pseudo-types for import/export
TDDDFIELD = types.TDDDFIELD // wrapper: contained type is a ... field
NTYPE = types.NTYPE
)
func cmpForNe(x bool) ssa.Cmp {
if x {
return ssa.CMPlt
}
return ssa.CMPgt
}
func cmpsym(r, s *types.Sym) ssa.Cmp {
if r == s {
return ssa.CMPeq
}
if r == nil {
return ssa.CMPlt
}
if s == nil {
return ssa.CMPgt
}
// Fast sort, not pretty sort
if len(r.Name) != len(s.Name) {
return cmpForNe(len(r.Name) < len(s.Name))
}
if r.Pkg != s.Pkg {
if len(r.Pkg.Prefix) != len(s.Pkg.Prefix) {
return cmpForNe(len(r.Pkg.Prefix) < len(s.Pkg.Prefix))
}
if r.Pkg.Prefix != s.Pkg.Prefix {
return cmpForNe(r.Pkg.Prefix < s.Pkg.Prefix)
}
}
if r.Name != s.Name {
return cmpForNe(r.Name < s.Name)
}
return ssa.CMPeq
}
// cmptyp compares two *Types t and x, returning ssa.CMPlt,
// ssa.CMPeq, ssa.CMPgt as t<x, t==x, t>x, for an arbitrary
// and optimizer-centric notion of comparison.
func cmptyp(t, x *types.Type) ssa.Cmp {
// This follows the structure of eqtype in subr.go
// with two exceptions.
// 1. Symbols are compared more carefully because a <,=,> result is desired.
// 2. Maps are treated specially to avoid endless recursion -- maps
// contain an internal data type not expressible in Go source code.
if t == x {
return ssa.CMPeq
}
if t == nil {
return ssa.CMPlt
}
if x == nil {
return ssa.CMPgt
}
if t.Etype != x.Etype {
return cmpForNe(t.Etype < x.Etype)
}
if t.Sym != nil || x.Sym != nil {
// Special case: we keep byte and uint8 separate
// for error messages. Treat them as equal.
switch t.Etype {
case TUINT8:
if (t == types.Types[TUINT8] || t == types.Bytetype) && (x == types.Types[TUINT8] || x == types.Bytetype) {
return ssa.CMPeq
}
case TINT32:
if (t == types.Types[types.Runetype.Etype] || t == types.Runetype) && (x == types.Types[types.Runetype.Etype] || x == types.Runetype) {
return ssa.CMPeq
}
}
}
if c := cmpsym(t.Sym, x.Sym); c != ssa.CMPeq {
return c
}
if x.Sym != nil {
// Syms non-nil, if vargens match then equal.
if t.Vargen != x.Vargen {
return cmpForNe(t.Vargen < x.Vargen)
}
return ssa.CMPeq
}
// both syms nil, look at structure below.
switch t.Etype {
case TBOOL, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TUNSAFEPTR, TUINTPTR,
TINT8, TINT16, TINT32, TINT64, TINT, TUINT8, TUINT16, TUINT32, TUINT64, TUINT:
return ssa.CMPeq
}
switch t.Etype {
case TMAP:
if c := cmptyp(t.Key(), x.Key()); c != ssa.CMPeq {
return c
}
return cmptyp(t.Val(), x.Val())
case TPTR32, TPTR64, TSLICE:
// No special cases for these, they are handled
// by the general code after the switch.
case TSTRUCT:
if t.StructType().Map == nil {
if x.StructType().Map != nil {
return ssa.CMPlt // nil < non-nil
}
// to the fallthrough
} else if x.StructType().Map == nil {
return ssa.CMPgt // nil > non-nil
} else if t.StructType().Map.MapType().Bucket == t {
// Both have non-nil Map
// Special case for Maps which include a recursive type where the recursion is not broken with a named type
if x.StructType().Map.MapType().Bucket != x {
return ssa.CMPlt // bucket maps are least
}
return cmptyp(t.StructType().Map, x.StructType().Map)
} else if x.StructType().Map.MapType().Bucket == x {
return ssa.CMPgt // bucket maps are least
} // If t != t.Map.Bucket, fall through to general case
fallthrough
case TINTER:
t1, ti := types.IterFields(t)
x1, xi := types.IterFields(x)
for ; t1 != nil && x1 != nil; t1, x1 = ti.Next(), xi.Next() {
if t1.Embedded != x1.Embedded {
return cmpForNe(t1.Embedded < x1.Embedded)
}
if t1.Note != x1.Note {
return cmpForNe(t1.Note < x1.Note)
}
if c := cmpsym(t1.Sym, x1.Sym); c != ssa.CMPeq {
return c
}
if c := cmptyp(t1.Type, x1.Type); c != ssa.CMPeq {
return c
}
}
if t1 != x1 {
return cmpForNe(t1 == nil)
}
return ssa.CMPeq
case TFUNC:
for _, f := range types.RecvsParamsResults {
// Loop over fields in structs, ignoring argument names.
ta, ia := types.IterFields(f(t))
tb, ib := types.IterFields(f(x))
for ; ta != nil && tb != nil; ta, tb = ia.Next(), ib.Next() {
if ta.Isddd() != tb.Isddd() {
return cmpForNe(!ta.Isddd())
}
if c := cmptyp(ta.Type, tb.Type); c != ssa.CMPeq {
return c
}
}
if ta != tb {
return cmpForNe(ta == nil)
}
}
return ssa.CMPeq
case TARRAY:
if t.NumElem() != x.NumElem() {
return cmpForNe(t.NumElem() < x.NumElem())
}
case TCHAN:
if t.ChanDir() != x.ChanDir() {
return cmpForNe(t.ChanDir() < x.ChanDir())
}
default:
e := fmt.Sprintf("Do not know how to compare %v with %v", t, x)
panic(e)
}
// Common element type comparison for TARRAY, TCHAN, TPTR32, TPTR64, and TSLICE.
return cmptyp(t.Elem(), x.Elem())
}
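cmptyp, together with cmpsym's length-first "fast sort", yields an arbitrary but total and deterministic order over types, so callers can use it directly as a sort predicate. A minimal sketch of such a call site inside package gc (sortTypes is a hypothetical helper, not part of this change; it assumes the gc package's imports of sort, ssa, and types):

func sortTypes(ts []*types.Type) {
	sort.Slice(ts, func(i, j int) bool {
		return cmptyp(ts[i], ts[j]) == ssa.CMPlt
	})
}

Because cmpsym compares name lengths before contents, a symbol named "z" orders before one named "aa"; the order is only meant to be consistent, not human-friendly.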


@ -0,0 +1,16 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements conversions between *types.Node and *Node.
// TODO(gri) try to eliminate these soon
package gc
import (
"cmd/compile/internal/types"
"unsafe"
)
func asNode(n *types.Node) *Node { return (*Node)(unsafe.Pointer(n)) }
func asTypesNode(n *Node) *types.Node { return (*types.Node)(unsafe.Pointer(n)) }
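These casts are only safe because *Node values are passed through package types as opaque handles and never dereferenced there; types.Node is a placeholder. A standalone sketch of the same pattern, with hypothetical real/opaque type names, shows why the round trip is free and lossless:

package main

import (
	"fmt"
	"unsafe"
)

// real is the concrete node type owned by the front end.
type real struct{ name string }

// opaque stands in for real in another package, purely to break an import cycle.
type opaque struct{ _ int }

func asOpaque(r *real) *opaque { return (*opaque)(unsafe.Pointer(r)) }
func asReal(o *opaque) *real   { return (*real)(unsafe.Pointer(o)) }

func main() {
	r := &real{name: "x"}
	o := asOpaque(r)                   // handle crosses the package boundary untouched
	fmt.Println(asReal(o).name == "x") // true: the round trip is the identity
}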


@ -2,16 +2,20 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// TODO(gri) This file should probably become part of package types.
package gc
// builtinpkg is a fake package that declares the universe block.
var builtinpkg *Pkg
import "cmd/compile/internal/types"
var itable *Type // distinguished *byte
// builtinpkg is a fake package that declares the universe block.
var builtinpkg *types.Pkg
var itable *types.Type // distinguished *byte
var basicTypes = [...]struct {
name string
etype EType
etype types.EType
}{
{"int8", TINT8},
{"int16", TINT16},
@ -31,10 +35,10 @@ var basicTypes = [...]struct {
var typedefs = [...]struct {
name string
etype EType
etype types.EType
width *int
sameas32 EType
sameas64 EType
sameas32 types.EType
sameas64 types.EType
}{
{"int", TINT, &Widthint, TINT32, TINT64},
{"uint", TUINT, &Widthint, TUINT32, TUINT64},
@ -82,77 +86,77 @@ func initUniverse() {
func lexinit() {
for _, s := range basicTypes {
etype := s.etype
if int(etype) >= len(Types) {
if int(etype) >= len(types.Types) {
Fatalf("lexinit: %s bad etype", s.name)
}
s2 := builtinpkg.Lookup(s.name)
t := Types[etype]
t := types.Types[etype]
if t == nil {
t = typ(etype)
t = types.New(etype)
t.Sym = s2
if etype != TANY && etype != TSTRING {
dowidth(t)
}
Types[etype] = t
types.Types[etype] = t
}
s2.Def = typenod(t)
s2.Def.Name = new(Name)
s2.Def = asTypesNode(typenod(t))
asNode(s2.Def).Name = new(Name)
}
for _, s := range builtinFuncs {
// TODO(marvin): Fix Node.EType type union.
s2 := builtinpkg.Lookup(s.name)
s2.Def = newname(s2)
s2.Def.Etype = EType(s.op)
s2.Def = asTypesNode(newname(s2))
asNode(s2.Def).Etype = types.EType(s.op)
}
for _, s := range unsafeFuncs {
s2 := unsafepkg.Lookup(s.name)
s2.Def = newname(s2)
s2.Def.Etype = EType(s.op)
s2.Def = asTypesNode(newname(s2))
asNode(s2.Def).Etype = types.EType(s.op)
}
idealstring = typ(TSTRING)
idealbool = typ(TBOOL)
Types[TANY] = typ(TANY)
types.Idealstring = types.New(TSTRING)
types.Idealbool = types.New(TBOOL)
types.Types[TANY] = types.New(TANY)
s := builtinpkg.Lookup("true")
s.Def = nodbool(true)
s.Def.Sym = lookup("true")
s.Def.Name = new(Name)
s.Def.Type = idealbool
s.Def = asTypesNode(nodbool(true))
asNode(s.Def).Sym = lookup("true")
asNode(s.Def).Name = new(Name)
asNode(s.Def).Type = types.Idealbool
s = builtinpkg.Lookup("false")
s.Def = nodbool(false)
s.Def.Sym = lookup("false")
s.Def.Name = new(Name)
s.Def.Type = idealbool
s.Def = asTypesNode(nodbool(false))
asNode(s.Def).Sym = lookup("false")
asNode(s.Def).Name = new(Name)
asNode(s.Def).Type = types.Idealbool
s = lookup("_")
s.Block = -100
s.Def = newname(s)
Types[TBLANK] = typ(TBLANK)
s.Def.Type = Types[TBLANK]
nblank = s.Def
s.Def = asTypesNode(newname(s))
types.Types[TBLANK] = types.New(TBLANK)
asNode(s.Def).Type = types.Types[TBLANK]
nblank = asNode(s.Def)
s = builtinpkg.Lookup("_")
s.Block = -100
s.Def = newname(s)
Types[TBLANK] = typ(TBLANK)
s.Def.Type = Types[TBLANK]
s.Def = asTypesNode(newname(s))
types.Types[TBLANK] = types.New(TBLANK)
asNode(s.Def).Type = types.Types[TBLANK]
Types[TNIL] = typ(TNIL)
types.Types[TNIL] = types.New(TNIL)
s = builtinpkg.Lookup("nil")
var v Val
v.U = new(NilVal)
s.Def = nodlit(v)
s.Def.Sym = s
s.Def.Name = new(Name)
s.Def = asTypesNode(nodlit(v))
asNode(s.Def).Sym = s
asNode(s.Def).Name = new(Name)
s = builtinpkg.Lookup("iota")
s.Def = nod(OIOTA, nil, nil)
s.Def.Sym = s
s.Def.Name = new(Name)
s.Def = asTypesNode(nod(OIOTA, nil, nil))
asNode(s.Def).Sym = s
asNode(s.Def).Name = new(Name)
}
func typeinit() {
@ -160,26 +164,26 @@ func typeinit() {
Fatalf("typeinit before betypeinit")
}
for et := EType(0); et < NTYPE; et++ {
for et := types.EType(0); et < NTYPE; et++ {
simtype[et] = et
}
Types[TPTR32] = typ(TPTR32)
dowidth(Types[TPTR32])
types.Types[TPTR32] = types.New(TPTR32)
dowidth(types.Types[TPTR32])
Types[TPTR64] = typ(TPTR64)
dowidth(Types[TPTR64])
types.Types[TPTR64] = types.New(TPTR64)
dowidth(types.Types[TPTR64])
t := typ(TUNSAFEPTR)
Types[TUNSAFEPTR] = t
t := types.New(TUNSAFEPTR)
types.Types[TUNSAFEPTR] = t
t.Sym = unsafepkg.Lookup("Pointer")
t.Sym.Def = typenod(t)
t.Sym.Def.Name = new(Name)
dowidth(Types[TUNSAFEPTR])
t.Sym.Def = asTypesNode(typenod(t))
asNode(t.Sym.Def).Name = new(Name)
dowidth(types.Types[TUNSAFEPTR])
Tptr = TPTR32
types.Tptr = TPTR32
if Widthptr == 8 {
Tptr = TPTR64
types.Tptr = TPTR64
}
for et := TINT8; et <= TUINT64; et++ {
@ -198,7 +202,7 @@ func typeinit() {
isforw[TFORW] = true
// initialize okfor
for et := EType(0); et < NTYPE; et++ {
for et := types.EType(0); et < NTYPE; et++ {
if isInt[et] || et == TIDEAL {
okforeq[et] = true
okforcmp[et] = true
@ -339,20 +343,20 @@ func typeinit() {
minfltval[TCOMPLEX128] = minfltval[TFLOAT64]
// for walk to use in error messages
Types[TFUNC] = functype(nil, nil, nil)
types.Types[TFUNC] = functype(nil, nil, nil)
// types used in front end
// types[TNIL] got set early in lexinit
Types[TIDEAL] = typ(TIDEAL)
// types.Types[TNIL] got set early in lexinit
types.Types[TIDEAL] = types.New(TIDEAL)
Types[TINTER] = typ(TINTER)
types.Types[TINTER] = types.New(TINTER)
// simple aliases
simtype[TMAP] = Tptr
simtype[TMAP] = types.Tptr
simtype[TCHAN] = Tptr
simtype[TFUNC] = Tptr
simtype[TUNSAFEPTR] = Tptr
simtype[TCHAN] = types.Tptr
simtype[TFUNC] = types.Tptr
simtype[TUNSAFEPTR] = types.Tptr
array_array = int(Rnd(0, int64(Widthptr)))
array_nel = int(Rnd(int64(array_array)+int64(Widthptr), int64(Widthint)))
@ -362,39 +366,39 @@ func typeinit() {
// string is same as slice wo the cap
sizeof_String = int(Rnd(int64(array_nel)+int64(Widthint), int64(Widthptr)))
dowidth(Types[TSTRING])
dowidth(idealstring)
dowidth(types.Types[TSTRING])
dowidth(types.Idealstring)
itable = typPtr(Types[TUINT8])
itable = types.NewPtr(types.Types[TUINT8])
}
func makeErrorInterface() *Type {
field := newField()
field.Type = Types[TSTRING]
f := functypefield(fakethisfield(), nil, []*Field{field})
func makeErrorInterface() *types.Type {
field := types.NewField()
field.Type = types.Types[TSTRING]
f := functypefield(fakethisfield(), nil, []*types.Field{field})
field = newField()
field = types.NewField()
field.Sym = lookup("Error")
field.Type = f
t := typ(TINTER)
t.SetInterface([]*Field{field})
t := types.New(TINTER)
t.SetInterface([]*types.Field{field})
return t
}
func lexinit1() {
// error type
s := builtinpkg.Lookup("error")
errortype = makeErrorInterface()
errortype.Sym = s
types.Errortype = makeErrorInterface()
types.Errortype.Sym = s
// TODO: If we can prove that it's safe to set errortype.Orig here
// then we don't need the special errortype/errorInterface case in
// bexport.go. See also issue #15920.
// errortype.Orig = makeErrorInterface()
s.Def = typenod(errortype)
s.Def = asTypesNode(typenod(types.Errortype))
// We create separate byte and rune types for better error messages
// rather than just creating type alias *Sym's for the uint8 and
// rather than just creating type alias *types.Sym's for the uint8 and
// int32 types. Hence, (bytetype|runtype).Sym.isAlias() is false.
// TODO(gri) Should we get rid of this special case (at the cost
// of less informative error messages involving bytes and runes)?
@ -403,17 +407,17 @@ func lexinit1() {
// byte alias
s = builtinpkg.Lookup("byte")
bytetype = typ(TUINT8)
bytetype.Sym = s
s.Def = typenod(bytetype)
s.Def.Name = new(Name)
types.Bytetype = types.New(TUINT8)
types.Bytetype.Sym = s
s.Def = asTypesNode(typenod(types.Bytetype))
asNode(s.Def).Name = new(Name)
// rune alias
s = builtinpkg.Lookup("rune")
runetype = typ(TINT32)
runetype.Sym = s
s.Def = typenod(runetype)
s.Def.Name = new(Name)
types.Runetype = types.New(TINT32)
types.Runetype.Sym = s
s.Def = asTypesNode(typenod(types.Runetype))
asNode(s.Def).Name = new(Name)
// backend-dependent builtin types (e.g. int).
for _, s := range typedefs {
@ -430,11 +434,11 @@ func lexinit1() {
minintval[s.etype] = minintval[sameas]
maxintval[s.etype] = maxintval[sameas]
t := typ(s.etype)
t := types.New(s.etype)
t.Sym = s1
Types[s.etype] = t
s1.Def = typenod(t)
s1.Def.Name = new(Name)
types.Types[s.etype] = t
s1.Def = asTypesNode(typenod(t))
asNode(s1.Def).Name = new(Name)
s1.Origpkg = builtinpkg
dowidth(t)
@ -461,7 +465,7 @@ func finishUniverse() {
}
nodfp = newname(lookup(".fp"))
nodfp.Type = Types[TINT32]
nodfp.Type = types.Types[TINT32]
nodfp.Class = PPARAM
nodfp.SetUsed(true)
}


@ -5,6 +5,7 @@
package gc
import (
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/sys"
"fmt"
@ -405,9 +406,9 @@ func walkexprlistcheap(s []*Node, init *Nodes) {
// Build name of function for interface conversion.
// Not all names are possible
// (e.g., we'll never generate convE2E or convE2I or convI2E).
func convFuncName(from, to *Type) string {
tkind := to.iet()
switch from.iet() {
func convFuncName(from, to *types.Type) string {
tkind := to.Tie()
switch from.Tie() {
case 'I':
switch tkind {
case 'I':
@ -419,15 +420,15 @@ func convFuncName(from, to *Type) string {
switch {
case from.Size() == 2 && from.Align == 2:
return "convT2E16"
case from.Size() == 4 && from.Align == 4 && !haspointers(from):
case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
return "convT2E32"
case from.Size() == 8 && from.Align == Types[TUINT64].Align && !haspointers(from):
case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
return "convT2E64"
case from.IsString():
return "convT2Estring"
case from.IsSlice():
return "convT2Eslice"
case !haspointers(from):
case !types.Haspointers(from):
return "convT2Enoptr"
}
return "convT2E"
@ -435,21 +436,21 @@ func convFuncName(from, to *Type) string {
switch {
case from.Size() == 2 && from.Align == 2:
return "convT2I16"
case from.Size() == 4 && from.Align == 4 && !haspointers(from):
case from.Size() == 4 && from.Align == 4 && !types.Haspointers(from):
return "convT2I32"
case from.Size() == 8 && from.Align == Types[TUINT64].Align && !haspointers(from):
case from.Size() == 8 && from.Align == types.Types[TUINT64].Align && !types.Haspointers(from):
return "convT2I64"
case from.IsString():
return "convT2Istring"
case from.IsSlice():
return "convT2Islice"
case !haspointers(from):
case !types.Haspointers(from):
return "convT2Inoptr"
}
return "convT2I"
}
}
Fatalf("unknown conv func %c2%c", from.iet(), to.iet())
Fatalf("unknown conv func %c2%c", from.Tie(), to.Tie())
panic("unreachable")
}
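The returned name selects the matching runtime conversion helper. A hypothetical call site (from and to are placeholder *types.Type values; the surrounding walk code resolves the name with syslook):

// from is a concrete 4-byte, 4-aligned, pointer-free type and to is an
// empty interface, so the specialized helper is chosen.
name := convFuncName(from, to) // "convT2E32" under the assumptions above
fn := syslook(name)            // look up the runtime function of that name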
@ -822,7 +823,7 @@ opswitch:
// don't generate a = *var if a is _
if !isblank(a) {
var_ := temp(typPtr(t.Val()))
var_ := temp(types.NewPtr(t.Val()))
var_.Typecheck = 1
var_.SetNonNil(true) // mapaccess always returns a non-nil pointer
n.List.SetFirst(var_)
@ -874,10 +875,10 @@ opswitch:
if staticbytes == nil {
staticbytes = newname(Runtimepkg.Lookup("staticbytes"))
staticbytes.Class = PEXTERN
staticbytes.Type = typArray(Types[TUINT8], 256)
staticbytes.Type = types.NewArray(types.Types[TUINT8], 256)
zerobase = newname(Runtimepkg.Lookup("zerobase"))
zerobase.Class = PEXTERN
zerobase.Type = Types[TUINTPTR]
zerobase.Type = types.Types[TUINTPTR]
}
// Optimize convT2{E,I} for many cases in which T is not pointer-shaped,
@ -931,7 +932,7 @@ opswitch:
init.Append(nod(OAS, c, n.Left))
// Get the itab out of the interface.
tmp := temp(typPtr(Types[TUINT8]))
tmp := temp(types.NewPtr(types.Types[TUINT8]))
init.Append(nod(OAS, tmp, typecheck(nod(OITAB, c, nil), Erv)))
// Get the type out of the itab.
@ -940,7 +941,7 @@ opswitch:
init.Append(nif)
// Build the result.
e := nod(OEFACE, tmp, ifaceData(c, typPtr(Types[TUINT8])))
e := nod(OEFACE, tmp, ifaceData(c, types.NewPtr(types.Types[TUINT8])))
e.Type = n.Type // assign type manually, typecheck doesn't understand OEFACE.
e.Typecheck = 1
n = e
@ -989,24 +990,24 @@ opswitch:
if thearch.LinkArch.Family == sys.ARM || thearch.LinkArch.Family == sys.MIPS {
if n.Left.Type.IsFloat() {
if n.Type.Etype == TINT64 {
n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
break
}
if n.Type.Etype == TUINT64 {
n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
break
}
}
if n.Type.IsFloat() {
if n.Left.Type.Etype == TINT64 {
n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type)
n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
break
}
if n.Left.Type.Etype == TUINT64 {
n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type)
n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
break
}
}
@ -1015,31 +1016,31 @@ opswitch:
if thearch.LinkArch.Family == sys.I386 {
if n.Left.Type.IsFloat() {
if n.Type.Etype == TINT64 {
n = mkcall("float64toint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
n = mkcall("float64toint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
break
}
if n.Type.Etype == TUINT64 {
n = mkcall("float64touint64", n.Type, init, conv(n.Left, Types[TFLOAT64]))
n = mkcall("float64touint64", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
break
}
if n.Type.Etype == TUINT32 || n.Type.Etype == TUINT || n.Type.Etype == TUINTPTR {
n = mkcall("float64touint32", n.Type, init, conv(n.Left, Types[TFLOAT64]))
n = mkcall("float64touint32", n.Type, init, conv(n.Left, types.Types[TFLOAT64]))
break
}
}
if n.Type.IsFloat() {
if n.Left.Type.Etype == TINT64 {
n = conv(mkcall("int64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TINT64])), n.Type)
n = conv(mkcall("int64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TINT64])), n.Type)
break
}
if n.Left.Type.Etype == TUINT64 {
n = conv(mkcall("uint64tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT64])), n.Type)
n = conv(mkcall("uint64tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT64])), n.Type)
break
}
if n.Left.Type.Etype == TUINT32 || n.Left.Type.Etype == TUINT || n.Left.Type.Etype == TUINTPTR {
n = conv(mkcall("uint32tofloat64", Types[TFLOAT64], init, conv(n.Left, Types[TUINT32])), n.Type)
n = conv(mkcall("uint32tofloat64", types.Types[TFLOAT64], init, conv(n.Left, types.Types[TUINT32])), n.Type)
break
}
}
@ -1063,7 +1064,7 @@ opswitch:
if isComplex[et] && n.Op == ODIV {
t := n.Type
n = mkcall("complex128div", Types[TCOMPLEX128], init, conv(n.Left, Types[TCOMPLEX128]), conv(n.Right, Types[TCOMPLEX128]))
n = mkcall("complex128div", types.Types[TCOMPLEX128], init, conv(n.Left, types.Types[TCOMPLEX128]), conv(n.Right, types.Types[TCOMPLEX128]))
n = conv(n, t)
break
}
@ -1107,7 +1108,7 @@ opswitch:
} else {
fn += "mod"
}
n = mkcall(fn, n.Type, init, conv(n.Left, Types[et]), conv(n.Right, Types[et]))
n = mkcall(fn, n.Type, init, conv(n.Left, types.Types[et]), conv(n.Right, types.Types[et]))
}
case OINDEX:
@ -1178,13 +1179,13 @@ opswitch:
}
if w := t.Val().Width; w <= 1024 { // 1024 must match ../../../../runtime/hashmap.go:maxZero
n = mkcall1(mapfn(mapaccess1[fast], t), typPtr(t.Val()), init, typename(t), map_, key)
n = mkcall1(mapfn(mapaccess1[fast], t), types.NewPtr(t.Val()), init, typename(t), map_, key)
} else {
z := zeroaddr(w)
n = mkcall1(mapfn("mapaccess1_fat", t), typPtr(t.Val()), init, typename(t), map_, key, z)
n = mkcall1(mapfn("mapaccess1_fat", t), types.NewPtr(t.Val()), init, typename(t), map_, key, z)
}
}
n.Type = typPtr(t.Val())
n.Type = types.NewPtr(t.Val())
n.SetNonNil(true) // mapaccess1* and mapassign always return non-nil pointers.
n = nod(OIND, n, nil)
n.Type = t.Val()
@ -1303,7 +1304,7 @@ opswitch:
n.Left = cheapexpr(n.Left, init)
n.Right = cheapexpr(n.Right, init)
r = mkcall("eqstring", Types[TBOOL], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
r = mkcall("eqstring", types.Types[TBOOL], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
// quick check of len before full compare for == or !=
// eqstring assumes that the lengths are equal
@ -1321,7 +1322,7 @@ opswitch:
r = walkexpr(r, nil)
} else {
// sys_cmpstring(s1, s2) :: 0
r = mkcall("cmpstring", Types[TINT], init, conv(n.Left, Types[TSTRING]), conv(n.Right, Types[TSTRING]))
r = mkcall("cmpstring", types.Types[TINT], init, conv(n.Left, types.Types[TSTRING]), conv(n.Right, types.Types[TSTRING]))
// TODO(marvin): Fix Node.EType type union.
r = nod(Op(n.Etype), r, nodintconst(0))
}
@ -1351,7 +1352,7 @@ opswitch:
n = mkcall1(fn, nil, init, n.Left)
case OMAKECHAN:
n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]))
n = mkcall1(chanfn("makechan", 1, n.Type), n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]))
case OMAKEMAP:
t := n.Type
@ -1380,7 +1381,7 @@ opswitch:
fn := syslook("makemap")
fn = substArgTypes(fn, hmap(t), mapbucket(t), t.Key(), t.Val())
n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, Types[TINT64]), a, r)
n = mkcall1(fn, n.Type, init, typename(n.Type), conv(n.Left, types.Types[TINT64]), a, r)
case OMAKESLICE:
l := n.Left
@ -1396,7 +1397,7 @@ opswitch:
}
// var arr [r]T
// n = arr[:l]
t = typArray(t.Elem(), nonnegintconst(r)) // [r]T
t = types.NewArray(t.Elem(), nonnegintconst(r)) // [r]T
var_ := temp(t)
a := nod(OAS, var_, nil) // zero temp
a = typecheck(a, Etop)
@ -1419,7 +1420,7 @@ opswitch:
len, cap := l, r
fnname := "makeslice64"
argtype := Types[TINT64]
argtype := types.Types[TINT64]
// typechecking guarantees that TIDEAL len/cap are positive and fit in an int.
// The case of len or cap overflow when converting TUINT or TUINTPTR to TINT
@ -1427,7 +1428,7 @@ opswitch:
if (len.Type.IsKind(TIDEAL) || maxintval[len.Type.Etype].Cmp(maxintval[TUINT]) <= 0) &&
(cap.Type.IsKind(TIDEAL) || maxintval[cap.Type.Etype].Cmp(maxintval[TUINT]) <= 0) {
fnname = "makeslice"
argtype = Types[TINT]
argtype = types.Types[TINT]
}
fn := syslook(fnname)
@ -1438,19 +1439,19 @@ opswitch:
case ORUNESTR:
a := nodnil()
if n.Esc == EscNone {
t := typArray(Types[TUINT8], 4)
t := types.NewArray(types.Types[TUINT8], 4)
var_ := temp(t)
a = nod(OADDR, var_, nil)
}
// intstring(*[4]byte, rune)
n = mkcall("intstring", n.Type, init, a, conv(n.Left, Types[TINT64]))
n = mkcall("intstring", n.Type, init, a, conv(n.Left, types.Types[TINT64]))
case OARRAYBYTESTR:
a := nodnil()
if n.Esc == EscNone {
// Create temporary buffer for string on stack.
t := typArray(Types[TUINT8], tmpstringbufsize)
t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
a = nod(OADDR, temp(t), nil)
}
@ -1476,7 +1477,7 @@ opswitch:
if n.Esc == EscNone {
// Create temporary buffer for string on stack.
t := typArray(Types[TUINT8], tmpstringbufsize)
t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
a = nod(OADDR, temp(t), nil)
}
@ -1489,12 +1490,12 @@ opswitch:
if n.Esc == EscNone {
// Create temporary buffer for slice on stack.
t := typArray(Types[TUINT8], tmpstringbufsize)
t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
a = nod(OADDR, temp(t), nil)
}
n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, Types[TSTRING]))
n = mkcall("stringtoslicebyte", n.Type, init, a, conv(n.Left, types.Types[TSTRING]))
case OSTRARRAYBYTETMP:
// []byte(string) conversion that creates a slice
@ -1512,7 +1513,7 @@ opswitch:
if n.Esc == EscNone {
// Create temporary buffer for slice on stack.
t := typArray(Types[TINT32], tmpstringbufsize)
t := types.NewArray(types.Types[TINT32], tmpstringbufsize)
a = nod(OADDR, temp(t), nil)
}
@ -1537,8 +1538,8 @@ opswitch:
rt := nod(OITAB, n.Right, nil)
ld := nod(OIDATA, n.Left, nil)
rd := nod(OIDATA, n.Right, nil)
ld.Type = Types[TUNSAFEPTR]
rd.Type = Types[TUNSAFEPTR]
ld.Type = types.Types[TUNSAFEPTR]
rd.Type = types.Types[TUNSAFEPTR]
ld.Typecheck = 1
rd.Typecheck = 1
call := mkcall1(fn, n.Type, init, lt, ld, rd)
@ -1680,7 +1681,7 @@ func ascompatee(op Op, nl, nr []*Node, init *Nodes) []*Node {
// return 1 if this implies a function call
// evaluating the lv or a function call
// in the conversion of the types
func fncall(l *Node, rt *Type) bool {
func fncall(l *Node, rt *types.Type) bool {
if l.HasCall() || l.Op == OINDEXMAP {
return true
}
@ -1696,7 +1697,7 @@ func fncall(l *Node, rt *Type) bool {
// check assign type list to
// an expression list. called in
// expr-list = func()
func ascompatet(op Op, nl Nodes, nr *Type) []*Node {
func ascompatet(op Op, nl Nodes, nr *types.Type) []*Node {
if nl.Len() != nr.NumFields() {
Fatalf("ascompatet: assignment count mismatch: %d = %d", nl.Len(), nr.NumFields())
}
@ -1734,7 +1735,7 @@ func ascompatet(op Op, nl Nodes, nr *Type) []*Node {
}
// package all the arguments that match a ... T parameter into a []T.
func mkdotargslice(typ *Type, args []*Node, init *Nodes, ddd *Node) *Node {
func mkdotargslice(typ *types.Type, args []*Node, init *Nodes, ddd *Node) *Node {
esc := uint16(EscUnknown)
if ddd != nil {
esc = ddd.Esc
@ -1764,7 +1765,7 @@ func mkdotargslice(typ *Type, args []*Node, init *Nodes, ddd *Node) *Node {
// a type list. called in
// return expr-list
// func(expr-list)
func ascompatte(call *Node, isddd bool, lhs *Type, rhs []*Node, fp int, init *Nodes) []*Node {
func ascompatte(call *Node, isddd bool, lhs *types.Type, rhs []*Node, fp int, init *Nodes) []*Node {
var nn []*Node
// f(g()) where g has multiple return values
@ -1824,8 +1825,8 @@ func walkprint(nn *Node, init *Nodes) *Node {
var r *Node
var n *Node
var on *Node
var t *Type
var et EType
var t *types.Type
var et types.EType
op := nn.Op
all := nn.List
@ -1847,18 +1848,18 @@ func walkprint(nn *Node, init *Nodes) *Node {
if n.Op == OLITERAL {
switch n.Val().Ctype() {
case CTRUNE:
n = defaultlit(n, runetype)
n = defaultlit(n, types.Runetype)
case CTINT:
n = defaultlit(n, Types[TINT64])
n = defaultlit(n, types.Types[TINT64])
case CTFLT:
n = defaultlit(n, Types[TFLOAT64])
n = defaultlit(n, types.Types[TFLOAT64])
}
}
if n.Op != OLITERAL && n.Type != nil && n.Type.Etype == TIDEAL {
n = defaultlit(n, Types[TINT64])
n = defaultlit(n, types.Types[TINT64])
}
n = defaultlit(n, nil)
all.SetIndex(i1, n)
@ -1883,7 +1884,7 @@ func walkprint(nn *Node, init *Nodes) *Node {
on = substArgTypes(on, n.Type) // any-1
} else if isInt[et] {
if et == TUINT64 {
if t.Sym.Pkg.isRuntime() && t.Sym.Name == "hex" {
if isRuntimePkg(t.Sym.Pkg) && t.Sym.Name == "hex" {
on = syslook("printhex")
} else {
on = syslook("printuint")
@ -1932,14 +1933,14 @@ func walkprint(nn *Node, init *Nodes) *Node {
return r
}
func callnew(t *Type) *Node {
func callnew(t *types.Type) *Node {
if t.NotInHeap() {
yyerror("%v is go:notinheap; heap allocation disallowed", t)
}
dowidth(t)
fn := syslook("newobject")
fn = substArgTypes(fn, t)
v := mkcall1(fn, typPtr(t), nil, typename(t))
v := mkcall1(fn, types.NewPtr(t), nil, typename(t))
v.SetNonNil(true)
return v
}
@ -1978,11 +1979,11 @@ func isstack(n *Node) bool {
// isReflectHeaderDataField reports whether l is an expression p.Data
// where p has type reflect.SliceHeader or reflect.StringHeader.
func isReflectHeaderDataField(l *Node) bool {
if l.Type != Types[TUINTPTR] {
if l.Type != types.Types[TUINTPTR] {
return false
}
var tsym *Sym
var tsym *types.Sym
switch l.Op {
case ODOT:
tsym = l.Left.Type.Sym
@ -2022,7 +2023,7 @@ func needwritebarrier(l *Node) bool {
// No write barrier for write of non-pointers.
dowidth(l.Type)
if !haspointers(l.Type) {
if !types.Haspointers(l.Type) {
return false
}
@ -2051,8 +2052,8 @@ func convas(n *Node, init *Nodes) *Node {
n.Typecheck = 1
var lt *Type
var rt *Type
var lt *types.Type
var rt *types.Type
if n.Left == nil || n.Right == nil {
goto out
}
@ -2436,10 +2437,10 @@ func vmatch1(l *Node, r *Node) bool {
// paramstoheap returns code to allocate memory for heap-escaped parameters
// and to copy non-result parameters' values from the stack.
func paramstoheap(params *Type) []*Node {
func paramstoheap(params *types.Type) []*Node {
var nn []*Node
for _, t := range params.Fields().Slice() {
v := t.Nname
v := asNode(t.Nname)
if v != nil && v.Sym != nil && strings.HasPrefix(v.Sym.Name, "~r") { // unnamed result
v = nil
}
@ -2469,7 +2470,7 @@ func zeroResults() {
lno := lineno
lineno = Curfn.Pos
for _, f := range Curfn.Type.Results().Fields().Slice() {
if v := f.Nname; v != nil && v.Name.Param.Heapaddr != nil {
if v := asNode(f.Nname); v != nil && v.Name.Param.Heapaddr != nil {
// The local which points to the return value is the
// thing that needs zeroing. This is already handled
// by a Needzero annotation in plive.go:livenessepilogue.
@ -2483,10 +2484,10 @@ func zeroResults() {
// returnsfromheap returns code to copy values for heap-escaped parameters
// back to the stack.
func returnsfromheap(params *Type) []*Node {
func returnsfromheap(params *types.Type) []*Node {
var nn []*Node
for _, t := range params.Fields().Slice() {
v := t.Nname
v := asNode(t.Nname)
if v == nil {
continue
}
@ -2513,7 +2514,7 @@ func heapmoves() {
lineno = lno
}
func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node {
func vmkcall(fn *Node, t *types.Type, init *Nodes, va []*Node) *Node {
if fn.Type == nil || fn.Type.Etype != TFUNC {
Fatalf("mkcall %v %v", fn, fn.Type)
}
@ -2532,15 +2533,15 @@ func vmkcall(fn *Node, t *Type, init *Nodes, va []*Node) *Node {
return r
}
func mkcall(name string, t *Type, init *Nodes, args ...*Node) *Node {
func mkcall(name string, t *types.Type, init *Nodes, args ...*Node) *Node {
return vmkcall(syslook(name), t, init, args)
}
func mkcall1(fn *Node, t *Type, init *Nodes, args ...*Node) *Node {
func mkcall1(fn *Node, t *types.Type, init *Nodes, args ...*Node) *Node {
return vmkcall(fn, t, init, args)
}
func conv(n *Node, t *Type) *Node {
func conv(n *Node, t *types.Type) *Node {
if eqtype(n.Type, t) {
return n
}
@ -2554,16 +2555,16 @@ func conv(n *Node, t *Type) *Node {
// We cannot use conv, because we allow converting bool to uint8 here,
// which is forbidden in user code.
func byteindex(n *Node) *Node {
if eqtype(n.Type, Types[TUINT8]) {
if eqtype(n.Type, types.Types[TUINT8]) {
return n
}
n = nod(OCONV, n, nil)
n.Type = Types[TUINT8]
n.Type = types.Types[TUINT8]
n.Typecheck = 1
return n
}
func chanfn(name string, n int, t *Type) *Node {
func chanfn(name string, n int, t *types.Type) *Node {
if !t.IsChan() {
Fatalf("chanfn %v", t)
}
@ -2579,7 +2580,7 @@ func chanfn(name string, n int, t *Type) *Node {
return fn
}
func mapfn(name string, t *Type) *Node {
func mapfn(name string, t *types.Type) *Node {
if !t.IsMap() {
Fatalf("mapfn %v", t)
}
@ -2588,7 +2589,7 @@ func mapfn(name string, t *Type) *Node {
return fn
}
func mapfndel(name string, t *Type) *Node {
func mapfndel(name string, t *types.Type) *Node {
if !t.IsMap() {
Fatalf("mapfn %v", t)
}
@ -2616,7 +2617,7 @@ var mapaccess2 mapnames = mkmapnames("mapaccess2")
var mapassign mapnames = mkmapnames("mapassign")
var mapdelete mapnames = mkmapnames("mapdelete")
func mapfast(t *Type) int {
func mapfast(t *types.Type) int {
// Check ../../runtime/hashmap.go:maxValueSize before changing.
if t.Val().Width > 128 {
return mapslow
@ -2632,7 +2633,7 @@ func mapfast(t *Type) int {
return mapslow
}
func writebarrierfn(name string, l *Type, r *Type) *Node {
func writebarrierfn(name string, l *types.Type, r *types.Type) *Node {
fn := syslook(name)
fn = substArgTypes(fn, l, r)
return fn
@ -2658,7 +2659,7 @@ func addstr(n *Node, init *Nodes) *Node {
// Don't allocate the buffer if the result won't fit.
if sz < tmpstringbufsize {
// Create temporary buffer for result string on stack.
t := typArray(Types[TUINT8], tmpstringbufsize)
t := types.NewArray(types.Types[TUINT8], tmpstringbufsize)
buf = nod(OADDR, temp(t), nil)
}
@ -2667,7 +2668,7 @@ func addstr(n *Node, init *Nodes) *Node {
// build list of string arguments
args := []*Node{buf}
for _, n2 := range n.List.Slice() {
args = append(args, conv(n2, Types[TSTRING]))
args = append(args, conv(n2, types.Types[TSTRING]))
}
var fn string
@ -2679,7 +2680,7 @@ func addstr(n *Node, init *Nodes) *Node {
// large numbers of strings are passed to the runtime as a slice.
fn = "concatstrings"
t := typSlice(Types[TSTRING])
t := types.NewSlice(types.Types[TSTRING])
slice := nod(OCOMPLIT, nil, typenod(t))
if prealloc[n] != nil {
prealloc[slice] = prealloc[n]
@ -2734,14 +2735,14 @@ func appendslice(n *Node, init *Nodes) *Node {
l = append(l, nod(OAS, s, l1)) // s = l1
// n := len(s) + len(l2)
nn := temp(Types[TINT])
nn := temp(types.Types[TINT])
l = append(l, nod(OAS, nn, nod(OADD, nod(OLEN, s, nil), nod(OLEN, l2, nil))))
// if uint(n) > uint(cap(s))
nif := nod(OIF, nil, nil)
nif.Left = nod(OGT, nod(OCONV, nn, nil), nod(OCONV, nod(OCAP, s, nil), nil))
nif.Left.Left.Type = Types[TUINT]
nif.Left.Right.Type = Types[TUINT]
nif.Left.Left.Type = types.Types[TUINT]
nif.Left.Right.Type = types.Types[TUINT]
// instantiate growslice(Type*, []any, int) []any
fn := syslook("growslice")
@ -2757,7 +2758,7 @@ func appendslice(n *Node, init *Nodes) *Node {
nt.Etype = 1
l = append(l, nod(OAS, s, nt))
if haspointers(l1.Type.Elem()) {
if types.Haspointers(l1.Type.Elem()) {
// copy(s[len(l1):], l2)
nptr1 := nod(OSLICE, s, nil)
nptr1.SetSliceBounds(nod(OLEN, l1, nil), nil, nil)
@ -2767,7 +2768,7 @@ func appendslice(n *Node, init *Nodes) *Node {
fn = substArgTypes(fn, l1.Type, l2.Type)
var ln Nodes
ln.Set(l)
nt := mkcall1(fn, Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
nt := mkcall1(fn, types.Types[TINT], &ln, typename(l1.Type.Elem()), nptr1, nptr2)
l = append(ln.Slice(), nt)
} else if instrumenting && !compiling_runtime {
// rely on runtime to instrument copy.
@ -2785,7 +2786,7 @@ func appendslice(n *Node, init *Nodes) *Node {
fn = substArgTypes(fn, l1.Type, l2.Type)
var ln Nodes
ln.Set(l)
nt := mkcall1(fn, Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
nt := mkcall1(fn, types.Types[TINT], &ln, nptr1, nptr2, nodintconst(s.Type.Elem().Width))
l = append(ln.Slice(), nt)
} else {
// memmove(&s[len(l1)], &l2[0], len(l2)*sizeof(T))
@ -2801,7 +2802,7 @@ func appendslice(n *Node, init *Nodes) *Node {
var ln Nodes
ln.Set(l)
nwid := cheapexpr(conv(nod(OLEN, l2, nil), Types[TUINTPTR]), &ln)
nwid := cheapexpr(conv(nod(OLEN, l2, nil), types.Types[TUINTPTR]), &ln)
nwid = nod(OMUL, nwid, nodintconst(s.Type.Elem().Width))
nt := mkcall1(fn, nil, &ln, nptr1, nptr2, nwid)
@ -2884,7 +2885,7 @@ func walkappend(n *Node, init *Nodes, dst *Node) *Node {
l = append(l, nx)
nn := temp(Types[TINT])
nn := temp(types.Types[TINT])
l = append(l, nod(OAS, nn, nod(OLEN, ns, nil))) // n = len(s)
nx = nod(OSLICE, ns, nil) // ...s[:n+argc]
@ -2920,7 +2921,7 @@ func walkappend(n *Node, init *Nodes, dst *Node) *Node {
// Also works if b is a string.
//
func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
if haspointers(n.Left.Type.Elem()) {
if types.Haspointers(n.Left.Type.Elem()) {
fn := writebarrierfn("typedslicecopy", n.Left.Type, n.Right.Type)
return mkcall1(fn, n.Type, init, typename(n.Left.Type.Elem()), n.Left, n.Right)
}
@ -2947,7 +2948,7 @@ func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
nfrm := nod(OSPTR, nr, nil)
nto := nod(OSPTR, nl, nil)
nlen := temp(Types[TINT])
nlen := temp(types.Types[TINT])
// n = len(to)
l = append(l, nod(OAS, nlen, nod(OLEN, nl, nil)))
@ -2963,8 +2964,8 @@ func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
fn := syslook("memmove")
fn = substArgTypes(fn, nl.Type.Elem(), nl.Type.Elem())
nwid := temp(Types[TUINTPTR])
l = append(l, nod(OAS, nwid, conv(nlen, Types[TUINTPTR])))
nwid := temp(types.Types[TUINTPTR])
l = append(l, nod(OAS, nwid, conv(nlen, types.Types[TUINTPTR])))
nwid = nod(OMUL, nwid, nodintconst(nl.Type.Elem().Width))
l = append(l, mkcall1(fn, nil, init, nto, nfrm, nwid))
@ -2974,7 +2975,7 @@ func copyany(n *Node, init *Nodes, runtimecall bool) *Node {
return nlen
}
func eqfor(t *Type, needsize *int) *Node {
func eqfor(t *types.Type, needsize *int) *Node {
// Should only arrive here with large memory or
// a struct/array containing a non-memory field/element.
// Small memory is handled inline, and single non-memory
@ -2990,9 +2991,9 @@ func eqfor(t *Type, needsize *int) *Node {
n := newname(sym)
n.Class = PFUNC
ntype := nod(OTFUNC, nil, nil)
ntype.List.Append(anonfield(typPtr(t)))
ntype.List.Append(anonfield(typPtr(t)))
ntype.Rlist.Append(anonfield(Types[TBOOL]))
ntype.List.Append(anonfield(types.NewPtr(t)))
ntype.List.Append(anonfield(types.NewPtr(t)))
ntype.Rlist.Append(anonfield(types.Types[TBOOL]))
ntype = typecheck(ntype, Etype)
n.Type = ntype.Type
*needsize = 0
@ -3036,7 +3037,7 @@ func walkcompare(n *Node, init *Nodes) *Node {
tab := nod(OITAB, l, nil)
rtyp := typename(r.Type)
if l.Type.IsEmptyInterface() {
tab.Type = typPtr(Types[TUINT8])
tab.Type = types.NewPtr(types.Types[TUINT8])
tab.Typecheck = 1
eqtype = nod(eq, tab, rtyp)
} else {
@ -3093,13 +3094,13 @@ func walkcompare(n *Node, init *Nodes) *Node {
}
// eq algs take pointers
pl := temp(typPtr(t))
pl := temp(types.NewPtr(t))
al := nod(OAS, pl, nod(OADDR, cmpl, nil))
al.Right.Etype = 1 // addr does not escape
al = typecheck(al, Etop)
init.Append(al)
pr := temp(typPtr(t))
pr := temp(types.NewPtr(t))
ar := nod(OAS, pr, nod(OADDR, cmpr, nil))
ar.Right.Etype = 1 // addr does not escape
ar = typecheck(ar, Etop)
@ -3291,7 +3292,7 @@ func walkinrange(n *Node, init *Nodes) *Node {
// This is equivalent to (a-a) ≤ (b-a) && (b-a) < (c-a),
// which is equivalent to 0 ≤ (b-a) && (b-a) < (c-a),
// which is equivalent to uint(b-a) < uint(c-a).
ut := b.Type.toUnsigned()
ut := b.Type.ToUnsigned()
lhs := conv(nod(OSUB, b, a), ut)
rhs := nodintconst(bound)
if negateResult {
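The folding described in the comment above leans on unsigned wraparound: once a ≤ b is established, a ≤ x && x < c holds exactly when uint(x-a) < uint(c-a), because a negative x-a wraps to a huge unsigned value. A standalone check with arbitrary sample bounds (not compiler code):

package main

import "fmt"

func main() {
	const a, c = 10, 20 // sample bounds with a <= c
	for x := -5; x < 30; x++ {
		direct := a <= x && x < c
		folded := uint(x-a) < uint(c-a) // one unsigned compare
		if direct != folded {
			fmt.Println("mismatch at", x)
		}
	}
	fmt.Println("range check and unsigned compare agree")
}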
@ -3389,7 +3390,7 @@ func usemethod(n *Node) {
}
p0 := t.Params().Field(0)
res0 := t.Results().Field(0)
var res1 *Field
var res1 *types.Field
if t.Results().NumFields() == 2 {
res1 = t.Results().Field(1)
}
@ -3435,7 +3436,7 @@ func usefield(n *Node) {
if t.IsPtr() {
t = t.Elem()
}
field := dotField[typeSym{t.Orig, n.Sym}]
field := dotField[typeSymKey{t.Orig, n.Sym}]
if field == nil {
Fatalf("usefield %v %v without paramfld", n.Left.Type, n.Sym)
}
@ -3456,7 +3457,7 @@ func usefield(n *Node) {
sym := tracksym(outer, field)
if Curfn.Func.FieldTrack == nil {
Curfn.Func.FieldTrack = make(map[*Sym]struct{})
Curfn.Func.FieldTrack = make(map[*types.Sym]struct{})
}
Curfn.Func.FieldTrack[sym] = struct{}{}
}
@ -3629,3 +3630,21 @@ func walkprintfunc(n *Node, init *Nodes) *Node {
a = walkexpr(a, init)
return a
}
// substArgTypes substitutes the given list of types for
// successive occurrences of the "any" placeholder in the
// type syntax expression n.Type.
// The result of substArgTypes MUST be assigned back to old, e.g.
// n.Left = substArgTypes(n.Left, t1, t2)
func substArgTypes(old *Node, types_ ...*types.Type) *Node {
n := *old // make shallow copy
for _, t := range types_ {
dowidth(t)
}
n.Type = types.SubstAny(n.Type, &types_)
if len(types_) > 0 {
Fatalf("substArgTypes: too many argument types")
}
return &n
}
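The contract in the comment matters because substArgTypes works on a shallow copy: dropping the result silently leaves the "any" placeholders in place. Spelled out with the hypothetical names from the comment:

// Correct: the copy with substituted types replaces the original node.
n.Left = substArgTypes(n.Left, t1, t2) // two "any" slots filled by t1 and t2

// Incorrect: the shallow copy is discarded and n.Left.Type still says "any".
// substArgTypes(n.Left, t1, t2)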


@ -0,0 +1,70 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package types
import "cmd/internal/obj"
type Pkg struct {
Name string // package name, e.g. "sys"
Path string // string literal used in import statement, e.g. "runtime/internal/sys"
Pathsym *obj.LSym
Prefix string // escaped path for use in symbol table
Imported bool // export data of this package was parsed
Direct bool // imported directly
Syms map[string]*Sym
}
var Nopkg = &Pkg{
Syms: make(map[string]*Sym),
}
func (pkg *Pkg) Lookup(name string) *Sym {
s, _ := pkg.LookupOK(name)
return s
}
var InitSyms []*Sym
// LookupOK looks up name in pkg and reports whether it previously existed.
func (pkg *Pkg) LookupOK(name string) (s *Sym, existed bool) {
if pkg == nil {
pkg = Nopkg
}
if s := pkg.Syms[name]; s != nil {
return s, true
}
s = &Sym{
Name: name,
Pkg: pkg,
}
if name == "init" {
InitSyms = append(InitSyms, s)
}
pkg.Syms[name] = s
return s, false
}
func (pkg *Pkg) LookupBytes(name []byte) *Sym {
if pkg == nil {
pkg = Nopkg
}
if s := pkg.Syms[string(name)]; s != nil {
return s
}
str := InternString(name)
return pkg.Lookup(str)
}
var internedStrings = map[string]string{}
func InternString(b []byte) string {
s, ok := internedStrings[string(b)] // string(b) here doesn't allocate
if !ok {
s = string(b)
internedStrings[s] = s
}
return s
}
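LookupOK interns exactly one Sym per (package, name) pair, and LookupBytes routes through InternString so that repeated []byte keys do not allocate a fresh string each time. A usage sketch, assuming any *types.Pkg in hand (types.Nopkg serves for illustration):

pkg := types.Nopkg

a := pkg.Lookup("x")            // creates and interns the Sym
b, existed := pkg.LookupOK("x") // finds the same Sym
fmt.Println(a == b, existed)    // true true

// LookupBytes hits the interned string, so string([]byte) only
// allocates the first time a given name is seen.
c := pkg.LookupBytes([]byte("x"))
fmt.Println(a == c) // true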


@ -0,0 +1,62 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package types
import (
"cmd/internal/obj"
"cmd/internal/src"
)
// Sym represents an object name. Most commonly, this is a Go identifier naming
// an object declared within a package, but Syms are also used to name internal
// synthesized objects.
//
// As an exception, field and method names that are exported use the Sym
// associated with localpkg instead of the package that declared them. This
// allows using Sym pointer equality to test for Go identifier uniqueness when
// handling selector expressions.
type Sym struct {
Link *Sym
Importdef *Pkg // where imported definition was found
Linkname string // link name
// saved and restored by dcopy
Pkg *Pkg
Name string // object name
Def *Node // definition: ONAME OTYPE OPACK or OLITERAL
Lastlineno src.XPos // last declaration for diagnostic
Block int32 // blocknumber to catch redeclaration
flags bitset8
Label *Node // corresponding label (ephemeral)
Origpkg *Pkg // original package for . import
Lsym *obj.LSym
}
const (
symExport = 1 << iota // added to exportlist (no need to add again)
symPackage
symExported // already written out by export
symUniq
symSiggen
symAsm
symAlgGen
)
func (sym *Sym) Export() bool { return sym.flags&symExport != 0 }
func (sym *Sym) Package() bool { return sym.flags&symPackage != 0 }
func (sym *Sym) Exported() bool { return sym.flags&symExported != 0 }
func (sym *Sym) Uniq() bool { return sym.flags&symUniq != 0 }
func (sym *Sym) Siggen() bool { return sym.flags&symSiggen != 0 }
func (sym *Sym) Asm() bool { return sym.flags&symAsm != 0 }
func (sym *Sym) AlgGen() bool { return sym.flags&symAlgGen != 0 }
func (sym *Sym) SetExport(b bool) { sym.flags.set(symExport, b) }
func (sym *Sym) SetPackage(b bool) { sym.flags.set(symPackage, b) }
func (sym *Sym) SetExported(b bool) { sym.flags.set(symExported, b) }
func (sym *Sym) SetUniq(b bool) { sym.flags.set(symUniq, b) }
func (sym *Sym) SetSiggen(b bool) { sym.flags.set(symSiggen, b) }
func (sym *Sym) SetAsm(b bool) { sym.flags.set(symAsm, b) }
func (sym *Sym) SetAlgGen(b bool) { sym.flags.set(symAlgGen, b) }
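All seven flags share the single flags byte. The bitset8 type and its set method live elsewhere in the compiler; a standalone sketch that reimplements the same pattern (the helper here is an assumption, not the compiler's definition):

package main

import "fmt"

// bitset8 mirrors the pattern used for Sym.flags: one byte, one bit per flag.
type bitset8 uint8

func (f *bitset8) set(mask uint8, b bool) {
	if b {
		*(*uint8)(f) |= mask
	} else {
		*(*uint8)(f) &^= mask
	}
}

const (
	symExport = 1 << iota
	symPackage
	symExported
)

func main() {
	var flags bitset8
	flags.set(symExport, true)
	flags.set(symExported, true)
	fmt.Printf("%03b\n", flags)        // 101
	fmt.Println(flags&symPackage != 0) // false
}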


@ -1,13 +1,8 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file provides methods that let us export a Type as an ../ssa:Type.
// We don't export this package's Type directly because it would lead
// to an import cycle with this package and ../ssa.
// TODO: move Type to its own package, then we don't need to dance around import cycles.
package gc
package types
import (
"cmd/compile/internal/ssa"
@ -16,6 +11,11 @@ import (
"fmt"
)
// Dummy Node so we can refer to *Node without actually
// having a gc.Node. Necessary to break import cycles.
// TODO(gri) try to eliminate soon
type Node struct{ _ int }
// EType describes a kind of type.
type EType uint8
@ -98,23 +98,23 @@ var Types [NTYPE]*Type
var (
// Predeclared alias types. Kept separate for better error messages.
bytetype *Type
runetype *Type
Bytetype *Type
Runetype *Type
// Predeclared error interface type.
errortype *Type
Errortype *Type
// Types to represent untyped string and boolean constants.
idealstring *Type
idealbool *Type
Idealstring *Type
Idealbool *Type
// Types to represent untyped numeric constants.
// Note: Currently these are only used within the binary export
// data format. The rest of the compiler only uses Types[TIDEAL].
idealint = typ(TIDEAL)
idealrune = typ(TIDEAL)
idealfloat = typ(TIDEAL)
idealcomplex = typ(TIDEAL)
Idealint = New(TIDEAL)
Idealrune = New(TIDEAL)
Idealfloat = New(TIDEAL)
Idealcomplex = New(TIDEAL)
)
// A Type represents a Go type.
@ -143,11 +143,11 @@ type Type struct {
methods Fields
allMethods Fields
nod *Node // canonical OTYPE node
Nod *Node // canonical OTYPE node
Orig *Type // original type (type literal or predefined type)
sliceOf *Type
ptrTo *Type
SliceOf *Type
PtrBase *Type
Sym *Sym // symbol containing name, for named types
Vargen int32 // unique name for OTYPE/ONAME
@ -260,7 +260,7 @@ func (t *Type) StructType() *StructType {
// InterType contains Type fields specific to interface types.
type InterType struct {
fields Fields
Fields Fields
}
// PtrType contains Type fields specific to pointer types.
@ -396,8 +396,8 @@ func (f *Fields) Append(s ...*Field) {
*f.s = append(*f.s, s...)
}
// typ returns a new Type of the specified kind.
func typ(et EType) *Type {
// New returns a new Type of the specified kind.
func New(et EType) *Type {
t := &Type{
Etype: et,
Width: BADWIDTH,
@ -429,138 +429,120 @@ func typ(et EType) *Type {
return t
}
// typArray returns a new fixed-length array Type.
func typArray(elem *Type, bound int64) *Type {
// NewArray returns a new fixed-length array Type.
func NewArray(elem *Type, bound int64) *Type {
if bound < 0 {
Fatalf("typArray: invalid bound %v", bound)
Fatalf("NewArray: invalid bound %v", bound)
}
t := typ(TARRAY)
t := New(TARRAY)
t.Extra = &ArrayType{Elem: elem, Bound: bound}
t.SetNotInHeap(elem.NotInHeap())
return t
}
// typSlice returns the slice Type with element type elem.
func typSlice(elem *Type) *Type {
if t := elem.sliceOf; t != nil {
// NewSlice returns the slice Type with element type elem.
func NewSlice(elem *Type) *Type {
if t := elem.SliceOf; t != nil {
if t.Elem() != elem {
Fatalf("elem mismatch")
}
return t
}
t := typ(TSLICE)
t := New(TSLICE)
t.Extra = SliceType{Elem: elem}
elem.sliceOf = t
elem.SliceOf = t
return t
}
// typDDDArray returns a new [...]T array Type.
func typDDDArray(elem *Type) *Type {
t := typ(TARRAY)
// NewDDDArray returns a new [...]T array Type.
func NewDDDArray(elem *Type) *Type {
t := New(TARRAY)
t.Extra = &ArrayType{Elem: elem, Bound: -1}
t.SetNotInHeap(elem.NotInHeap())
return t
}
// typChan returns a new chan Type with direction dir.
func typChan(elem *Type, dir ChanDir) *Type {
t := typ(TCHAN)
// NewChan returns a new chan Type with direction dir.
func NewChan(elem *Type, dir ChanDir) *Type {
t := New(TCHAN)
ct := t.ChanType()
ct.Elem = elem
ct.Dir = dir
return t
}
// typMap returns a new map Type with key type k and element (aka value) type v.
func typMap(k, v *Type) *Type {
t := typ(TMAP)
// NewMap returns a new map Type with key type k and element (aka value) type v.
func NewMap(k, v *Type) *Type {
t := New(TMAP)
mt := t.MapType()
mt.Key = k
mt.Val = v
return t
}
// typPtrCacheEnabled controls whether *T Types are cached in T.
// NewPtrCacheEnabled controls whether *T Types are cached in T.
// Caching is disabled just before starting the backend.
// This allows the backend to run concurrently.
var typPtrCacheEnabled = true
var NewPtrCacheEnabled = true
// typPtr returns the pointer type pointing to t.
func typPtr(elem *Type) *Type {
// NewPtr returns the pointer type pointing to t.
func NewPtr(elem *Type) *Type {
if elem == nil {
Fatalf("typPtr: pointer to elem Type is nil")
Fatalf("NewPtr: pointer to elem Type is nil")
}
if t := elem.ptrTo; t != nil {
if t := elem.PtrBase; t != nil {
if t.Elem() != elem {
Fatalf("typPtr: elem mismatch")
Fatalf("NewPtr: elem mismatch")
}
return t
}
if Tptr == 0 {
Fatalf("typPtr: Tptr not initialized")
Fatalf("NewPtr: Tptr not initialized")
}
t := typ(Tptr)
t := New(Tptr)
t.Extra = PtrType{Elem: elem}
t.Width = int64(Widthptr)
t.Align = uint8(Widthptr)
if typPtrCacheEnabled {
elem.ptrTo = t
if NewPtrCacheEnabled {
elem.PtrBase = t
}
return t
}
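Because the pointer type is cached on its element, NewPtr is canonicalizing: every call with the same element returns the identical *Type, so pointer identity doubles as type identity for derived pointers. A usage sketch (elem stands for any already-built *types.Type):

p1 := types.NewPtr(elem)
p2 := types.NewPtr(elem)
// p1 == p2 while NewPtrCacheEnabled is true: both calls return the
// *Type stored in elem.PtrBase. The cache is switched off just before
// the backend runs so that concurrent compilation never writes PtrBase.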
// typDDDField returns a new TDDDFIELD type for slice type s.
func typDDDField(s *Type) *Type {
t := typ(TDDDFIELD)
// NewDDDField returns a new TDDDFIELD type for slice type s.
func NewDDDField(s *Type) *Type {
t := New(TDDDFIELD)
t.Extra = DDDFieldType{T: s}
return t
}
// typChanArgs returns a new TCHANARGS type for channel type c.
func typChanArgs(c *Type) *Type {
t := typ(TCHANARGS)
// NewChanArgs returns a new TCHANARGS type for channel type c.
func NewChanArgs(c *Type) *Type {
t := New(TCHANARGS)
t.Extra = ChanArgsType{T: c}
return t
}
// typFuncArgs returns a new TFUNCARGS type for func type f.
func typFuncArgs(f *Type) *Type {
t := typ(TFUNCARGS)
// NewFuncArgs returns a new TFUNCARGS type for func type f.
func NewFuncArgs(f *Type) *Type {
t := New(TFUNCARGS)
t.Extra = FuncArgsType{T: f}
return t
}
func newField() *Field {
func NewField() *Field {
return &Field{
Offset: BADWIDTH,
}
}
// substArgTypes substitutes the given list of types for
// successive occurrences of the "any" placeholder in the
// type syntax expression n.Type.
// The result of substArgTypes MUST be assigned back to old, e.g.
// n.Left = substArgTypes(n.Left, t1, t2)
func substArgTypes(old *Node, types ...*Type) *Node {
n := *old // make shallow copy
for _, t := range types {
dowidth(t)
}
n.Type = substAny(n.Type, &types)
if len(types) > 0 {
Fatalf("substArgTypes: too many argument types")
}
return &n
}
// substAny walks t, replacing instances of "any" with successive
// SubstAny walks t, replacing instances of "any" with successive
// elements removed from types. It returns the substituted type.
func substAny(t *Type, types *[]*Type) *Type {
func SubstAny(t *Type, types *[]*Type) *Type {
if t == nil {
return nil
}
@ -577,36 +559,36 @@ func substAny(t *Type, types *[]*Type) *Type {
*types = (*types)[1:]
case TPTR32, TPTR64:
elem := substAny(t.Elem(), types)
elem := SubstAny(t.Elem(), types)
if elem != t.Elem() {
t = t.Copy()
t.Extra = PtrType{Elem: elem}
}
case TARRAY:
elem := substAny(t.Elem(), types)
elem := SubstAny(t.Elem(), types)
if elem != t.Elem() {
t = t.Copy()
t.Extra.(*ArrayType).Elem = elem
}
case TSLICE:
elem := substAny(t.Elem(), types)
elem := SubstAny(t.Elem(), types)
if elem != t.Elem() {
t = t.Copy()
t.Extra = SliceType{Elem: elem}
}
case TCHAN:
elem := substAny(t.Elem(), types)
elem := SubstAny(t.Elem(), types)
if elem != t.Elem() {
t = t.Copy()
t.Extra.(*ChanType).Elem = elem
}
case TMAP:
key := substAny(t.Key(), types)
val := substAny(t.Val(), types)
key := SubstAny(t.Key(), types)
val := SubstAny(t.Val(), types)
if key != t.Key() || val != t.Val() {
t = t.Copy()
t.Extra.(*MapType).Key = key
@ -614,9 +596,9 @@ func substAny(t *Type, types *[]*Type) *Type {
}
case TFUNC:
recvs := substAny(t.Recvs(), types)
params := substAny(t.Params(), types)
results := substAny(t.Results(), types)
recvs := SubstAny(t.Recvs(), types)
params := SubstAny(t.Params(), types)
results := SubstAny(t.Results(), types)
if recvs != t.Recvs() || params != t.Params() || results != t.Results() {
t = t.Copy()
t.FuncType().Receiver = recvs
@@ -628,7 +610,7 @@ func substAny(t *Type, types *[]*Type) *Type {
fields := t.FieldSlice()
var nfs []*Field
for i, f := range fields {
nft := substAny(f.Type, types)
nft := SubstAny(f.Type, types)
if nft == f.Type {
continue
}
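SubstAny consumes the supplied slice destructively: each TANY encountered takes the next entry and shrinks *types, which is how substArgTypes above detects a count mismatch. A hedged caller-side sketch:

	// Sketch (gc side): substitute two concrete types for two "any" placeholders in sig.
	func substTwo(sig *types.Type) *types.Type {
		args := []*types.Type{types.Types[types.TINT], types.Types[types.TUINT8]}
		out := types.SubstAny(sig, &args)
		if len(args) > 0 {
			// leftover entries: more types supplied than "any" placeholders found;
			// substArgTypes treats this as a fatal error
		}
		return out // sig itself is not mutated; changed nodes are copied on the way down
	}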
@@ -695,9 +677,9 @@ type Iter struct {
s []*Field
}
// iterFields returns the first field or method in struct or interface type t
// IterFields returns the first field or method in struct or interface type t
// and an Iter value to continue iterating across the rest.
func iterFields(t *Type) (*Field, Iter) {
func IterFields(t *Type) (*Field, Iter) {
return t.Fields().Iter()
}
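The conventional iteration pattern pairs IterFields with the Iter's Next method (defined alongside Iter, outside this hunk); a sketch:

	// Sketch: walk the fields or methods of struct/interface type t in declaration order.
	for f, it := types.IterFields(t); f != nil; f = it.Next() {
		_ = f // one *types.Field per iteration
	}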
@@ -739,15 +721,15 @@ func (t *Type) Recv() *Field {
return s.Field(0)
}
// recvsParamsResults stores the accessor functions for a function Type's
// RecvsParamsResults stores the accessor functions for a function Type's
// receiver, parameters, and result parameters, in that order.
// It can be used to iterate over all of a function's parameter lists.
var recvsParamsResults = [3]func(*Type) *Type{
var RecvsParamsResults = [3]func(*Type) *Type{
(*Type).Recvs, (*Type).Params, (*Type).Results,
}
// paramsResults is like recvsParamsResults, but omits receiver parameters.
var paramsResults = [2]func(*Type) *Type{
// ParamsResults is like RecvsParamsResults, but omits receiver parameters.
var ParamsResults = [2]func(*Type) *Type{
(*Type).Params, (*Type).Results,
}
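These tables let callers treat the three parameter lists of a TFUNC uniformly, as the cmp code further down does. A sketch of visiting every receiver, parameter, and result of a function type t, in that order:

	// Sketch only.
	func eachParam(t *types.Type, visit func(*types.Field)) {
		for _, list := range types.RecvsParamsResults {
			for _, f := range list(t).FieldSlice() {
				visit(f)
			}
		}
	}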
@@ -838,8 +820,8 @@ func (t *Type) Fields() *Fields {
case TSTRUCT:
return &t.Extra.(*StructType).fields
case TINTER:
dowidth(t)
return &t.Extra.(*InterType).fields
Dowidth(t)
return &t.Extra.(*InterType).Fields
}
Fatalf("Fields: type %v does not have fields", t)
return nil
@@ -887,7 +869,7 @@ func (t *Type) SetInterface(methods []*Field) {
t.Methods().Set(methods)
}
func (t *Type) isDDDArray() bool {
func (t *Type) IsDDDArray() bool {
if t.Etype != TARRAY {
return false
}
@@ -906,12 +888,12 @@ func (t *Type) ArgWidth() int64 {
}
func (t *Type) Size() int64 {
dowidth(t)
Dowidth(t)
return t.Width
}
func (t *Type) Alignment() int64 {
dowidth(t)
Dowidth(t)
return int64(t.Align)
}
@@ -1001,12 +983,12 @@ func (t *Type) cmp(x *Type) ssa.Cmp {
// for error messages. Treat them as equal.
switch t.Etype {
case TUINT8:
if (t == Types[TUINT8] || t == bytetype) && (x == Types[TUINT8] || x == bytetype) {
if (t == Types[TUINT8] || t == Bytetype) && (x == Types[TUINT8] || x == Bytetype) {
return ssa.CMPeq
}
case TINT32:
if (t == Types[runetype.Etype] || t == runetype) && (x == Types[runetype.Etype] || x == runetype) {
if (t == Types[Runetype.Etype] || t == Runetype) && (x == Types[Runetype.Etype] || x == Runetype) {
return ssa.CMPeq
}
}
@@ -1101,7 +1083,7 @@ func (t *Type) cmp(x *Type) ssa.Cmp {
return ssa.CMPeq
case TFUNC:
for _, f := range recvsParamsResults {
for _, f := range RecvsParamsResults {
// Loop over fields in structs, ignoring argument names.
tfs := f(t).FieldSlice()
xfs := f(x).FieldSlice()
@@ -1163,8 +1145,8 @@ var unsignedEType = [...]EType{
TUINTPTR: TUINTPTR,
}
// toUnsigned returns the unsigned equivalent of integer type t.
func (t *Type) toUnsigned() *Type {
// ToUnsigned returns the unsigned equivalent of integer type t.
func (t *Type) ToUnsigned() *Type {
if !t.IsInteger() {
Fatalf("unsignedType(%v)", t)
}
@@ -1255,7 +1237,7 @@ func (t *Type) ElemType() ssa.Type {
return t.Elem()
}
func (t *Type) PtrTo() ssa.Type {
return typPtr(t)
return NewPtr(t)
}
func (t *Type) NumFields() int {
@@ -1268,7 +1250,7 @@ func (t *Type) FieldOff(i int) int64 {
return t.Field(i).Offset
}
func (t *Type) FieldName(i int) string {
return t.Field(i).Sym.Name
return FieldName(t.Field(i))
}
func (t *Type) NumElem() int64 {
@@ -1281,8 +1263,8 @@ func (t *Type) NumElem() int64 {
}
// SetNumElem sets the number of elements in an array type.
// The only allowed use is on array types created with typDDDArray.
// For other uses, create a new array with typArray instead.
// The only allowed use is on array types created with NewDDDArray.
// For other uses, create a new array with NewArray instead.
func (t *Type) SetNumElem(n int64) {
t.wantEtype(TARRAY)
at := t.Extra.(*ArrayType)
@@ -1309,7 +1291,7 @@ func (t *Type) IsUntyped() bool {
if t == nil {
return false
}
if t == idealstring || t == idealbool {
if t == Idealstring || t == Idealbool {
return true
}
switch t.Etype {
@@ -1319,7 +1301,7 @@ func (t *Type) IsUntyped() bool {
return false
}
func haspointers(t *Type) bool {
func Haspointers(t *Type) bool {
switch t.Etype {
case TINT, TUINT, TINT8, TUINT8, TINT16, TUINT16, TINT32, TUINT32, TINT64,
TUINT64, TUINTPTR, TFLOAT32, TFLOAT64, TCOMPLEX64, TCOMPLEX128, TBOOL:
@@ -1329,11 +1311,11 @@ func haspointers(t *Type) bool {
if t.NumElem() == 0 { // empty array has no pointers
return false
}
return haspointers(t.Elem())
return Haspointers(t.Elem())
case TSTRUCT:
for _, t1 := range t.Fields().Slice() {
if haspointers(t1.Type) {
if Haspointers(t1.Type) {
return true
}
}
@@ -1350,9 +1332,22 @@ func (t *Type) HasPointer() bool {
if t.IsPtr() && t.Elem().NotInHeap() {
return false
}
return haspointers(t)
return Haspointers(t)
}
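Haspointers reports whether a value of type t contains any pointer words; HasPointer is the same check except that a pointer to a not-in-heap element is treated as pointer-free. A small hedged sketch of the expected answers, assuming the exported Types array:

	// Sketch only.
	_ = types.Haspointers(types.Types[types.TINT])               // false: scalar, no pointer words
	_ = types.Haspointers(types.NewPtr(types.Types[types.TINT])) // true: a pointer word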
func (t *Type) Symbol() *obj.LSym {
return Linksym(typenamesym(t))
return TypeLinkSym(t)
}
// Tie returns 'T' if t is a concrete type,
// 'I' if t is an interface type, and 'E' if t is an empty interface type.
// It is used to build calls to the conv* and assert* runtime routines.
func (t *Type) Tie() byte {
if t.IsEmptyInterface() {
return 'E'
}
if t.IsInterface() {
return 'I'
}
return 'T'
}
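For example, converting a concrete value to an empty interface pairs 'T' with 'E', selecting a runtime routine such as convT2E. A hedged sketch of how a caller might assemble such a name from source and destination types src and dst (the real call sites live in the frontend, not in this diff):

	// Hypothetical name assembly; illustrative only.
	name := "conv" + string(src.Tie()) + "2" + string(dst.Tie()) // e.g. "convT2E"
	_ = name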

View file

@@ -0,0 +1,126 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package types
import (
"cmd/compile/internal/ssa"
"cmd/internal/obj"
"fmt"
)
const BADWIDTH = -1000000000
// Initialized by frontend. Exists only here.
var Tptr EType // either TPTR32 or TPTR64
// The following variables must be initialized early by the frontend.
// They are here to break import cycles.
// TODO(gri) eliminate these dependencies.
var (
Widthptr int
Dowidth func(*Type)
Fatalf func(string, ...interface{})
Sconv func(*Sym, int, int) string // orig: func sconv(s *Sym, flag FmtFlag, mode fmtMode) string
Tconv func(*Type, int, int, int) string // orig: func tconv(t *Type, flag FmtFlag, mode fmtMode, depth int) string
FormatSym func(*Sym, fmt.State, rune, int) // orig: func symFormat(sym *Sym, s fmt.State, verb rune, mode fmtMode)
FormatType func(*Type, fmt.State, rune, int) // orig: func typeFormat(t *Type, s fmt.State, verb rune, mode fmtMode)
Cmptyp func(_, _ *Type) ssa.Cmp
FieldName func(*Field) string
TypeLinkSym func(*Type) *obj.LSym
FmtLeft int
FmtUnsigned int
FErr int
)
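The gc frontend assigns these hooks before it creates or formats any Type or Sym. A hedged sketch of what that installation might look like on the gc side (the identifiers on the right are gc-internal; the exact location of this code is not shown in this diff):

	// Sketch (gc side, early in initialization).
	types.Widthptr = Widthptr
	types.Dowidth = dowidth
	types.Fatalf = Fatalf
	types.TypeLinkSym = func(t *types.Type) *obj.LSym {
		return Linksym(typenamesym(t)) // mirrors the body removed from (*Type).Symbol earlier in this diff
	}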
func (s *Sym) String() string {
return Sconv(s, 0, FErr)
}
func (sym *Sym) Format(s fmt.State, verb rune) {
FormatSym(sym, s, verb, FErr)
}
func (t *Type) String() string {
// This is an external entry point, so we pass depth 0 to tconv.
// The implementation of tconv (including typefmt and fldconv)
// must take care not to use a type in a formatting string
// to avoid resetting the recursion counter.
return Tconv(t, 0, FErr, 0)
}
// ShortString generates a short description of t.
// It is used in autogenerated method names, reflection,
// and itab names.
func (t *Type) ShortString() string {
return Tconv(t, FmtLeft, FErr, 0)
}
// LongString generates a complete description of t.
// It is useful for reflection,
// or when a unique fingerprint or hash of a type is required.
func (t *Type) LongString() string {
return Tconv(t, FmtLeft|FmtUnsigned, FErr, 0)
}
func (t *Type) Format(s fmt.State, verb rune) {
FormatType(t, s, verb, FErr)
}
type bitset8 uint8
func (f *bitset8) set(mask uint8, b bool) {
if b {
*(*uint8)(f) |= mask
} else {
*(*uint8)(f) &^= mask
}
}
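bitset8 packs up to eight boolean flags into a single byte; within this package it would be used along these lines (the flag constant is purely illustrative):

	// Illustrative only: a hypothetical flag bit stored in a bitset8 field.
	const flagExample uint8 = 1 << 0
	var flags bitset8
	flags.set(flagExample, true)  // set the bit
	flags.set(flagExample, false) // clear it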
var etnames = []string{
Txxx: "Txxx",
TINT: "INT",
TUINT: "UINT",
TINT8: "INT8",
TUINT8: "UINT8",
TINT16: "INT16",
TUINT16: "UINT16",
TINT32: "INT32",
TUINT32: "UINT32",
TINT64: "INT64",
TUINT64: "UINT64",
TUINTPTR: "UINTPTR",
TFLOAT32: "FLOAT32",
TFLOAT64: "FLOAT64",
TCOMPLEX64: "COMPLEX64",
TCOMPLEX128: "COMPLEX128",
TBOOL: "BOOL",
TPTR32: "PTR32",
TPTR64: "PTR64",
TFUNC: "FUNC",
TARRAY: "ARRAY",
TSLICE: "SLICE",
TSTRUCT: "STRUCT",
TCHAN: "CHAN",
TMAP: "MAP",
TINTER: "INTER",
TFORW: "FORW",
TSTRING: "STRING",
TUNSAFEPTR: "TUNSAFEPTR",
TANY: "ANY",
TIDEAL: "TIDEAL",
TNIL: "TNIL",
TBLANK: "TBLANK",
TFUNCARGS: "TFUNCARGS",
TCHANARGS: "TCHANARGS",
TDDDFIELD: "TDDDFIELD",
}
func (et EType) String() string {
if int(et) < len(etnames) && etnames[et] != "" {
return etnames[et]
}
return fmt.Sprintf("E-%d", et)
}

View file

@@ -40,6 +40,7 @@ var bootstrapDirs = []string{
"cmd/compile/internal/mips",
"cmd/compile/internal/mips64",
"cmd/compile/internal/ppc64",
"cmd/compile/internal/types",
"cmd/compile/internal/s390x",
"cmd/compile/internal/ssa",
"cmd/compile/internal/syntax",