[dev.regabi] cmd/compile: split out package typecheck [generated]

This commit splits the typechecking logic into its own package,
the first of a sequence of CLs to break package gc into more
manageable units.

[git-generate]
cd src/cmd/compile/internal/gc
rf '
	# The binary import/export has to be part of typechecking,
	# because we load inlined function bodies lazily, but "exporter"
	# should not be. Move that out of bexport.go.
	mv exporter exporter.markObject exporter.markType export.go

	# Use the typechecking helpers, so that the calls left behind
	# in package gc do not need access to ctxExpr etc.
	ex {
		import "cmd/compile/internal/ir"

		# TODO(rsc): Should not be necessary.
		avoid TypecheckExpr
		avoid TypecheckStmt
		avoid TypecheckExprs
		avoid TypecheckStmts
		avoid TypecheckAssignExpr
		avoid TypecheckCallee

		var n ir.Node
		var ns []ir.Node
		typecheck(n, ctxExpr) -> TypecheckExpr(n)
		typecheck(n, ctxStmt) -> TypecheckStmt(n)
		typecheckslice(ns, ctxExpr) -> TypecheckExprs(ns)
		typecheckslice(ns, ctxStmt) -> TypecheckStmts(ns)
		typecheck(n, ctxExpr|ctxAssign) -> TypecheckAssignExpr(n)
		typecheck(n, ctxExpr|ctxCallee) -> TypecheckCallee(n)
	}

	# Move some typechecking API to typecheck.
	mv syslook LookupRuntime
	mv substArgTypes SubstArgTypes
	mv LookupRuntime SubstArgTypes syms.go

	mv conv Conv
	mv convnop ConvNop
	mv Conv ConvNop typecheck.go

	mv colasdefn AssignDefn
	mv colasname assignableName

	mv Target target.go

	mv initname autoexport exportsym dcl.go
	mv exportsym Export

	# Export API to be called from outside typecheck.
	# The ones with "Typecheck" prefixes will be renamed later to drop the prefix.
	mv adddot AddImplicitDots
	mv assignconv AssignConv
	mv expandmeth CalcMethods
	mv capturevarscomplete CaptureVarsComplete
	mv checkMapKeys CheckMapKeys
	mv checkreturn CheckReturn
	mv dclcontext DeclContext
	mv dclfunc DeclFunc
	mv declare Declare
	mv dotImportRefs DotImportRefs
	mv declImporter DeclImporter
	mv variter DeclVars
	mv defaultlit DefaultLit
	mv evalConst EvalConst
	mv expandInline ImportBody
	mv finishUniverse declareUniverse
	mv funcbody FinishFuncBody
	mv funchdr StartFuncBody
	mv indexconst IndexConst
	mv initTodo InitTodoFunc
	mv lookup Lookup
	mv resolve Resolve
	mv lookupN LookupNum
	mv nodAddr NodAddr
	mv nodAddrAt NodAddrAt
	mv nodnil NodNil
	mv origBoolConst OrigBool
	mv origConst OrigConst
	mv origIntConst OrigInt
	mv redeclare Redeclared
	mv tostruct NewStructType
	mv functype NewFuncType
	mv methodfunc NewMethodType
	mv structargs NewFuncParams
	mv temp Temp
	mv tempAt TempAt
	mv typecheckok TypecheckAllowed
	mv typecheck _typecheck # make room for typecheck pkg
	mv typecheckinl TypecheckImportedBody
	mv typecheckFunc TypecheckFunc
	mv iimport ReadImports
	mv iexport WriteExports
	mv sysfunc LookupRuntimeFunc
	mv sysvar LookupRuntimeVar

	# Move function constructors to typecheck.
	mv mkdotargslice MakeDotArgs
	mv fixVariadicCall FixVariadicCall
	mv closureType ClosureType
	mv partialCallType PartialCallType
	mv capturevars CaptureVars
	mv MakeDotArgs FixVariadicCall ClosureType PartialCallType CaptureVars typecheckclosure func.go

	mv autolabel AutoLabel
	mv AutoLabel syms.go

	mv Dlist dlist
	mv Symlink symlink

	mv \
		AssignDefn assignableName \
		AssignConv \
		CaptureVarsComplete \
		DeclContext \
		DeclFunc \
		DeclImporter \
		DeclVars \
		Declare \
		DotImportRefs \
		Export \
		InitTodoFunc \
		Lookup \
		LookupNum \
		LookupRuntimeFunc \
		LookupRuntimeVar \
		NewFuncParams \
		NewName \
		NodAddr \
		NodAddrAt \
		NodNil \
		Redeclared \
		StartFuncBody \
		FinishFuncBody \
		TypecheckImportedBody \
		AddImplicitDots \
		CalcMethods \
		CheckFuncStack \
		NewFuncType \
		NewMethodType \
		NewStructType \
		TypecheckAllowed \
		Temp \
		TempAt \
		adddot1 \
		dotlist \
		addmethod \
		assignconvfn \
		assignop \
		autotmpname \
		autoexport \
		bexport.go \
		checkdupfields \
		checkembeddedtype \
		closurename \
		convertop \
		declare_typegen \
		decldepth \
		dlist \
		dotpath \
		expand0 \
		expand1 \
		expandDecl \
		fakeRecvField \
		fnpkg \
		funcStack \
		funcStackEnt \
		funcarg \
		funcarg2 \
		funcargs \
		funcargs2 \
		globClosgen \
		ifacelookdot \
		implements \
		importalias \
		importconst \
		importfunc \
		importobj \
		importsym \
		importtype \
		importvar \
		inimport \
		initname \
		isptrto \
		loadsys \
		lookdot0 \
		lookdot1 \
		makepartialcall \
		okfor \
		okforlen \
		operandType \
		slist \
		symlink \
		tointerface \
		typeSet \
		typeSet.add \
		typeSetEntry \
		typecheckExprSwitch \
		typecheckTypeSwitch \
		typecheckpartialcall \
		typecheckrange \
		typecheckrangeExpr \
		typecheckselect \
		typecheckswitch \
		vargen \
		builtin.go \
		builtin_test.go \
		const.go \
		func.go \
		iexport.go \
		iimport.go \
		mapfile_mmap.go \
		syms.go \
		target.go \
		typecheck.go \
		unsafe.go \
		universe.go \
		cmd/compile/internal/typecheck
'
rm gen.go types.go types_acc.go

sed -i '' 's/package gc/package typecheck/' mapfile_read.go mkbuiltin.go
mv mapfile_read.go ../typecheck # not part of default build
mv mkbuiltin.go ../typecheck # package main helper
mv builtin ../typecheck

cd ../typecheck
mv dcl.go dcl1.go
mv typecheck.go typecheck1.go
mv universe.go universe1.go
rf '
	# Sweep some small files into larger ones.
	# "mv sym... file1.go file.go" (after the mv file1.go file.go above)
	# lets us insert sym... at the top of file.go.
	mv okfor okforeq universe1.go universe.go
	mv DeclContext vargen dcl1.go Temp TempAt autotmpname NewMethodType dcl.go
	mv InitTodoFunc inimport decldepth TypecheckAllowed typecheck1.go typecheck.go
	mv inl.go closure.go func.go
	mv range.go select.go swt.go stmt.go
	mv Lookup loadsys LookupRuntimeFunc LookupRuntimeVar syms.go
	mv unsafe.go const.go

	mv TypecheckAssignExpr AssignExpr
	mv TypecheckExpr Expr
	mv TypecheckStmt Stmt
	mv TypecheckExprs Exprs
	mv TypecheckStmts Stmts
	mv TypecheckCall Call
	mv TypecheckCallee Callee
	mv _typecheck check
	mv TypecheckFunc Func
	mv TypecheckFuncBody FuncBody
	mv TypecheckImports AllImportedBodies
	mv TypecheckImportedBody ImportedBody
	mv TypecheckInit Init
	mv TypecheckPackage Package
'
rm gen.go go.go init.go main.go reflect.go

Change-Id: Iea6a7aaf6407d690670ec58aeb36cc0b280f80b0
Reviewed-on: https://go-review.googlesource.com/c/go/+/279236
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
This commit is contained in:
Russ Cox 2020-12-23 00:41:49 -05:00
parent dac0de3748
commit b9693d7627
50 changed files with 4206 additions and 4131 deletions

View file

@@ -7,6 +7,7 @@ package gc
import (
"bufio"
"cmd/compile/internal/base"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/x86"
@@ -42,7 +43,7 @@ func TestMain(m *testing.M) {
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return typenamesym(t).Linksym()
}
TypecheckInit()
typecheck.Init()
os.Exit(m.Run())
}

View file

@@ -9,6 +9,7 @@ package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@@ -19,7 +20,7 @@ import (
func mkParamResultField(t *types.Type, s *types.Sym, which ir.Class) *types.Field {
field := types.NewField(src.NoXPos, s, t)
n := NewName(s)
n := typecheck.NewName(s)
n.Class_ = which
field.Nname = n
n.SetType(t)
@@ -42,7 +43,7 @@ func mkstruct(fieldtypes []*types.Type) *types.Type {
}
func mkFuncType(rcvr *types.Type, ins []*types.Type, outs []*types.Type) *types.Type {
q := lookup("?")
q := typecheck.Lookup("?")
inf := []*types.Field{}
for _, it := range ins {
inf = append(inf, mkParamResultField(it, q, ir.PPARAM))

View file

@@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
@@ -106,7 +107,7 @@ func genhash(t *types.Type) *obj.LSym {
return closure
}
if memhashvarlen == nil {
memhashvarlen = sysfunc("memhash_varlen")
memhashvarlen = typecheck.LookupRuntimeFunc("memhash_varlen")
}
ot := 0
ot = dsymptr(closure, ot, memhashvarlen, 0)
@@ -143,17 +144,17 @@ func genhash(t *types.Type) *obj.LSym {
}
base.Pos = base.AutogeneratedPos // less confusing than end of input
dclcontext = ir.PEXTERN
typecheck.DeclContext = ir.PEXTERN
// func sym(p *T, h uintptr) uintptr
args := []*ir.Field{
ir.NewField(base.Pos, lookup("p"), nil, types.NewPtr(t)),
ir.NewField(base.Pos, lookup("h"), nil, types.Types[types.TUINTPTR]),
ir.NewField(base.Pos, typecheck.Lookup("p"), nil, types.NewPtr(t)),
ir.NewField(base.Pos, typecheck.Lookup("h"), nil, types.Types[types.TUINTPTR]),
}
results := []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR])}
tfn := ir.NewFuncType(base.Pos, nil, args, results)
fn := dclfunc(sym, tfn)
fn := typecheck.DeclFunc(sym, tfn)
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nh := ir.AsNode(tfn.Type().Params().Field(1).Nname)
@@ -165,7 +166,7 @@ func genhash(t *types.Type) *obj.LSym {
hashel := hashfor(t.Elem())
// for i := 0; i < nelem; i++
ni := temp(types.Types[types.TINT])
ni := typecheck.Temp(types.Types[types.TINT])
init := ir.NewAssignStmt(base.Pos, ni, ir.NewInt(0))
cond := ir.NewBinaryExpr(base.Pos, ir.OLT, ni, ir.NewInt(t.NumElem()))
post := ir.NewAssignStmt(base.Pos, ni, ir.NewBinaryExpr(base.Pos, ir.OADD, ni, ir.NewInt(1)))
@@ -177,7 +178,7 @@ func genhash(t *types.Type) *obj.LSym {
nx := ir.NewIndexExpr(base.Pos, np, ni)
nx.SetBounded(true)
na := nodAddr(nx)
na := typecheck.NodAddr(nx)
call.Args.Append(na)
call.Args.Append(nh)
loop.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
@@ -201,7 +202,7 @@ func genhash(t *types.Type) *obj.LSym {
hashel := hashfor(f.Type)
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nodAddr(nx)
na := typecheck.NodAddr(nx)
call.Args.Append(na)
call.Args.Append(nh)
fn.Body.Append(ir.NewAssignStmt(base.Pos, nh, call))
@@ -216,7 +217,7 @@ func genhash(t *types.Type) *obj.LSym {
hashel := hashmem(f.Type)
call := ir.NewCallExpr(base.Pos, ir.OCALL, hashel, nil)
nx := ir.NewSelectorExpr(base.Pos, ir.OXDOT, np, f.Sym) // TODO: fields from other packages?
na := nodAddr(nx)
na := typecheck.NodAddr(nx)
call.Args.Append(na)
call.Args.Append(nh)
call.Args.Append(ir.NewInt(size))
@@ -234,13 +235,13 @@ func genhash(t *types.Type) *obj.LSym {
ir.DumpList("genhash body", fn.Body)
}
funcbody()
typecheck.FinishFuncBody()
fn.SetDupok(true)
typecheckFunc(fn)
typecheck.Func(fn)
ir.CurFunc = fn
typecheckslice(fn.Body, ctxStmt)
typecheck.Stmts(fn.Body)
ir.CurFunc = nil
if base.Debug.DclStack != 0 {
@@ -248,7 +249,7 @@ func genhash(t *types.Type) *obj.LSym {
}
fn.SetNilCheckDisabled(true)
Target.Decls = append(Target.Decls, fn)
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Build closure. It doesn't close over any variables, so
// it contains just the function pointer.
@@ -284,9 +285,9 @@ func hashfor(t *types.Type) ir.Node {
sym = typesymprefix(".hash", t)
}
n := NewName(sym)
n := typecheck.NewName(sym)
ir.MarkFunc(n)
n.SetType(functype(nil, []*ir.Field{
n.SetType(typecheck.NewFuncType(nil, []*ir.Field{
ir.NewField(base.Pos, nil, nil, types.NewPtr(t)),
ir.NewField(base.Pos, nil, nil, types.Types[types.TUINTPTR]),
}, []*ir.Field{
@@ -298,9 +299,9 @@ func hashfor(t *types.Type) ir.Node {
// sysClosure returns a closure which will call the
// given runtime function (with no closed-over variables).
func sysClosure(name string) *obj.LSym {
s := sysvar(name + "·f")
s := typecheck.LookupRuntimeVar(name + "·f")
if len(s.P) == 0 {
f := sysfunc(name)
f := typecheck.LookupRuntimeFunc(name)
dsymptr(s, 0, f, 0)
ggloblsym(s, int32(types.PtrSize), obj.DUPOK|obj.RODATA)
}
@@ -349,7 +350,7 @@ func geneq(t *types.Type) *obj.LSym {
return closure
}
if memequalvarlen == nil {
memequalvarlen = sysvar("memequal_varlen") // asm func
memequalvarlen = typecheck.LookupRuntimeVar("memequal_varlen") // asm func
}
ot := 0
ot = dsymptr(closure, ot, memequalvarlen, 0)
@@ -372,20 +373,20 @@ func geneq(t *types.Type) *obj.LSym {
// Autogenerate code for equality of structs and arrays.
base.Pos = base.AutogeneratedPos // less confusing than end of input
dclcontext = ir.PEXTERN
typecheck.DeclContext = ir.PEXTERN
// func sym(p, q *T) bool
tfn := ir.NewFuncType(base.Pos, nil,
[]*ir.Field{ir.NewField(base.Pos, lookup("p"), nil, types.NewPtr(t)), ir.NewField(base.Pos, lookup("q"), nil, types.NewPtr(t))},
[]*ir.Field{ir.NewField(base.Pos, lookup("r"), nil, types.Types[types.TBOOL])})
[]*ir.Field{ir.NewField(base.Pos, typecheck.Lookup("p"), nil, types.NewPtr(t)), ir.NewField(base.Pos, typecheck.Lookup("q"), nil, types.NewPtr(t))},
[]*ir.Field{ir.NewField(base.Pos, typecheck.Lookup("r"), nil, types.Types[types.TBOOL])})
fn := dclfunc(sym, tfn)
fn := typecheck.DeclFunc(sym, tfn)
np := ir.AsNode(tfn.Type().Params().Field(0).Nname)
nq := ir.AsNode(tfn.Type().Params().Field(1).Nname)
nr := ir.AsNode(tfn.Type().Results().Field(0).Nname)
// Label to jump to if an equality test fails.
neq := autolabel(".neq")
neq := typecheck.AutoLabel(".neq")
// We reach here only for types that have equality but
// cannot be handled by the standard algorithms,
@@ -450,7 +451,7 @@ func geneq(t *types.Type) *obj.LSym {
} else {
// Generate a for loop.
// for i := 0; i < nelem; i++
i := temp(types.Types[types.TINT])
i := typecheck.Temp(types.Types[types.TINT])
init := ir.NewAssignStmt(base.Pos, i, ir.NewInt(0))
cond := ir.NewBinaryExpr(base.Pos, ir.OLT, i, ir.NewInt(nelem))
post := ir.NewAssignStmt(base.Pos, i, ir.NewBinaryExpr(base.Pos, ir.OADD, i, ir.NewInt(1)))
@@ -586,7 +587,7 @@ func geneq(t *types.Type) *obj.LSym {
// ret:
// return
ret := autolabel(".ret")
ret := typecheck.AutoLabel(".ret")
fn.Body.Append(ir.NewLabelStmt(base.Pos, ret))
fn.Body.Append(ir.NewReturnStmt(base.Pos, nil))
@@ -610,13 +611,13 @@ func geneq(t *types.Type) *obj.LSym {
ir.DumpList("geneq body", fn.Body)
}
funcbody()
typecheck.FinishFuncBody()
fn.SetDupok(true)
typecheckFunc(fn)
typecheck.Func(fn)
ir.CurFunc = fn
typecheckslice(fn.Body, ctxStmt)
typecheck.Stmts(fn.Body)
ir.CurFunc = nil
if base.Debug.DclStack != 0 {
@@ -628,7 +629,7 @@ func geneq(t *types.Type) *obj.LSym {
// neither of which can be nil, and our comparisons
// are shallow.
fn.SetNilCheckDisabled(true)
Target.Decls = append(Target.Decls, fn)
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Generate a closure which points at the function we just generated.
dsymptr(closure, 0, sym.Linksym(), 0)
@@ -660,20 +661,20 @@ func eqfield(p ir.Node, q ir.Node, field *types.Sym) ir.Node {
// which can be used to construct string equality comparison.
// eqlen must be evaluated before eqmem, and shortcircuiting is required.
func eqstring(s, t ir.Node) (eqlen *ir.BinaryExpr, eqmem *ir.CallExpr) {
s = conv(s, types.Types[types.TSTRING])
t = conv(t, types.Types[types.TSTRING])
s = typecheck.Conv(s, types.Types[types.TSTRING])
t = typecheck.Conv(t, types.Types[types.TSTRING])
sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
tptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, t)
slen := conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, s), types.Types[types.TUINTPTR])
tlen := conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, t), types.Types[types.TUINTPTR])
slen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, s), types.Types[types.TUINTPTR])
tlen := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, t), types.Types[types.TUINTPTR])
fn := syslook("memequal")
fn = substArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
fn := typecheck.LookupRuntime("memequal")
fn = typecheck.SubstArgTypes(fn, types.Types[types.TUINT8], types.Types[types.TUINT8])
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, []ir.Node{sptr, tptr, ir.Copy(slen)})
TypecheckCall(call)
typecheck.Call(call)
cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, slen, tlen)
cmp = typecheck(cmp, ctxExpr).(*ir.BinaryExpr)
cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
cmp.SetType(types.Types[types.TBOOL])
return cmp, call
}
@@ -692,9 +693,9 @@ func eqinterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
// func efaceeq(typ *uintptr, x, y unsafe.Pointer) (ret bool)
var fn ir.Node
if s.Type().IsEmptyInterface() {
fn = syslook("efaceeq")
fn = typecheck.LookupRuntime("efaceeq")
} else {
fn = syslook("ifaceeq")
fn = typecheck.LookupRuntime("ifaceeq")
}
stab := ir.NewUnaryExpr(base.Pos, ir.OITAB, s)
@@ -707,10 +708,10 @@ func eqinterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
tdata.SetTypecheck(1)
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, []ir.Node{stab, sdata, tdata})
TypecheckCall(call)
typecheck.Call(call)
cmp := ir.NewBinaryExpr(base.Pos, ir.OEQ, stab, ttab)
cmp = typecheck(cmp, ctxExpr).(*ir.BinaryExpr)
cmp = typecheck.Expr(cmp).(*ir.BinaryExpr)
cmp.SetType(types.Types[types.TBOOL])
return cmp, call
}
@@ -718,8 +719,8 @@ func eqinterface(s, t ir.Node) (eqtab *ir.BinaryExpr, eqdata *ir.CallExpr) {
// eqmem returns the node
// memequal(&p.field, &q.field [, size])
func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
nx := typecheck(nodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)), ctxExpr)
ny := typecheck(nodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)), ctxExpr)
nx := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, p, field)))
ny := typecheck.Expr(typecheck.NodAddr(ir.NewSelectorExpr(base.Pos, ir.OXDOT, q, field)))
fn, needsize := eqmemfunc(size, nx.Type().Elem())
call := ir.NewCallExpr(base.Pos, ir.OCALL, fn, nil)
@@ -735,14 +736,14 @@ func eqmem(p ir.Node, q ir.Node, field *types.Sym, size int64) ir.Node {
func eqmemfunc(size int64, t *types.Type) (fn *ir.Name, needsize bool) {
switch size {
default:
fn = syslook("memequal")
fn = typecheck.LookupRuntime("memequal")
needsize = true
case 1, 2, 4, 8, 16:
buf := fmt.Sprintf("memequal%d", int(size)*8)
fn = syslook(buf)
fn = typecheck.LookupRuntime(buf)
}
fn = substArgTypes(fn, t, t)
fn = typecheck.SubstArgTypes(fn, t, t)
return fn, needsize
}

View file

@@ -1,185 +0,0 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
type exporter struct {
marked map[*types.Type]bool // types already seen by markType
}
// markObject visits a reachable object.
func (p *exporter) markObject(n ir.Node) {
if n.Op() == ir.ONAME {
n := n.(*ir.Name)
if n.Class_ == ir.PFUNC {
inlFlood(n, exportsym)
}
}
p.markType(n.Type())
}
// markType recursively visits types reachable from t to identify
// functions whose inline bodies may be needed.
func (p *exporter) markType(t *types.Type) {
if p.marked[t] {
return
}
p.marked[t] = true
// If this is a named type, mark all of its associated
// methods. Skip interface types because t.Methods contains
// only their unexpanded method set (i.e., exclusive of
// interface embeddings), and the switch statement below
// handles their full method set.
if t.Sym() != nil && t.Kind() != types.TINTER {
for _, m := range t.Methods().Slice() {
if types.IsExported(m.Sym.Name) {
p.markObject(ir.AsNode(m.Nname))
}
}
}
// Recursively mark any types that can be produced given a
// value of type t: dereferencing a pointer; indexing or
// iterating over an array, slice, or map; receiving from a
// channel; accessing a struct field or interface method; or
// calling a function.
//
// Notably, we don't mark function parameter types, because
// the user already needs some way to construct values of
// those types.
switch t.Kind() {
case types.TPTR, types.TARRAY, types.TSLICE:
p.markType(t.Elem())
case types.TCHAN:
if t.ChanDir().CanRecv() {
p.markType(t.Elem())
}
case types.TMAP:
p.markType(t.Key())
p.markType(t.Elem())
case types.TSTRUCT:
for _, f := range t.FieldSlice() {
if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
p.markType(f.Type)
}
}
case types.TFUNC:
for _, f := range t.Results().FieldSlice() {
p.markType(f.Type)
}
case types.TINTER:
for _, f := range t.FieldSlice() {
if types.IsExported(f.Sym.Name) {
p.markType(f.Type)
}
}
}
}
// ----------------------------------------------------------------------------
// Export format
// Tags. Must be < 0.
const (
// Objects
packageTag = -(iota + 1)
constTag
typeTag
varTag
funcTag
endTag
// Types
namedTag
arrayTag
sliceTag
dddTag
structTag
pointerTag
signatureTag
interfaceTag
mapTag
chanTag
// Values
falseTag
trueTag
int64Tag
floatTag
fractionTag // not used by gc
complexTag
stringTag
nilTag
unknownTag // not used by gc (only appears in packages with errors)
// Type aliases
aliasTag
)
var predecl []*types.Type // initialized lazily
func predeclared() []*types.Type {
if predecl == nil {
// initialize lazily to be sure that all
// elements have been initialized before
predecl = []*types.Type{
// basic types
types.Types[types.TBOOL],
types.Types[types.TINT],
types.Types[types.TINT8],
types.Types[types.TINT16],
types.Types[types.TINT32],
types.Types[types.TINT64],
types.Types[types.TUINT],
types.Types[types.TUINT8],
types.Types[types.TUINT16],
types.Types[types.TUINT32],
types.Types[types.TUINT64],
types.Types[types.TUINTPTR],
types.Types[types.TFLOAT32],
types.Types[types.TFLOAT64],
types.Types[types.TCOMPLEX64],
types.Types[types.TCOMPLEX128],
types.Types[types.TSTRING],
// basic type aliases
types.ByteType,
types.RuneType,
// error
types.ErrorType,
// untyped types
types.UntypedBool,
types.UntypedInt,
types.UntypedRune,
types.UntypedFloat,
types.UntypedComplex,
types.UntypedString,
types.Types[types.TNIL],
// package unsafe
types.Types[types.TUNSAFEPTR],
// invalid type (package contains errors)
types.Types[types.Txxx],
// any type, for builtin export data
types.Types[types.TANY],
}
}
return predecl
}

View file

@@ -1,344 +0,0 @@
// Code generated by mkbuiltin.go. DO NOT EDIT.
package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
var runtimeDecls = [...]struct {
name string
tag int
typ int
}{
{"newobject", funcTag, 4},
{"mallocgc", funcTag, 8},
{"panicdivide", funcTag, 9},
{"panicshift", funcTag, 9},
{"panicmakeslicelen", funcTag, 9},
{"panicmakeslicecap", funcTag, 9},
{"throwinit", funcTag, 9},
{"panicwrap", funcTag, 9},
{"gopanic", funcTag, 11},
{"gorecover", funcTag, 14},
{"goschedguarded", funcTag, 9},
{"goPanicIndex", funcTag, 16},
{"goPanicIndexU", funcTag, 18},
{"goPanicSliceAlen", funcTag, 16},
{"goPanicSliceAlenU", funcTag, 18},
{"goPanicSliceAcap", funcTag, 16},
{"goPanicSliceAcapU", funcTag, 18},
{"goPanicSliceB", funcTag, 16},
{"goPanicSliceBU", funcTag, 18},
{"goPanicSlice3Alen", funcTag, 16},
{"goPanicSlice3AlenU", funcTag, 18},
{"goPanicSlice3Acap", funcTag, 16},
{"goPanicSlice3AcapU", funcTag, 18},
{"goPanicSlice3B", funcTag, 16},
{"goPanicSlice3BU", funcTag, 18},
{"goPanicSlice3C", funcTag, 16},
{"goPanicSlice3CU", funcTag, 18},
{"printbool", funcTag, 19},
{"printfloat", funcTag, 21},
{"printint", funcTag, 23},
{"printhex", funcTag, 25},
{"printuint", funcTag, 25},
{"printcomplex", funcTag, 27},
{"printstring", funcTag, 29},
{"printpointer", funcTag, 30},
{"printuintptr", funcTag, 31},
{"printiface", funcTag, 30},
{"printeface", funcTag, 30},
{"printslice", funcTag, 30},
{"printnl", funcTag, 9},
{"printsp", funcTag, 9},
{"printlock", funcTag, 9},
{"printunlock", funcTag, 9},
{"concatstring2", funcTag, 34},
{"concatstring3", funcTag, 35},
{"concatstring4", funcTag, 36},
{"concatstring5", funcTag, 37},
{"concatstrings", funcTag, 39},
{"cmpstring", funcTag, 40},
{"intstring", funcTag, 43},
{"slicebytetostring", funcTag, 44},
{"slicebytetostringtmp", funcTag, 45},
{"slicerunetostring", funcTag, 48},
{"stringtoslicebyte", funcTag, 50},
{"stringtoslicerune", funcTag, 53},
{"slicecopy", funcTag, 54},
{"decoderune", funcTag, 55},
{"countrunes", funcTag, 56},
{"convI2I", funcTag, 57},
{"convT16", funcTag, 58},
{"convT32", funcTag, 58},
{"convT64", funcTag, 58},
{"convTstring", funcTag, 58},
{"convTslice", funcTag, 58},
{"convT2E", funcTag, 59},
{"convT2Enoptr", funcTag, 59},
{"convT2I", funcTag, 59},
{"convT2Inoptr", funcTag, 59},
{"assertE2I", funcTag, 57},
{"assertE2I2", funcTag, 60},
{"assertI2I", funcTag, 57},
{"assertI2I2", funcTag, 60},
{"panicdottypeE", funcTag, 61},
{"panicdottypeI", funcTag, 61},
{"panicnildottype", funcTag, 62},
{"ifaceeq", funcTag, 64},
{"efaceeq", funcTag, 64},
{"fastrand", funcTag, 66},
{"makemap64", funcTag, 68},
{"makemap", funcTag, 69},
{"makemap_small", funcTag, 70},
{"mapaccess1", funcTag, 71},
{"mapaccess1_fast32", funcTag, 72},
{"mapaccess1_fast64", funcTag, 72},
{"mapaccess1_faststr", funcTag, 72},
{"mapaccess1_fat", funcTag, 73},
{"mapaccess2", funcTag, 74},
{"mapaccess2_fast32", funcTag, 75},
{"mapaccess2_fast64", funcTag, 75},
{"mapaccess2_faststr", funcTag, 75},
{"mapaccess2_fat", funcTag, 76},
{"mapassign", funcTag, 71},
{"mapassign_fast32", funcTag, 72},
{"mapassign_fast32ptr", funcTag, 72},
{"mapassign_fast64", funcTag, 72},
{"mapassign_fast64ptr", funcTag, 72},
{"mapassign_faststr", funcTag, 72},
{"mapiterinit", funcTag, 77},
{"mapdelete", funcTag, 77},
{"mapdelete_fast32", funcTag, 78},
{"mapdelete_fast64", funcTag, 78},
{"mapdelete_faststr", funcTag, 78},
{"mapiternext", funcTag, 79},
{"mapclear", funcTag, 80},
{"makechan64", funcTag, 82},
{"makechan", funcTag, 83},
{"chanrecv1", funcTag, 85},
{"chanrecv2", funcTag, 86},
{"chansend1", funcTag, 88},
{"closechan", funcTag, 30},
{"writeBarrier", varTag, 90},
{"typedmemmove", funcTag, 91},
{"typedmemclr", funcTag, 92},
{"typedslicecopy", funcTag, 93},
{"selectnbsend", funcTag, 94},
{"selectnbrecv", funcTag, 95},
{"selectnbrecv2", funcTag, 97},
{"selectsetpc", funcTag, 98},
{"selectgo", funcTag, 99},
{"block", funcTag, 9},
{"makeslice", funcTag, 100},
{"makeslice64", funcTag, 101},
{"makeslicecopy", funcTag, 102},
{"growslice", funcTag, 104},
{"memmove", funcTag, 105},
{"memclrNoHeapPointers", funcTag, 106},
{"memclrHasPointers", funcTag, 106},
{"memequal", funcTag, 107},
{"memequal0", funcTag, 108},
{"memequal8", funcTag, 108},
{"memequal16", funcTag, 108},
{"memequal32", funcTag, 108},
{"memequal64", funcTag, 108},
{"memequal128", funcTag, 108},
{"f32equal", funcTag, 109},
{"f64equal", funcTag, 109},
{"c64equal", funcTag, 109},
{"c128equal", funcTag, 109},
{"strequal", funcTag, 109},
{"interequal", funcTag, 109},
{"nilinterequal", funcTag, 109},
{"memhash", funcTag, 110},
{"memhash0", funcTag, 111},
{"memhash8", funcTag, 111},
{"memhash16", funcTag, 111},
{"memhash32", funcTag, 111},
{"memhash64", funcTag, 111},
{"memhash128", funcTag, 111},
{"f32hash", funcTag, 111},
{"f64hash", funcTag, 111},
{"c64hash", funcTag, 111},
{"c128hash", funcTag, 111},
{"strhash", funcTag, 111},
{"interhash", funcTag, 111},
{"nilinterhash", funcTag, 111},
{"int64div", funcTag, 112},
{"uint64div", funcTag, 113},
{"int64mod", funcTag, 112},
{"uint64mod", funcTag, 113},
{"float64toint64", funcTag, 114},
{"float64touint64", funcTag, 115},
{"float64touint32", funcTag, 116},
{"int64tofloat64", funcTag, 117},
{"uint64tofloat64", funcTag, 118},
{"uint32tofloat64", funcTag, 119},
{"complex128div", funcTag, 120},
{"racefuncenter", funcTag, 31},
{"racefuncenterfp", funcTag, 9},
{"racefuncexit", funcTag, 9},
{"raceread", funcTag, 31},
{"racewrite", funcTag, 31},
{"racereadrange", funcTag, 121},
{"racewriterange", funcTag, 121},
{"msanread", funcTag, 121},
{"msanwrite", funcTag, 121},
{"msanmove", funcTag, 122},
{"checkptrAlignment", funcTag, 123},
{"checkptrArithmetic", funcTag, 125},
{"libfuzzerTraceCmp1", funcTag, 127},
{"libfuzzerTraceCmp2", funcTag, 129},
{"libfuzzerTraceCmp4", funcTag, 130},
{"libfuzzerTraceCmp8", funcTag, 131},
{"libfuzzerTraceConstCmp1", funcTag, 127},
{"libfuzzerTraceConstCmp2", funcTag, 129},
{"libfuzzerTraceConstCmp4", funcTag, 130},
{"libfuzzerTraceConstCmp8", funcTag, 131},
{"x86HasPOPCNT", varTag, 6},
{"x86HasSSE41", varTag, 6},
{"x86HasFMA", varTag, 6},
{"armHasVFPv4", varTag, 6},
{"arm64HasATOMICS", varTag, 6},
}
// runtimeTypes constructs the signature types for the runtime
// declaration table above. The slice indices correspond to the
// numeric tags used by the funcTag/varTag entries (e.g. tag 108 is
// the memequal-style signature). NOTE: this function is generated
// (mkbuiltin); do not edit entries by hand — regenerate instead.
func runtimeTypes() []*types.Type {
	var typs [132]*types.Type
	// Primitive and pointer building blocks used by the signatures below.
	typs[0] = types.ByteType
	typs[1] = types.NewPtr(typs[0])
	typs[2] = types.Types[types.TANY]
	typs[3] = types.NewPtr(typs[2])
	typs[4] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[5] = types.Types[types.TUINTPTR]
	typs[6] = types.Types[types.TBOOL]
	typs[7] = types.Types[types.TUNSAFEPTR]
	typs[8] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[9] = functype(nil, nil, nil)
	typs[10] = types.Types[types.TINTER]
	typs[11] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])}, nil)
	typs[12] = types.Types[types.TINT32]
	typs[13] = types.NewPtr(typs[12])
	typs[14] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[13])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])})
	typs[15] = types.Types[types.TINT]
	typs[16] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
	typs[17] = types.Types[types.TUINT]
	typs[18] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[17]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
	typs[19] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])}, nil)
	typs[20] = types.Types[types.TFLOAT64]
	typs[21] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, nil)
	typs[22] = types.Types[types.TINT64]
	typs[23] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, nil)
	typs[24] = types.Types[types.TUINT64]
	typs[25] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
	typs[26] = types.Types[types.TCOMPLEX128]
	typs[27] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])}, nil)
	typs[28] = types.Types[types.TSTRING]
	typs[29] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, nil)
	typs[30] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
	typs[31] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	// String/slice helpers (concatstrings buffers, conversions, compare).
	typs[32] = types.NewArray(typs[0], 32)
	typs[33] = types.NewPtr(typs[32])
	typs[34] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[35] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[36] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[37] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[38] = types.NewSlice(typs[28])
	typs[39] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[38])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[40] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[41] = types.NewArray(typs[0], 4)
	typs[42] = types.NewPtr(typs[41])
	typs[43] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[42]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[44] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[45] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[46] = types.RuneType
	typs[47] = types.NewSlice(typs[46])
	typs[48] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[47])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[49] = types.NewSlice(typs[0])
	typs[50] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[49])})
	typs[51] = types.NewArray(typs[46], 32)
	typs[52] = types.NewPtr(typs[51])
	typs[53] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[52]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[47])})
	typs[54] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[55] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[46]), ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[56] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	// Interface conversion/assertion signatures.
	typs[57] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
	typs[58] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[59] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
	typs[60] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[61] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
	typs[62] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
	typs[63] = types.NewPtr(typs[5])
	typs[64] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[65] = types.Types[types.TUINT32]
	typs[66] = functype(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
	// Map runtime signatures (typs[67] is the generic map[any]any).
	typs[67] = types.NewMap(typs[2], typs[2])
	typs[68] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
	typs[69] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
	typs[70] = functype(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
	typs[71] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[72] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[73] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[74] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[75] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[76] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[77] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[78] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
	typs[79] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[80] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67])}, nil)
	// Channel runtime signatures (bidirectional, recv-only, send-only).
	typs[81] = types.NewChan(typs[2], types.Cboth)
	typs[82] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
	typs[83] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
	typs[84] = types.NewChan(typs[2], types.Crecv)
	typs[85] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[86] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[87] = types.NewChan(typs[2], types.Csend)
	typs[88] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[89] = types.NewArray(typs[0], 3)
	// Struct mirroring runtime.writeBarrier (field layout must match).
	typs[90] = tostruct([]*ir.Field{ir.NewField(base.Pos, lookup("enabled"), nil, typs[6]), ir.NewField(base.Pos, lookup("pad"), nil, typs[89]), ir.NewField(base.Pos, lookup("needed"), nil, typs[6]), ir.NewField(base.Pos, lookup("cgo"), nil, typs[6]), ir.NewField(base.Pos, lookup("alignme"), nil, typs[24])})
	typs[91] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[92] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[93] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[94] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[95] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[96] = types.NewPtr(typs[6])
	typs[97] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[96]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[98] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63])}, nil)
	typs[99] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])})
	// Allocation and slice-growth signatures.
	typs[100] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[101] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[102] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[103] = types.NewSlice(typs[2])
	typs[104] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[103]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[103])})
	// memmove/memclr/memequal/memhash-style signatures.
	typs[105] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[106] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[107] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[108] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[109] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[110] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
	typs[111] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
	// 64-bit arithmetic and float<->int conversion helpers (softfloat/32-bit targets).
	typs[112] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
	typs[113] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
	typs[114] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
	typs[115] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
	typs[116] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
	typs[117] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
	typs[118] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
	typs[119] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
	typs[120] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26]), ir.NewField(base.Pos, nil, nil, typs[26])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])})
	// Instrumentation signatures (race, msan, checkptr, libfuzzer).
	typs[121] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[122] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[123] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[124] = types.NewSlice(typs[7])
	typs[125] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[124])}, nil)
	typs[126] = types.Types[types.TUINT8]
	typs[127] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[126]), ir.NewField(base.Pos, nil, nil, typs[126])}, nil)
	typs[128] = types.Types[types.TUINT16]
	typs[129] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[128]), ir.NewField(base.Pos, nil, nil, typs[128])}, nil)
	typs[130] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65]), ir.NewField(base.Pos, nil, nil, typs[65])}, nil)
	typs[131] = functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
	return typs[:]
}

View file

@ -8,9 +8,9 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
)
func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
@ -72,156 +72,6 @@ func (p *noder) funcLit(expr *syntax.FuncLit) ir.Node {
return clo
}
// typecheckclosure typechecks an OCLOSURE node. It also creates the named
// function associated with the closure.
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
//
// top is the typechecking context bitmask of the closure's use site;
// ctxCallee set there means the closure is called directly.
func typecheckclosure(clo *ir.ClosureExpr, top int) {
	fn := clo.Func
	// Set current associated iota value, so iota can be used inside
	// function in ConstSpec, see issue #22344
	if x := getIotaValue(); x >= 0 {
		fn.Iota = x
	}

	fn.ClosureType = typecheck(fn.ClosureType, ctxType)
	clo.SetType(fn.ClosureType.Type())
	fn.SetClosureCalled(top&ctxCallee != 0)

	// Do not typecheck fn twice, otherwise, we will end up pushing
	// fn to Target.Decls multiple times, causing initLSym called twice.
	// See #30709
	if fn.Typecheck() == 1 {
		return
	}

	// Mark free variables as captured; capturevars later decides
	// by-value vs. by-reference for each of them.
	for _, ln := range fn.ClosureVars {
		n := ln.Defn
		if !n.Name().Captured() {
			n.Name().SetCaptured(true)
			if n.Name().Decldepth == 0 {
				base.Fatalf("typecheckclosure: var %v does not have decldepth assigned", n)
			}

			// Ignore assignments to the variable in straightline code
			// preceding the first capturing by a closure.
			if n.Name().Decldepth == decldepth {
				n.Name().SetAssigned(false)
			}
		}
	}

	fn.Nname.SetSym(closurename(ir.CurFunc))
	ir.MarkFunc(fn.Nname)
	typecheckFunc(fn)

	// Type check the body now, but only if we're inside a function.
	// At top level (in a variable initialization: curfn==nil) we're not
	// ready to type check code yet; we'll check it later, because the
	// underlying closure function we create is added to Target.Decls.
	if ir.CurFunc != nil && clo.Type() != nil {
		oldfn := ir.CurFunc
		ir.CurFunc = fn
		olddd := decldepth
		decldepth = 1
		typecheckslice(fn.Body, ctxStmt)
		decldepth = olddd
		ir.CurFunc = oldfn
	}

	Target.Decls = append(Target.Decls, fn)
}
// globClosgen is like Func.Closgen, but for the global scope.
var globClosgen int32
// closurename returns a fresh, unique symbol for a closure declared
// within outerfunc, of the form "outer.funcN" (the "func" prefix is
// dropped for closures nested directly inside another closure).
// A nil outerfunc means package scope, named "glob.funcN".
func closurename(outerfunc *ir.Func) *types.Sym {
	gen := &globClosgen
	outer := "glob."
	prefix := "func"

	if outerfunc != nil {
		outer = ir.FuncName(outerfunc)
		if outerfunc.OClosure != nil {
			prefix = ""
		}
		// There may be multiple functions named "_". In those
		// cases, we can't use their individual Closgens as it
		// would lead to name clashes.
		if !ir.IsBlank(outerfunc.Nname) {
			gen = &outerfunc.Closgen
		}
	}

	*gen++
	return lookup(fmt.Sprintf("%s.%s%d", outer, prefix, *gen))
}
// capturevarscomplete is set to true when the capturevars phase is done.
var capturevarscomplete bool
// capturevars is called in a separate phase after all typechecking is done.
// It decides whether each variable captured by a closure should be captured
// by value or by reference.
// We use value capturing for values <= 128 bytes that are never reassigned
// after capturing (effectively constant).
func capturevars(fn *ir.Func) {
	lno := base.Pos
	base.Pos = fn.Pos()
	cvars := fn.ClosureVars
	out := cvars[:0] // filter in place, dropping unused captures
	for _, v := range cvars {
		if v.Type() == nil {
			// If v.Type is nil, it means v looked like it
			// was going to be used in the closure, but
			// isn't. This happens in struct literals like
			// s{f: x} where we can't distinguish whether
			// f is a field identifier or expression until
			// resolving s.
			continue
		}
		out = append(out, v)

		// type check the & of closed variables outside the closure,
		// so that the outer frame also grabs them and knows they escape.
		types.CalcSize(v.Type())
		var outer ir.Node
		outer = v.Outer
		outermost := v.Defn.(*ir.Name)

		// out parameters will be assigned to implicitly upon return.
		if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
			v.SetByval(true)
		} else {
			// By-reference capture: take the variable's address instead.
			outermost.Name().SetAddrtaken(true)
			outer = nodAddr(outer)
		}

		if base.Flag.LowerM > 1 {
			var name *types.Sym
			if v.Curfn != nil && v.Curfn.Nname != nil {
				name = v.Curfn.Sym()
			}
			how := "ref"
			if v.Byval() {
				how = "value"
			}
			base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
		}

		outer = typecheck(outer, ctxExpr)
		fn.ClosureEnter.Append(outer)
	}

	fn.ClosureVars = out
	base.Pos = lno
}
// transformclosure is called in a separate phase after escape analysis.
// It transform closure bodies to properly reference captured variables.
func transformclosure(fn *ir.Func) {
@ -256,7 +106,7 @@ func transformclosure(fn *ir.Func) {
// we introduce function param &v *T
// and v remains PAUTOHEAP with &v heapaddr
// (accesses will implicitly deref &v).
addr := NewName(lookup("&" + v.Sym().Name))
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
v.Heapaddr = addr
v = addr
@ -300,7 +150,7 @@ func transformclosure(fn *ir.Func) {
} else {
// Declare variable holding addresses taken from closure
// and initialize in entry prologue.
addr := NewName(lookup("&" + v.Sym().Name))
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
addr.Class_ = ir.PAUTO
addr.SetUsed(true)
@ -309,14 +159,14 @@ func transformclosure(fn *ir.Func) {
v.Heapaddr = addr
var src ir.Node = cr
if v.Byval() {
src = nodAddr(cr)
src = typecheck.NodAddr(cr)
}
body = append(body, ir.NewAssignStmt(base.Pos, addr, src))
}
}
if len(body) > 0 {
typecheckslice(body, ctxStmt)
typecheck.Stmts(body)
fn.Enter.Set(body)
fn.SetNeedctxt(true)
}
@ -346,38 +196,6 @@ func closuredebugruntimecheck(clo *ir.ClosureExpr) {
}
}
// closureType returns the struct type used to hold all the information
// needed in the closure for clo (clo must be a OCLOSURE node).
// The address of a variable of the returned type can be cast to a func.
//
// Create closure in the form of a composite literal.
// supposing the closure captures an int i and a string s
// and has one float64 argument and no results,
// the generated code looks like:
//
//	clos = &struct{.F uintptr; i *int; s *string}{func.1, &i, &s}
//
// The use of the struct provides type information to the garbage
// collector so that it can walk the closure. We could use (in this case)
// [3]unsafe.Pointer instead, but that would leave the gc in the dark.
// The information appears in the binary in the form of type descriptors;
// the struct is unnamed so that closures in multiple packages with the
// same struct type can share the descriptor.
func closureType(clo *ir.ClosureExpr) *types.Type {
	// First field is the function pointer; one field follows per capture.
	fields := make([]*ir.Field, 0, 1+len(clo.Func.ClosureVars))
	fields = append(fields, ir.NewField(base.Pos, lookup(".F"), nil, types.Types[types.TUINTPTR]))
	for _, v := range clo.Func.ClosureVars {
		typ := v.Type()
		if !v.Byval() {
			// By-reference captures store a pointer to the variable.
			typ = types.NewPtr(typ)
		}
		fields = append(fields, ir.NewField(base.Pos, v.Sym(), nil, typ))
	}
	typ := tostruct(fields)
	typ.SetNoalg(true)
	return typ
}
func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
fn := clo.Func
@ -390,17 +208,17 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
}
closuredebugruntimecheck(clo)
typ := closureType(clo)
typ := typecheck.ClosureType(clo)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(clo.Esc())
clos.List.Set(append([]ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, fn.Nname)}, fn.ClosureEnter...))
addr := nodAddr(clos)
addr := typecheck.NodAddr(clos)
addr.SetEsc(clo.Esc())
// Force type conversion from *struct to the func type.
cfn := convnop(addr, clo.Type())
cfn := typecheck.ConvNop(addr, clo.Type())
// non-escaping temp to use, if any.
if x := clo.Prealloc; x != nil {
@ -414,110 +232,6 @@ func walkclosure(clo *ir.ClosureExpr, init *ir.Nodes) ir.Node {
return walkexpr(cfn, init)
}
// typecheckpartialcall converts a method value expression (x.M used as
// a value) into an OCALLPART node backed by a generated wrapper function.
func typecheckpartialcall(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
	if op := n.Op(); op != ir.ODOTINTER && op != ir.ODOTMETH {
		base.Fatalf("invalid typecheckpartialcall")
	}
	dot := n.(*ir.SelectorExpr)

	// Create top-level function.
	fn := makepartialcall(dot, dot.Type(), sym)
	fn.SetWrapper(true)

	return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
}
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls. The wrapper reads the receiver from the closure
// and forwards its arguments (and DDD-ness) to the real method.
// Wrappers are deduplicated per (receiver type, method) via sym.Uniq.
func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
	rcvrtype := dot.X.Type()
	sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")

	// Reuse an already-generated wrapper for this method symbol.
	if sym.Uniq() {
		return sym.Def.(*ir.Func)
	}
	sym.SetUniq(true)

	// Save and restore global typechecking state around the generated function.
	savecurfn := ir.CurFunc
	saveLineNo := base.Pos
	ir.CurFunc = nil

	// Set line number equal to the line number where the method is declared.
	var m *types.Field
	if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
		base.Pos = m.Pos
	}
	// Note: !m.Pos.IsKnown() happens for method expressions where
	// the method is implicitly declared. The Error method of the
	// built-in error type is one such method. We leave the line
	// number at the use of the method expression in this
	// case. See issue 29389.

	tfn := ir.NewFuncType(base.Pos, nil,
		structargs(t0.Params(), true),
		structargs(t0.Results(), false))

	fn := dclfunc(sym, tfn)
	fn.SetDupok(true)
	fn.SetNeedctxt(true)

	// Declare and initialize variable holding receiver.
	cr := ir.NewClosureRead(rcvrtype, types.Rnd(int64(types.PtrSize), int64(rcvrtype.Align)))
	ptr := NewName(lookup(".this"))
	declare(ptr, ir.PAUTO)
	ptr.SetUsed(true)
	var body []ir.Node
	if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
		ptr.SetType(rcvrtype)
		body = append(body, ir.NewAssignStmt(base.Pos, ptr, cr))
	} else {
		// Value receiver: hold its address so the method call can deref.
		ptr.SetType(types.NewPtr(rcvrtype))
		body = append(body, ir.NewAssignStmt(base.Pos, ptr, nodAddr(cr)))
	}

	call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
	call.Args.Set(ir.ParamNames(tfn.Type()))
	call.IsDDD = tfn.Type().IsVariadic()
	if t0.NumResults() != 0 {
		ret := ir.NewReturnStmt(base.Pos, nil)
		ret.Results = []ir.Node{call}
		body = append(body, ret)
	} else {
		body = append(body, call)
	}

	fn.Body.Set(body)
	funcbody()

	typecheckFunc(fn)
	// Need to typecheck the body of the just-generated wrapper.
	// typecheckslice() requires that Curfn is set when processing an ORETURN.
	ir.CurFunc = fn
	typecheckslice(fn.Body, ctxStmt)
	sym.Def = fn
	Target.Decls = append(Target.Decls, fn)
	ir.CurFunc = savecurfn
	base.Pos = saveLineNo

	return fn
}
// partialCallType returns the struct type used to hold all the information
// needed in the closure for n (n must be a OCALLPART node).
// The address of a variable of the returned type can be cast to a func.
func partialCallType(n *ir.CallPartExpr) *types.Type {
	// F is the wrapper function pointer, R the bound receiver.
	fields := []*ir.Field{
		ir.NewField(base.Pos, lookup("F"), nil, types.Types[types.TUINTPTR]),
		ir.NewField(base.Pos, lookup("R"), nil, n.X.Type()),
	}
	t := tostruct(fields)
	t.SetNoalg(true)
	return t
}
func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
// Create closure in the form of a composite literal.
// For x.M with receiver (x) type T, the generated code looks like:
@ -532,24 +246,24 @@ func walkpartialcall(n *ir.CallPartExpr, init *ir.Nodes) ir.Node {
n.X = cheapexpr(n.X, init)
n.X = walkexpr(n.X, nil)
tab := typecheck(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X), ctxExpr)
tab := typecheck.Expr(ir.NewUnaryExpr(base.Pos, ir.OITAB, n.X))
c := ir.NewUnaryExpr(base.Pos, ir.OCHECKNIL, tab)
c.SetTypecheck(1)
init.Append(c)
}
typ := partialCallType(n)
typ := typecheck.PartialCallType(n)
clos := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
clos.SetEsc(n.Esc())
clos.List = []ir.Node{ir.NewUnaryExpr(base.Pos, ir.OCFUNC, n.Func.Nname), n.X}
addr := nodAddr(clos)
addr := typecheck.NodAddr(clos)
addr.SetEsc(n.Esc())
// Force type conversion from *struct to the func type.
cfn := convnop(addr, n.Type())
cfn := typecheck.ConvNop(addr, n.Type())
// non-escaping temp to use, if any.
if x := n.Prealloc; x != nil {

View file

@ -8,11 +8,11 @@ import (
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
"strings"
)
func EnableNoWriteBarrierRecCheck() {
@ -28,154 +28,6 @@ func NoWriteBarrierRecCheck() {
var nowritebarrierrecCheck *nowritebarrierrecChecker
// redeclare emits a diagnostic about symbol s being redeclared at pos.
func redeclare(pos src.XPos, s *types.Sym, where string) {
	if s.Lastlineno.IsKnown() {
		prevPos := s.Lastlineno

		// When an import and a declaration collide in separate files,
		// present the import as the "redeclared", because the declaration
		// is visible where the import is, but not vice versa.
		// See issue 4510.
		if s.Def == nil {
			pos, prevPos = prevPos, pos
		}

		base.ErrorfAt(pos, "%v redeclared %s\n"+
			"\t%v: previous declaration", s, where, base.FmtPos(prevPos))
		return
	}

	// No recorded declaration line: the previous declaration came in
	// via a dot-import; report the importing package instead.
	pkgName := dotImportRefs[s.Def.(*ir.Ident)]
	base.ErrorfAt(pos, "%v redeclared %s\n"+
		"\t%v: previous declaration during import %q", s, where, base.FmtPos(pkgName.Pos()), pkgName.Pkg.Path)
}
var vargen int
// declare individual names - var, typ, const
var declare_typegen int
// declare records that Node n declares symbol n.Sym in the specified
// declaration context. It pushes the symbol into the current scope,
// reports redeclarations, assigns the node's Vargen/Class, and exports
// package-level names as needed. Blank identifiers are ignored.
func declare(n *ir.Name, ctxt ir.Class) {
	if ir.IsBlank(n) {
		return
	}

	s := n.Sym()

	// kludgy: typecheckok means we're past parsing. Eg genwrapper may declare out of package names later.
	if !inimport && !typecheckok && s.Pkg != types.LocalPkg {
		base.ErrorfAt(n.Pos(), "cannot declare name %v", s)
	}

	gen := 0
	if ctxt == ir.PEXTERN {
		if s.Name == "init" {
			base.ErrorfAt(n.Pos(), "cannot declare init - must be func")
		}
		if s.Name == "main" && s.Pkg.Name == "main" {
			base.ErrorfAt(n.Pos(), "cannot declare main - must be func")
		}
		Target.Externs = append(Target.Externs, n)
	} else {
		if ir.CurFunc == nil && ctxt == ir.PAUTO {
			base.Pos = n.Pos()
			base.Fatalf("automatic outside function")
		}
		if ir.CurFunc != nil && ctxt != ir.PFUNC && n.Op() == ir.ONAME {
			ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
		}
		// Assign a per-kind generation number used to disambiguate
		// same-named locals/types (becomes n.Vargen below).
		if n.Op() == ir.OTYPE {
			declare_typegen++
			gen = declare_typegen
		} else if n.Op() == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
			vargen++
			gen = vargen
		}
		types.Pushdcl(s)
		n.Curfn = ir.CurFunc
	}

	if ctxt == ir.PAUTO {
		n.SetFrameOffset(0)
	}

	if s.Block == types.Block {
		// functype will print errors about duplicate function arguments.
		// Don't repeat the error here.
		if ctxt != ir.PPARAM && ctxt != ir.PPARAMOUT {
			redeclare(n.Pos(), s, "in this block")
		}
	}

	s.Block = types.Block
	s.Lastlineno = base.Pos
	s.Def = n
	n.Vargen = int32(gen)
	n.Class_ = ctxt
	if ctxt == ir.PFUNC {
		n.Sym().SetFunc(true)
	}

	autoexport(n, ctxt)
}
// declare variables from grammar
// new_name_list (type | [type] = expr_list)
//
// variter declares the variables in vl with optional type t and
// initializer expressions el, returning the initialization statements
// (ODCL and assignment nodes) to run. A single multi-value expression
// assigned to multiple variables becomes one OAS2 statement.
func variter(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
	var init []ir.Node
	doexpr := len(el) > 0

	if len(el) == 1 && len(vl) > 1 {
		// var a, b, c = f()  — one expression, many variables:
		// build a single OAS2 whose Defn backs every variable.
		e := el[0]
		as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
		as2.Rhs = []ir.Node{e}
		for _, v := range vl {
			as2.Lhs.Append(v)
			declare(v, dclcontext)
			v.Ntype = t
			v.Defn = as2
			if ir.CurFunc != nil {
				init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
			}
		}

		return append(init, as2)
	}

	// One initializer (or none) per variable.
	for i, v := range vl {
		var e ir.Node
		if doexpr {
			if i >= len(el) {
				base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
				break
			}
			e = el[i]
		}

		declare(v, dclcontext)
		v.Ntype = t

		if e != nil || ir.CurFunc != nil || ir.IsBlank(v) {
			if ir.CurFunc != nil {
				init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
			}
			as := ir.NewAssignStmt(base.Pos, v, e)
			init = append(init, as)
			if e != nil {
				v.Defn = as
			}
		}
	}

	if len(el) > len(vl) {
		base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
	}
	return init
}
// oldname returns the Node that declares symbol s in the current scope.
// If no such Node currently exists, an ONONAME Node is returned instead.
// Automatically creates a new closure variable if the referenced symbol was
@ -204,7 +56,7 @@ func oldname(s *types.Sym) ir.Node {
c := n.Name().Innermost
if c == nil || c.Curfn != ir.CurFunc {
// Do not have a closure var for the active closure yet; make one.
c = NewName(s)
c = typecheck.NewName(s)
c.Class_ = ir.PAUTOHEAP
c.SetIsClosureVar(true)
c.SetIsDDD(n.IsDDD())
@ -236,419 +88,10 @@ func importName(sym *types.Sym) ir.Node {
return n
}
// := declarations

// colasname reports whether n may appear on the left side of a :=
// declaration: it must be one of the name-like ops and carry a symbol.
func colasname(n ir.Node) bool {
	op := n.Op()
	if op != ir.ONAME && op != ir.ONONAME && op != ir.OPACK && op != ir.OTYPE && op != ir.OLITERAL {
		return false
	}
	return n.Sym() != nil
}
// colasdefn declares the new variables appearing on the left side of
// the := statement defn. Names that are genuinely new in the current
// block are replaced in left (in place) by freshly declared ONAME
// nodes; errors are reported for non-names, repeated names, and the
// "no new variables" case.
func colasdefn(left []ir.Node, defn ir.Node) {
	// Mark each symbol so a repeat on the LHS can be detected below.
	for _, n := range left {
		if n.Sym() != nil {
			n.Sym().SetUniq(true)
		}
	}

	var nnew, nerr int
	for i, n := range left {
		if ir.IsBlank(n) {
			continue
		}
		if !colasname(n) {
			base.ErrorfAt(defn.Pos(), "non-name %v on left side of :=", n)
			nerr++
			continue
		}

		if !n.Sym().Uniq() {
			base.ErrorfAt(defn.Pos(), "%v repeated on left side of :=", n.Sym())
			n.SetDiag(true)
			nerr++
			continue
		}

		n.Sym().SetUniq(false)
		if n.Sym().Block == types.Block {
			// Already declared in this block: an assignment, not a new variable.
			continue
		}

		nnew++
		n := NewName(n.Sym())
		declare(n, dclcontext)
		n.Defn = defn
		defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
		left[i] = n
	}

	if nnew == 0 && nerr == 0 {
		base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
	}
}
// declare the function proper
// and declare the arguments.
// called in extern-declaration context
// returns in auto-declaration context.
//
// funchdr pushes the current Curfn/dclcontext onto funcStack (popped
// by funcbody), makes fn the current function, opens a new declaration
// block, and declares fn's parameters.
func funchdr(fn *ir.Func) {
	// change the declaration context from extern to auto
	funcStack = append(funcStack, funcStackEnt{ir.CurFunc, dclcontext})
	ir.CurFunc = fn
	dclcontext = ir.PAUTO

	types.Markdcl()

	// Parameters come either from the syntactic type (normal case)
	// or from an already-built *types.Type (import case).
	if fn.Nname.Ntype != nil {
		funcargs(fn.Nname.Ntype.(*ir.FuncType))
	} else {
		funcargs2(fn.Type())
	}
}
// funcargs declares the receiver, parameters, and results of the
// syntactic function type nt in the current (auto) declaration
// context, inventing ~r/~b names for anonymous or blank results.
func funcargs(nt *ir.FuncType) {
	if nt.Op() != ir.OTFUNC {
		base.Fatalf("funcargs %v", nt.Op())
	}

	// re-start the variable generation number
	// we want to use small numbers for the return variables,
	// so let them have the chunk starting at 1.
	//
	// TODO(mdempsky): This is ugly, and only necessary because
	// esc.go uses Vargen to figure out result parameters' index
	// within the result tuple.
	vargen = len(nt.Results)

	// declare the receiver and in arguments.
	if nt.Recv != nil {
		funcarg(nt.Recv, ir.PPARAM)
	}
	for _, n := range nt.Params {
		funcarg(n, ir.PPARAM)
	}

	// Save vargen so results get the 1..len(Results) range promised above.
	oldvargen := vargen
	vargen = 0

	// declare the out arguments.
	gen := len(nt.Params)
	for _, n := range nt.Results {
		if n.Sym == nil {
			// Name so that escape analysis can track it. ~r stands for 'result'.
			n.Sym = lookupN("~r", gen)
			gen++
		}
		if n.Sym.IsBlank() {
			// Give it a name so we can assign to it during return. ~b stands for 'blank'.
			// The name must be different from ~r above because if you have
			//  func f() (_ int)
			//  func g() int
			// f is allowed to use a plain 'return' with no arguments, while g is not.
			// So the two cases must be distinguished.
			n.Sym = lookupN("~b", gen)
			gen++
		}

		funcarg(n, ir.PPARAMOUT)
	}

	vargen = oldvargen
}
// funcarg declares the single (named) parameter field n in context
// ctxt, creating its ONAME node and assigning it the next Vargen
// number. Unnamed fields are skipped.
func funcarg(n *ir.Field, ctxt ir.Class) {
	if n.Sym == nil {
		return
	}

	name := ir.NewNameAt(n.Pos, n.Sym)
	n.Decl = name
	name.Ntype = n.Ntype
	name.SetIsDDD(n.IsDDD)
	declare(name, ctxt)

	vargen++
	n.Decl.Vargen = int32(vargen)
}
// Same as funcargs, except run over an already constructed TFUNC.
// This happens during import, where the hidden_fndcl rule has
// used functype directly to parse the function's type.
func funcargs2(t *types.Type) {
	if t.Kind() != types.TFUNC {
		base.Fatalf("funcargs2 %v", t)
	}

	// Declare each group of fields in its declaration context.
	declareFields := func(fields []*types.Field, ctxt ir.Class) {
		for _, f := range fields {
			funcarg2(f, ctxt)
		}
	}
	declareFields(t.Recvs().Fields().Slice(), ir.PPARAM)
	declareFields(t.Params().Fields().Slice(), ir.PPARAM)
	declareFields(t.Results().Fields().Slice(), ir.PPARAMOUT)
}
// funcarg2 declares the single (named) parameter field f of an
// already-typed signature in context ctxt, wiring f.Nname to a new
// ONAME node. Unnamed fields are skipped.
func funcarg2(f *types.Field, ctxt ir.Class) {
	if f.Sym == nil {
		return
	}
	n := ir.NewNameAt(f.Pos, f.Sym)
	f.Nname = n
	n.SetType(f.Type)
	n.SetIsDDD(f.IsDDD())
	declare(n, ctxt)
}
var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext

// funcStackEnt is one saved frame of (Curfn, dclcontext), pushed by
// funchdr and popped by funcbody.
type funcStackEnt struct {
	curfn      *ir.Func
	dclcontext ir.Class
}
// CheckFuncStack reports a fatal error if funchdr/funcbody calls were
// unbalanced, i.e. if the saved Curfn/dclcontext stack is not empty.
func CheckFuncStack() {
	if depth := len(funcStack); depth != 0 {
		base.Fatalf("funcStack is non-empty: %v", depth)
	}
}
// finish the body.
// called in auto-declaration context.
// returns in extern-declaration context.
//
// funcbody closes the declaration block opened by funchdr and restores
// the previously saved Curfn/dclcontext from funcStack.
func funcbody() {
	// change the declaration context from auto to previous context
	types.Popdcl()
	var e funcStackEnt
	funcStack, e = funcStack[:len(funcStack)-1], funcStack[len(funcStack)-1]
	ir.CurFunc, dclcontext = e.curfn, e.dclcontext
}
// structs, functions, and methods.
// they don't belong here, but where do they belong?

// checkembeddedtype reports errors for embedded struct fields whose
// type may not be embedded (pointers, unsafe pointers, pointers to
// interfaces) and records the embedding position on forward types.
func checkembeddedtype(t *types.Type) {
	if t == nil {
		return
	}

	if t.Sym() == nil && t.IsPtr() {
		// Anonymous pointer embedding: look through to the element,
		// which must not be an interface.
		t = t.Elem()
		if t.IsInterface() {
			base.Errorf("embedded type cannot be a pointer to interface")
		}
	}

	if t.IsPtr() || t.IsUnsafePtr() {
		base.Errorf("embedded type cannot be a pointer")
	} else if t.Kind() == types.TFORW && !t.ForwardType().Embedlineno.IsKnown() {
		// Remember where the forward-declared type was embedded so a
		// later error can point here.
		t.ForwardType().Embedlineno = base.Pos
	}
}
// checkdupfields emits errors for duplicately named fields or methods in
// a list of struct or interface types.
func checkdupfields(what string, fss ...[]*types.Field) {
	seen := make(map[*types.Sym]bool)
	for _, fs := range fss {
		for _, f := range fs {
			sym := f.Sym
			switch {
			case sym == nil || sym.IsBlank():
				// Unnamed and blank fields cannot collide.
			case seen[sym]:
				base.ErrorfAt(f.Pos, "duplicate %s %s", what, sym.Name)
			default:
				seen[sym] = true
			}
		}
	}
}
// convert a parsed id/type list into
// a type for struct/interface/arglist
//
// tostruct typechecks the field types of l and builds the
// corresponding *types.Type struct, checking embedded types and
// duplicate field names. base.Pos is saved and restored around the
// per-field typechecking.
func tostruct(l []*ir.Field) *types.Type {
	lno := base.Pos

	fields := make([]*types.Field, len(l))
	for i, n := range l {
		base.Pos = n.Pos

		// Resolve the syntactic type expression, if still present.
		if n.Ntype != nil {
			n.Type = typecheckNtype(n.Ntype).Type()
			n.Ntype = nil
		}
		f := types.NewField(n.Pos, n.Sym, n.Type)
		if n.Embedded {
			checkembeddedtype(n.Type)
			f.Embedded = 1
		}
		f.Note = n.Note
		fields[i] = f
	}
	checkdupfields("field", fields)

	base.Pos = lno
	return types.NewStruct(types.LocalPkg, fields)
}
// tointerface typechecks the method fields of nmethods and builds the
// corresponding interface type. An empty method list yields the
// canonical empty interface type.
func tointerface(nmethods []*ir.Field) *types.Type {
	if len(nmethods) == 0 {
		return types.Types[types.TINTER]
	}

	lno := base.Pos

	methods := make([]*types.Field, len(nmethods))
	for i, n := range nmethods {
		base.Pos = n.Pos
		// Resolve the syntactic method type, if still present.
		if n.Ntype != nil {
			n.Type = typecheckNtype(n.Ntype).Type()
			n.Ntype = nil
		}
		methods[i] = types.NewField(n.Pos, n.Sym, n.Type)
	}

	base.Pos = lno
	return types.NewInterface(types.LocalPkg, methods)
}
// fakeRecv returns an anonymous syntactic receiver field with the
// fake receiver type used for interface method signatures.
func fakeRecv() *ir.Field {
	return ir.NewField(base.Pos, nil, nil, types.FakeRecvType())
}
// fakeRecvField is the *types.Field counterpart of fakeRecv, used for
// already-typed interface method signatures.
func fakeRecvField() *types.Field {
	return types.NewField(src.NoXPos, nil, types.FakeRecvType())
}
// turn a parsed function declaration into a type
//
// functype typechecks the receiver, parameter, and result fields and
// builds the corresponding signature type, checking for duplicate
// argument names across all three groups.
func functype(nrecv *ir.Field, nparams, nresults []*ir.Field) *types.Type {
	// funarg converts one syntactic field to a *types.Field, resolving
	// its type expression and linking back to its declaration node.
	funarg := func(n *ir.Field) *types.Field {
		lno := base.Pos
		base.Pos = n.Pos

		if n.Ntype != nil {
			n.Type = typecheckNtype(n.Ntype).Type()
			n.Ntype = nil
		}

		f := types.NewField(n.Pos, n.Sym, n.Type)
		f.SetIsDDD(n.IsDDD)
		if n.Decl != nil {
			n.Decl.SetType(f.Type)
			f.Nname = n.Decl
		}

		base.Pos = lno
		return f
	}
	funargs := func(nn []*ir.Field) []*types.Field {
		res := make([]*types.Field, len(nn))
		for i, n := range nn {
			res[i] = funarg(n)
		}
		return res
	}

	var recv *types.Field
	if nrecv != nil {
		recv = funarg(nrecv)
	}

	t := types.NewSignature(types.LocalPkg, recv, funargs(nparams), funargs(nresults))
	checkdupfields("argument", t.Recvs().FieldSlice(), t.Params().FieldSlice(), t.Results().FieldSlice())
	return t
}
// Add a method, declared as a function.
// - msym is the method symbol
// - t is function type (with receiver)
// Returns a pointer to the existing or added Field; or nil if there's an error.
func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
	if msym == nil {
		base.Fatalf("no method symbol")
	}

	// get parent type sym
	rf := t.Recv() // ptr to this structure
	if rf == nil {
		base.Errorf("missing receiver")
		return nil
	}

	mt := types.ReceiverBaseType(rf.Type)
	if mt == nil || mt.Sym() == nil {
		// Receiver is not a defined type (or a pointer to one):
		// diagnose which invalid-receiver case applies.
		pa := rf.Type
		t := pa
		if t != nil && t.IsPtr() {
			if t.Sym() != nil {
				base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
				return nil
			}
			t = t.Elem()
		}

		switch {
		case t == nil || t.Broke():
			// rely on typecheck having complained before
		case t.Sym() == nil:
			base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t)
		case t.IsPtr():
			base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
		case t.IsInterface():
			base.Errorf("invalid receiver type %v (%v is an interface type)", pa, t)
		default:
			// Should have picked off all the reasons above,
			// but just in case, fall back to generic error.
			base.Errorf("invalid receiver type %v (%L / %L)", pa, pa, t)
		}
		return nil
	}

	if local && mt.Sym().Pkg != types.LocalPkg {
		base.Errorf("cannot define new methods on non-local type %v", mt)
		return nil
	}

	// Blank methods are type-checked but never added to the method set.
	if msym.IsBlank() {
		return nil
	}

	if mt.IsStruct() {
		// A method may not share a name with a field of the same struct.
		for _, f := range mt.Fields().Slice() {
			if f.Sym == msym {
				base.Errorf("type %v has both field and method named %v", mt, msym)
				f.SetBroke(true)
				return nil
			}
		}
	}

	for _, f := range mt.Methods().Slice() {
		if msym.Name != f.Sym.Name {
			continue
		}
		// types.Identical only checks that incoming and result parameters match,
		// so explicitly check that the receiver parameters match too.
		if !types.Identical(t, f.Type) || !types.Identical(t.Recv().Type, f.Type.Recv().Type) {
			base.Errorf("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t)
		}
		return f
	}

	f := types.NewField(base.Pos, msym, t)
	f.Nname = n.Nname
	f.SetNointerface(nointerface)

	mt.Methods().Append(f)
	return f
}
// funcsym returns s·f.
func funcsym(s *types.Sym) *types.Sym {
// funcsymsmu here serves to protect not just mutations of funcsyms (below),
@ -700,21 +143,6 @@ func makefuncsym(s *types.Sym) {
}
}
// dclfunc creates and declares a new function named sym with the
// syntactic signature tfn (which must be an OTFUNC node). It calls
// funchdr, so the caller is left in auto-declaration context and must
// eventually balance it with funcbody.
func dclfunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
	if tfn.Op() != ir.OTFUNC {
		base.Fatalf("expected OTFUNC node, got %v", tfn)
	}

	fn := ir.NewFunc(base.Pos)
	fn.Nname = ir.NewFuncNameAt(base.Pos, sym, fn)
	fn.Nname.Defn = fn
	fn.Nname.Ntype = tfn
	ir.MarkFunc(fn.Nname)
	funchdr(fn)
	fn.Nname.Ntype = typecheckNtype(fn.Nname.Ntype)
	return fn
}
type nowritebarrierrecChecker struct {
// extraCalls contains extra function calls that may not be
// visible during later analysis. It maps from the ODCLFUNC of
@ -742,7 +170,7 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
// important to handle it for this check, so we model it
// directly. This has to happen before transformclosure since
// it's a lot harder to work out the argument after.
for _, n := range Target.Decls {
for _, n := range typecheck.Target.Decls {
if n.Op() != ir.ODCLFUNC {
continue
}
@ -819,7 +247,7 @@ func (c *nowritebarrierrecChecker) check() {
// q is the queue of ODCLFUNC Nodes to visit in BFS order.
var q ir.NameQueue
for _, n := range Target.Decls {
for _, n := range typecheck.Target.Decls {
if n.Op() != ir.ODCLFUNC {
continue
}

View file

@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
@ -61,13 +62,13 @@ func varEmbed(p *noder, names []*ir.Name, typ ir.Ntype, exprs []ir.Node, embeds
p.errorAt(pos, "go:embed cannot apply to var without type")
return exprs
}
if dclcontext != ir.PEXTERN {
if typecheck.DeclContext != ir.PEXTERN {
p.errorAt(pos, "go:embed cannot apply to var inside func")
return exprs
}
v := names[0]
Target.Embeds = append(Target.Embeds, v)
typecheck.Target.Embeds = append(typecheck.Target.Embeds, v)
v.Embed = new([]ir.Embed)
for _, e := range embeds {
*v.Embed = append(*v.Embed, ir.Embed{Pos: p.makeXPos(e.Pos), Patterns: e.Patterns})
@ -184,7 +185,7 @@ func embedFileLess(x, y string) bool {
}
func dumpembeds() {
for _, v := range Target.Embeds {
for _, v := range typecheck.Target.Embeds {
initEmbed(v)
}
}

View file

@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@ -870,7 +871,7 @@ func (e *Escape) call(ks []EscHole, call, where ir.Node) {
case ir.OCALLFUNC, ir.OCALLMETH, ir.OCALLINTER:
call := call.(*ir.CallExpr)
fixVariadicCall(call)
typecheck.FixVariadicCall(call)
// Pick out the function callee, if statically known.
var fn *ir.Name
@ -1877,10 +1878,10 @@ func heapAllocReason(n ir.Node) string {
return "too large for stack"
}
if n.Op() == ir.OCLOSURE && closureType(n.(*ir.ClosureExpr)).Size() >= ir.MaxImplicitStackVarSize {
if n.Op() == ir.OCLOSURE && typecheck.ClosureType(n.(*ir.ClosureExpr)).Size() >= ir.MaxImplicitStackVarSize {
return "too large for stack"
}
if n.Op() == ir.OCALLPART && partialCallType(n.(*ir.CallPartExpr)).Size() >= ir.MaxImplicitStackVarSize {
if n.Op() == ir.OCALLPART && typecheck.PartialCallType(n.(*ir.CallPartExpr)).Size() >= ir.MaxImplicitStackVarSize {
return "too large for stack"
}
@ -1992,8 +1993,8 @@ func moveToHeap(n *ir.Name) {
// Allocate a local stack variable to hold the pointer to the heap copy.
// temp will add it to the function declaration list automatically.
heapaddr := temp(types.NewPtr(n.Type()))
heapaddr.SetSym(lookup("&" + n.Sym().Name))
heapaddr := typecheck.Temp(types.NewPtr(n.Type()))
heapaddr.SetSym(typecheck.Lookup("&" + n.Sym().Name))
heapaddr.SetPos(n.Pos())
// Unset AutoTemp to persist the &foo variable name through SSA to
@ -2013,7 +2014,7 @@ func moveToHeap(n *ir.Name) {
// Preserve a copy so we can still write code referring to the original,
// and substitute that copy into the function declaration list
// so that analyses of the local (on-stack) variables use it.
stackcopy := NewName(n.Sym())
stackcopy := typecheck.NewName(n.Sym())
stackcopy.SetType(n.Type())
stackcopy.SetFrameOffset(n.FrameOffset())
stackcopy.Class_ = n.Class_

View file

@ -7,9 +7,9 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/src"
"fmt"
"go/constant"
)
@ -21,54 +21,16 @@ func exportf(bout *bio.Writer, format string, args ...interface{}) {
}
}
// exportsym marks n for export (or reexport).
// It is idempotent: a symbol already on the export list is skipped.
func exportsym(n *ir.Name) {
	if n.Sym().OnExportList() {
		return
	}
	n.Sym().SetOnExportList(true)

	if base.Flag.E != 0 {
		fmt.Printf("export symbol %v\n", n.Sym())
	}

	Target.Exports = append(Target.Exports, n)
}
// initname reports whether s is the reserved name "init", which names
// package initialization functions and is always exported.
func initname(s string) bool {
	switch s {
	case "init":
		return true
	default:
		return false
	}
}
// autoexport marks n for export if it is a package-scope function or
// variable whose name is exported (or is "init"), and records it on
// the assembly-header list when -asmhdr is in effect.
func autoexport(n *ir.Name, ctxt ir.Class) {
	if n.Sym().Pkg != types.LocalPkg {
		return
	}
	// Only package-scope names declared at package level are candidates.
	if (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) || dclcontext != ir.PEXTERN {
		return
	}
	// Methods are exported with their receiver type, not on their own.
	if n.Type() != nil && n.Type().IsKind(types.TFUNC) && ir.IsMethod(n) {
		return
	}

	if types.IsExported(n.Sym().Name) || initname(n.Sym().Name) {
		exportsym(n)
	}
	if base.Flag.AsmHdr != "" && !n.Sym().Asm() {
		n.Sym().SetAsm(true)
		Target.Asms = append(Target.Asms, n)
	}
}
func dumpexport(bout *bio.Writer) {
p := &exporter{marked: make(map[*types.Type]bool)}
for _, n := range Target.Exports {
for _, n := range typecheck.Target.Exports {
p.markObject(n)
}
// The linker also looks for the $$ marker - use char after $$ to distinguish format.
exportf(bout, "\n$$B\n") // indicate binary export format
off := bout.Offset()
iexport(bout.Writer)
typecheck.WriteExports(bout.Writer)
size := bout.Offset() - off
exportf(bout, "\n$$\n")
@ -77,78 +39,13 @@ func dumpexport(bout *bio.Writer) {
}
}
// importsym creates the declaration node for imported symbol s with op
// and class ctxt, recording it as the symbol's package-level
// definition. It is a fatal error if s already has a definition.
// ipkg is the package being imported.
func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class) *ir.Name {
	if n := s.PkgDef(); n != nil {
		base.Fatalf("importsym of symbol that already exists: %v", n)
	}

	n := ir.NewDeclNameAt(pos, op, s)
	n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
	s.SetPkgDef(n)
	s.Importdef = ipkg
	return n
}
// importtype returns the named type declared by symbol s.
// If no such type has been declared yet, a forward declaration is returned.
// ipkg is the package being imported
func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *ir.Name {
	n := importsym(ipkg, pos, s, ir.OTYPE, ir.PEXTERN)
	n.SetType(types.NewNamed(n))
	return n
}
// importobj declares symbol s as an imported object representable by op.
// ipkg is the package being imported
func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Name {
	n := importsym(ipkg, pos, s, op, ctxt)
	n.SetType(t)
	if ctxt == ir.PFUNC {
		// Mark function symbols so the linker treats them as code.
		n.Sym().SetFunc(true)
	}
	return n
}
// importconst declares symbol s as an imported constant with type t and value val.
// ipkg is the package being imported
func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) *ir.Name {
	n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
	n.SetVal(val)
	return n
}
// importfunc declares symbol s as an imported function with type t.
// ipkg is the package being imported
func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
	n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)

	// Attach an ir.Func so the name behaves like a locally declared function.
	fn := ir.NewFunc(pos)
	fn.SetType(t)
	n.SetFunc(fn)
	fn.Nname = n

	return n
}
// importvar declares symbol s as an imported variable with type t.
// ipkg is the package being imported
func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
	return importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
}
// importalias declares symbol s as an imported type alias with type t.
// ipkg is the package being imported
func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
	return importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
}
func dumpasmhdr() {
b, err := bio.Create(base.Flag.AsmHdr)
if err != nil {
base.Fatalf("%v", err)
}
fmt.Fprintf(b, "// generated by compile -asmhdr from package %s\n\n", types.LocalPkg.Name)
for _, n := range Target.Asms {
for _, n := range typecheck.Target.Asms {
if n.Sym().IsBlank() {
continue
}
@ -176,3 +73,83 @@ func dumpasmhdr() {
b.Close()
}
// exporter tracks the set of types already visited while marking
// objects reachable from the export list.
type exporter struct {
	marked map[*types.Type]bool // types already seen by markType
}
// markObject visits a reachable object.
// Functions have their inline bodies flooded (so they can be
// reexported) before the object's type is marked.
func (p *exporter) markObject(n ir.Node) {
	if n.Op() == ir.ONAME {
		n := n.(*ir.Name)
		if n.Class_ == ir.PFUNC {
			inlFlood(n, typecheck.Export)
		}
	}

	p.markType(n.Type())
}
// markType recursively visits types reachable from t to identify
// functions whose inline bodies may be needed.
func (p *exporter) markType(t *types.Type) {
	if p.marked[t] {
		return
	}
	p.marked[t] = true

	// If this is a named type, mark all of its associated
	// methods. Skip interface types because t.Methods contains
	// only their unexpanded method set (i.e., exclusive of
	// interface embeddings), and the switch statement below
	// handles their full method set.
	if t.Sym() != nil && t.Kind() != types.TINTER {
		for _, m := range t.Methods().Slice() {
			if types.IsExported(m.Sym.Name) {
				p.markObject(ir.AsNode(m.Nname))
			}
		}
	}

	// Recursively mark any types that can be produced given a
	// value of type t: dereferencing a pointer; indexing or
	// iterating over an array, slice, or map; receiving from a
	// channel; accessing a struct field or interface method; or
	// calling a function.
	//
	// Notably, we don't mark function parameter types, because
	// the user already needs some way to construct values of
	// those types.
	switch t.Kind() {
	case types.TPTR, types.TARRAY, types.TSLICE:
		p.markType(t.Elem())

	case types.TCHAN:
		// Only the receivable direction can produce values.
		if t.ChanDir().CanRecv() {
			p.markType(t.Elem())
		}

	case types.TMAP:
		p.markType(t.Key())
		p.markType(t.Elem())

	case types.TSTRUCT:
		for _, f := range t.FieldSlice() {
			if types.IsExported(f.Sym.Name) || f.Embedded != 0 {
				p.markType(f.Type)
			}
		}

	case types.TFUNC:
		for _, f := range t.Results().FieldSlice() {
			p.markType(f.Type)
		}

	case types.TINTER:
		for _, f := range t.FieldSlice() {
			if types.IsExported(f.Sym.Name) {
				p.markType(f.Type)
			}
		}
	}
}

View file

@ -1,76 +0,0 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"strconv"
)
// sysfunc looks up Go function name in package runtime. This function
// must follow the internal calling convention.
func sysfunc(name string) *obj.LSym {
	s := ir.Pkgs.Runtime.Lookup(name)
	s.SetFunc(true)
	return s.Linksym()
}
// sysvar looks up a variable (or assembly function) name in package
// runtime. If this is a function, it may have a special calling
// convention.
func sysvar(name string) *obj.LSym {
	return ir.Pkgs.Runtime.Lookup(name).Linksym()
}
// autotmpname returns the name for an autotmp variable numbered n.
func autotmpname(n int) string {
	// Give each tmp a different name so that they can be registerized.
	// Add a preceding . to avoid clashing with legal names.
	const prefix = ".autotmp_"
	// Start with a buffer big enough to hold a large n.
	// (The extra trailing space is sliced off immediately, leaving
	// spare capacity for AppendInt to use without reallocating.)
	b := []byte(prefix + "      ")[:len(prefix)]
	b = strconv.AppendInt(b, int64(n), 10)
	return types.InternString(b)
}
// make a new Node off the books
//
// tempAt allocates a new anonymous auto-temporary of type t in curfn
// at position pos. The temp is marked used and AutoTemp, appended to
// curfn.Dcl, and is not entered into any scope/symbol tables.
func tempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
	if curfn == nil {
		base.Fatalf("no curfn for tempAt")
	}
	if curfn.Op() == ir.OCLOSURE {
		ir.Dump("tempAt", curfn)
		base.Fatalf("adding tempAt to wrong closure function")
	}
	if t == nil {
		base.Fatalf("tempAt called with nil type")
	}

	// Number temps by the current Dcl length so names are unique per function.
	s := &types.Sym{
		Name: autotmpname(len(curfn.Dcl)),
		Pkg:  types.LocalPkg,
	}
	n := ir.NewNameAt(pos, s)
	s.Def = n
	n.SetType(t)
	n.Class_ = ir.PAUTO
	n.SetEsc(ir.EscNever)
	n.Curfn = curfn
	n.SetUsed(true)
	n.SetAutoTemp(true)
	curfn.Dcl = append(curfn.Dcl, n)

	// Ensure the type's size/alignment is computed before use.
	types.CalcSize(t)

	return n
}
// temp allocates a new auto-temporary of type t in the current
// function at the current position. See tempAt.
func temp(t *types.Type) *ir.Name {
	return tempAt(base.Pos, ir.CurFunc, t)
}

View file

@ -5,7 +5,6 @@
package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/types"
"cmd/internal/obj"
@ -14,37 +13,13 @@ import (
var pragcgobuf [][]string
var decldepth int32
var inimport bool // set during import
var zerosize int64
var (
okforeq [types.NTYPE]bool
okforadd [types.NTYPE]bool
okforand [types.NTYPE]bool
okfornone [types.NTYPE]bool
okforbool [types.NTYPE]bool
okforcap [types.NTYPE]bool
okforlen [types.NTYPE]bool
okforarith [types.NTYPE]bool
)
var (
okfor [ir.OEND][]bool
iscmp [ir.OEND]bool
)
var (
funcsymsmu sync.Mutex // protects funcsyms and associated package lookups (see func funcsym)
funcsyms []*types.Sym
)
var dclcontext ir.Class // PEXTERN/PAUTO
var typecheckok bool
// interface to back end
type Arch struct {

View file

@ -34,6 +34,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/objabi"
@ -196,11 +197,11 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
// Q: is this needed?
savepos := base.Pos
savedclcontext := dclcontext
savedclcontext := typecheck.DeclContext
savedcurfn := ir.CurFunc
base.Pos = base.AutogeneratedPos
dclcontext = ir.PEXTERN
typecheck.DeclContext = ir.PEXTERN
// At the moment we don't support wrapping a method, we'd need machinery
// below to handle the receiver. Panic if we see this scenario.
@ -213,11 +214,11 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
var noReceiver *ir.Field
tfn := ir.NewFuncType(base.Pos,
noReceiver,
structargs(ft.Params(), true),
structargs(ft.Results(), false))
typecheck.NewFuncParams(ft.Params(), true),
typecheck.NewFuncParams(ft.Results(), false))
// Reuse f's types.Sym to create a new ODCLFUNC/function.
fn := dclfunc(f.Nname.Sym(), tfn)
fn := typecheck.DeclFunc(f.Nname.Sym(), tfn)
fn.SetDupok(true)
fn.SetWrapper(true) // ignore frame for panic+recover matching
@ -281,22 +282,22 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
}
fn.Body.Append(tail)
funcbody()
typecheck.FinishFuncBody()
if base.Debug.DclStack != 0 {
types.CheckDclstack()
}
typecheckFunc(fn)
typecheck.Func(fn)
ir.CurFunc = fn
typecheckslice(fn.Body, ctxStmt)
typecheck.Stmts(fn.Body)
escapeFuncs([]*ir.Func{fn}, false)
Target.Decls = append(Target.Decls, fn)
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
// Restore previous context.
base.Pos = savepos
dclcontext = savedclcontext
typecheck.DeclContext = savedclcontext
ir.CurFunc = savedcurfn
}

View file

@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
)
@ -17,12 +18,8 @@ import (
// the name, normally "pkg.init", is altered to "pkg.init.0".
var renameinitgen int
// Function collecting autotmps generated during typechecking,
// to be included in the package-level init function.
var initTodo = ir.NewFunc(base.Pos)
func renameinit() *types.Sym {
s := lookupN("init.", renameinitgen)
s := typecheck.LookupNum("init.", renameinitgen)
renameinitgen++
return s
}
@ -34,14 +31,14 @@ func renameinit() *types.Sym {
// 2) Initialize all the variables that have initializers.
// 3) Run any init functions.
func fninit() *ir.Name {
nf := initOrder(Target.Decls)
nf := initOrder(typecheck.Target.Decls)
var deps []*obj.LSym // initTask records for packages the current package depends on
var fns []*obj.LSym // functions to call for package initialization
// Find imported packages with init tasks.
for _, pkg := range Target.Imports {
n := resolve(ir.NewIdent(base.Pos, pkg.Lookup(".inittask")))
for _, pkg := range typecheck.Target.Imports {
n := typecheck.Resolve(ir.NewIdent(base.Pos, pkg.Lookup(".inittask")))
if n.Op() == ir.ONONAME {
continue
}
@ -54,34 +51,34 @@ func fninit() *ir.Name {
// Make a function that contains all the initialization statements.
if len(nf) > 0 {
base.Pos = nf[0].Pos() // prolog/epilog gets line number of first init stmt
initializers := lookup("init")
fn := dclfunc(initializers, ir.NewFuncType(base.Pos, nil, nil, nil))
for _, dcl := range initTodo.Dcl {
initializers := typecheck.Lookup("init")
fn := typecheck.DeclFunc(initializers, ir.NewFuncType(base.Pos, nil, nil, nil))
for _, dcl := range typecheck.InitTodoFunc.Dcl {
dcl.Curfn = fn
}
fn.Dcl = append(fn.Dcl, initTodo.Dcl...)
initTodo.Dcl = nil
fn.Dcl = append(fn.Dcl, typecheck.InitTodoFunc.Dcl...)
typecheck.InitTodoFunc.Dcl = nil
fn.Body.Set(nf)
funcbody()
typecheck.FinishFuncBody()
typecheckFunc(fn)
typecheck.Func(fn)
ir.CurFunc = fn
typecheckslice(nf, ctxStmt)
typecheck.Stmts(nf)
ir.CurFunc = nil
Target.Decls = append(Target.Decls, fn)
typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
fns = append(fns, initializers.Linksym())
}
if initTodo.Dcl != nil {
if typecheck.InitTodoFunc.Dcl != nil {
// We only generate temps using initTodo if there
// are package-scope initialization statements, so
// something's weird if we get here.
base.Fatalf("initTodo still has declarations")
}
initTodo = nil
typecheck.InitTodoFunc = nil
// Record user init functions.
for _, fn := range Target.Inits {
for _, fn := range typecheck.Target.Inits {
// Skip init functions with empty bodies.
if len(fn.Body) == 1 {
if stmt := fn.Body[0]; stmt.Op() == ir.OBLOCK && len(stmt.(*ir.BlockStmt).List) == 0 {
@ -96,8 +93,8 @@ func fninit() *ir.Name {
}
// Make an .inittask structure.
sym := lookup(".inittask")
task := NewName(sym)
sym := typecheck.Lookup(".inittask")
task := typecheck.NewName(sym)
task.SetType(types.Types[types.TUINT8]) // fake type
task.Class_ = ir.PEXTERN
sym.Def = task

View file

@ -30,6 +30,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
@ -54,7 +55,7 @@ const (
func InlinePackage() {
// Find functions that can be inlined and clone them before walk expands them.
ir.VisitFuncsBottomUp(Target.Decls, func(list []*ir.Func, recursive bool) {
ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
numfns := numNonClosures(list)
for _, n := range list {
if !recursive || numfns > 1 {
@ -72,63 +73,6 @@ func InlinePackage() {
})
}
// Get the function's package. For ordinary functions it's on the ->sym, but for imported methods
// the ->sym can be re-used in the local package, so peel it off the receiver's type.
func fnpkg(fn *ir.Name) *types.Pkg {
	if ir.IsMethod(fn) {
		// method
		rcvr := fn.Type().Recv().Type

		// Look through a pointer receiver to the named base type.
		if rcvr.IsPtr() {
			rcvr = rcvr.Elem()
		}
		if rcvr.Sym() == nil {
			base.Fatalf("receiver with no sym: [%v] %L  (%v)", fn.Sym(), fn, rcvr)
		}
		return rcvr.Sym().Pkg
	}

	// non-method
	return fn.Sym().Pkg
}
// Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck
// because they're a copy of an already checked body.
//
// typecheckinl loads and typechecks the inline body of imported
// function fn. It is a no-op for local functions, whose inline bodies
// were typechecked before being copied.
func typecheckinl(fn *ir.Func) {
	lno := ir.SetPos(fn.Nname)

	expandInline(fn)

	// typecheckinl is only for imported functions;
	// their bodies may refer to unsafe as long as the package
	// was marked safe during import (which was checked then).
	// the ->inl of a local function has been typechecked before caninl copied it.
	pkg := fnpkg(fn.Nname)

	if pkg == types.LocalPkg || pkg == nil {
		return // typecheckinl on local function
	}

	if base.Flag.LowerM > 2 || base.Debug.Export != 0 {
		fmt.Printf("typecheck import [%v] %L { %v }\n", fn.Sym(), fn, ir.Nodes(fn.Inl.Body))
	}

	// Typecheck the saved inline body with fn as the current function.
	savefn := ir.CurFunc
	ir.CurFunc = fn
	typecheckslice(fn.Inl.Body, ctxStmt)
	ir.CurFunc = savefn

	// During expandInline (which imports fn.Func.Inl.Body),
	// declarations are added to fn.Func.Dcl by funcHdr(). Move them
	// to fn.Func.Inl.Dcl for consistency with how local functions
	// behave. (Append because typecheckinl may be called multiple
	// times.)
	fn.Inl.Dcl = append(fn.Inl.Dcl, fn.Dcl...)
	fn.Dcl = nil

	base.Pos = lno
}
// Caninl determines whether fn is inlineable.
// If so, caninl saves fn->nbody in fn->inl and substitutes it with a copy.
// fn and ->nbody will already have been typechecked.
@ -270,7 +214,7 @@ func inlFlood(n *ir.Name, exportsym func(*ir.Name)) {
}
fn.SetExportInline(true)
typecheckinl(fn)
typecheck.ImportedBody(fn)
// Recursively identify all referenced functions for
// reexport. We want to include even non-called functions,
@ -601,7 +545,7 @@ func inlnode(n ir.Node, maxCost int32, inlMap map[*ir.Func]bool, edit func(ir.No
as.Rhs.Set(inlconv2list(as.Rhs[0].(*ir.InlinedCallExpr)))
as.SetOp(ir.OAS2)
as.SetTypecheck(0)
n = typecheck(as, ctxStmt)
n = typecheck.Stmt(as)
}
}
@ -768,7 +712,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
inlMap[fn] = false
}()
if base.Debug.TypecheckInl == 0 {
typecheckinl(fn)
typecheck.ImportedBody(fn)
}
// We have a function node, and it has an inlineable body.
@ -824,21 +768,21 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
}
if v.Byval() {
iv := typecheck(inlvar(v), ctxExpr)
iv := typecheck.Expr(inlvar(v))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, iv))
ninit.Append(typecheck(ir.NewAssignStmt(base.Pos, iv, o), ctxStmt))
ninit.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, iv, o)))
inlvars[v] = iv
} else {
addr := NewName(lookup("&" + v.Sym().Name))
addr := typecheck.NewName(typecheck.Lookup("&" + v.Sym().Name))
addr.SetType(types.NewPtr(v.Type()))
ia := typecheck(inlvar(addr), ctxExpr)
ia := typecheck.Expr(inlvar(addr))
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, ia))
ninit.Append(typecheck(ir.NewAssignStmt(base.Pos, ia, nodAddr(o)), ctxStmt))
ninit.Append(typecheck.Stmt(ir.NewAssignStmt(base.Pos, ia, typecheck.NodAddr(o))))
inlvars[addr] = ia
// When capturing by reference, all occurrence of the captured var
// must be substituted with dereference of the temporary address
inlvars[v] = typecheck(ir.NewStarExpr(base.Pos, ia), ctxExpr)
inlvars[v] = typecheck.Expr(ir.NewStarExpr(base.Pos, ia))
}
}
}
@ -857,7 +801,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
// nothing should have moved to the heap yet.
base.Fatalf("impossible: %v", ln)
}
inlf := typecheck(inlvar(ln), ctxExpr)
inlf := typecheck.Expr(inlvar(ln))
inlvars[ln] = inlf
if base.Flag.GenDwarfInl > 0 {
if ln.Class_ == ir.PPARAM {
@ -889,7 +833,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
if n := ir.AsNode(t.Nname); n != nil && !ir.IsBlank(n) && !strings.HasPrefix(n.Sym().Name, "~r") {
n := n.(*ir.Name)
m = inlvar(n)
m = typecheck(m, ctxExpr)
m = typecheck.Expr(m)
inlvars[n] = m
delayretvars = false // found a named result parameter
} else {
@ -951,7 +895,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
vas = ir.NewAssignStmt(base.Pos, nil, nil)
vas.X = inlParam(param, vas, inlvars)
if len(varargs) == 0 {
vas.Y = nodnil()
vas.Y = typecheck.NodNil()
vas.Y.SetType(param.Type)
} else {
lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(param.Type).(ir.Ntype), nil)
@ -961,11 +905,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
}
if len(as.Rhs) != 0 {
ninit.Append(typecheck(as, ctxStmt))
ninit.Append(typecheck.Stmt(as))
}
if vas != nil {
ninit.Append(typecheck(vas, ctxStmt))
ninit.Append(typecheck.Stmt(vas))
}
if !delayretvars {
@ -973,11 +917,11 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
for _, n := range retvars {
ninit.Append(ir.NewDecl(base.Pos, ir.ODCL, n))
ras := ir.NewAssignStmt(base.Pos, n, nil)
ninit.Append(typecheck(ras, ctxStmt))
ninit.Append(typecheck.Stmt(ras))
}
}
retlabel := autolabel(".i")
retlabel := typecheck.AutoLabel(".i")
inlgen++
@ -1021,7 +965,7 @@ func mkinlcall(n *ir.CallExpr, fn *ir.Func, maxCost int32, inlMap map[*ir.Func]b
lab := ir.NewLabelStmt(base.Pos, retlabel)
body = append(body, lab)
typecheckslice(body, ctxStmt)
typecheck.Stmts(body)
if base.Flag.GenDwarfInl > 0 {
for _, v := range inlfvars {
@ -1061,7 +1005,7 @@ func inlvar(var_ ir.Node) ir.Node {
fmt.Printf("inlvar %+v\n", var_)
}
n := NewName(var_.Sym())
n := typecheck.NewName(var_.Sym())
n.SetType(var_.Type())
n.Class_ = ir.PAUTO
n.SetUsed(true)
@ -1074,7 +1018,7 @@ func inlvar(var_ ir.Node) ir.Node {
// Synthesize a variable to store the inlined function's results in.
func retvar(t *types.Field, i int) ir.Node {
n := NewName(lookupN("~R", i))
n := typecheck.NewName(typecheck.LookupNum("~R", i))
n.SetType(t.Type)
n.Class_ = ir.PAUTO
n.SetUsed(true)
@ -1086,7 +1030,7 @@ func retvar(t *types.Field, i int) ir.Node {
// Synthesize a variable to store the inlined function's arguments
// when they come from a multiple return call.
func argvar(t *types.Type, i int) ir.Node {
n := NewName(lookupN("~arg", i))
n := typecheck.NewName(typecheck.LookupNum("~arg", i))
n.SetType(t.Elem())
n.Class_ = ir.PAUTO
n.SetUsed(true)
@ -1198,10 +1142,10 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
}
}
init = append(init, typecheck(as, ctxStmt))
init = append(init, typecheck.Stmt(as))
}
init = append(init, ir.NewBranchStmt(base.Pos, ir.OGOTO, subst.retlabel))
typecheckslice(init, ctxStmt)
typecheck.Stmts(init)
return ir.NewBlockStmt(base.Pos, init)
case ir.OGOTO:
@ -1210,7 +1154,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
m.Label = lookup(p)
m.Label = typecheck.Lookup(p)
return m
case ir.OLABEL:
@ -1219,7 +1163,7 @@ func (subst *inlsubst) node(n ir.Node) ir.Node {
m.SetPos(subst.updatedPos(m.Pos()))
m.PtrInit().Set(nil)
p := fmt.Sprintf("%s·%d", n.Label.Name, inlgen)
m.Label = lookup(p)
m.Label = typecheck.Lookup(p)
return m
}
@ -1284,7 +1228,7 @@ func devirtualizeCall(call *ir.CallExpr) {
dt := ir.NewTypeAssertExpr(sel.Pos(), sel.X, nil)
dt.SetType(typ)
x := typecheck(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel), ctxExpr|ctxCallee)
x := typecheck.Callee(ir.NewSelectorExpr(sel.Pos(), ir.OXDOT, dt, sel.Sel))
switch x.Op() {
case ir.ODOTMETH:
x := x.(*ir.SelectorExpr)

View file

@ -13,6 +13,7 @@ import (
"cmd/compile/internal/ir"
"cmd/compile/internal/logopt"
"cmd/compile/internal/ssa"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/dwarf"
@ -49,9 +50,6 @@ func hidePanic() {
}
}
// Target is the package being compiled.
var Target *ir.Package
// Main parses flags and Go source files specified in the command-line
// arguments, type-checks the parsed Go package, compiles functions to machine
// code, and finally writes the compiled package definition to disk.
@ -197,18 +195,18 @@ func Main(archInit func(*Arch)) {
return typenamesym(t).Linksym()
}
Target = new(ir.Package)
typecheck.Target = new(ir.Package)
NeedFuncSym = makefuncsym
NeedITab = func(t, iface *types.Type) { itabname(t, iface) }
NeedRuntimeType = addsignat // TODO(rsc): typenamesym for lock?
typecheck.NeedFuncSym = makefuncsym
typecheck.NeedITab = func(t, iface *types.Type) { itabname(t, iface) }
typecheck.NeedRuntimeType = addsignat // TODO(rsc): typenamesym for lock?
base.AutogeneratedPos = makePos(src.NewFileBase("<autogenerated>", "<autogenerated>"), 1, 0)
types.TypeLinkSym = func(t *types.Type) *obj.LSym {
return typenamesym(t).Linksym()
}
TypecheckInit()
typecheck.Init()
// Parse input.
base.Timer.Start("fe", "parse")
@ -219,7 +217,7 @@ func Main(archInit func(*Arch)) {
recordPackageName()
// Typecheck.
TypecheckPackage()
typecheck.Package()
// With all user code typechecked, it's now safe to verify unused dot imports.
checkDotImports()
@ -227,7 +225,7 @@ func Main(archInit func(*Arch)) {
// Build init task.
if initTask := fninit(); initTask != nil {
exportsym(initTask)
typecheck.Export(initTask)
}
// Inlining
@ -237,7 +235,7 @@ func Main(archInit func(*Arch)) {
}
// Devirtualize.
for _, n := range Target.Decls {
for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
devirtualize(n.(*ir.Func))
}
@ -253,7 +251,7 @@ func Main(archInit func(*Arch)) {
// Large values are also moved off stack in escape analysis;
// because large values may contain pointers, it must happen early.
base.Timer.Start("fe", "escapes")
escapes(Target.Decls)
escapes(typecheck.Target.Decls)
// Collect information for go:nowritebarrierrec
// checking. This must happen before transformclosure.
@ -267,7 +265,7 @@ func Main(archInit func(*Arch)) {
// This needs to happen before walk, because closures must be transformed
// before walk reaches a call of a closure.
base.Timer.Start("fe", "xclosures")
for _, n := range Target.Decls {
for _, n := range typecheck.Target.Decls {
if n.Op() == ir.ODCLFUNC {
n := n.(*ir.Func)
if n.OClosure != nil {
@ -292,8 +290,8 @@ func Main(archInit func(*Arch)) {
// Don't use range--walk can add functions to Target.Decls.
base.Timer.Start("be", "compilefuncs")
fcount := int64(0)
for i := 0; i < len(Target.Decls); i++ {
n := Target.Decls[i]
for i := 0; i < len(typecheck.Target.Decls); i++ {
n := typecheck.Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
funccompile(n.(*ir.Func))
fcount++
@ -327,7 +325,7 @@ func Main(archInit func(*Arch)) {
}
CheckLargeStacks()
CheckFuncStack()
typecheck.CheckFuncStack()
if len(compilequeue) != 0 {
base.Fatalf("%d uncompiled functions", len(compilequeue))
@ -363,7 +361,7 @@ func CheckLargeStacks() {
func cgoSymABIs() {
// The linker expects an ABI0 wrapper for all cgo-exported
// functions.
for _, prag := range Target.CgoPragmas {
for _, prag := range typecheck.Target.CgoPragmas {
switch prag[0] {
case "cgo_export_static", "cgo_export_dynamic":
if symabiRefs == nil {
@ -581,33 +579,6 @@ func findpkg(name string) (file string, ok bool) {
return "", false
}
// loadsys loads the definitions for the low-level runtime functions,
// so that the compiler can generate calls to them,
// but does not make them visible to user code.
func loadsys() {
types.Block = 1
inimport = true
typecheckok = true
typs := runtimeTypes()
for _, d := range &runtimeDecls {
sym := ir.Pkgs.Runtime.Lookup(d.name)
typ := typs[d.typ]
switch d.tag {
case funcTag:
importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
case varTag:
importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
default:
base.Fatalf("unhandled declaration tag %v", d.tag)
}
}
typecheckok = false
inimport = false
}
// myheight tracks the local package's height based on packages
// imported so far.
var myheight int
@ -776,7 +747,7 @@ func importfile(f constant.Value) *types.Pkg {
base.Errorf("import %s: unexpected package format byte: %v", file, c)
base.ErrorExit()
}
fingerprint = iimport(importpkg, imp)
fingerprint = typecheck.ReadImports(importpkg, imp)
default:
base.Errorf("no import in %q", path_)

View file

@ -19,6 +19,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/syntax"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/objabi"
"cmd/internal/src"
@ -160,7 +161,7 @@ type noder struct {
func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
oldScope := p.scope
p.scope = 0
funchdr(fn)
typecheck.StartFuncBody(fn)
if block != nil {
body := p.stmts(block.List)
@ -173,7 +174,7 @@ func (p *noder) funcBody(fn *ir.Func, block *syntax.BlockStmt) {
fn.Endlineno = base.Pos
}
funcbody()
typecheck.FinishFuncBody()
p.scope = oldScope
}
@ -261,7 +262,7 @@ func (p *noder) node() {
p.checkUnused(pragma)
}
Target.Decls = append(Target.Decls, p.decls(p.file.DeclList)...)
typecheck.Target.Decls = append(typecheck.Target.Decls, p.decls(p.file.DeclList)...)
base.Pos = src.NoXPos
clearImports()
@ -273,7 +274,7 @@ func (p *noder) processPragmas() {
p.errorAt(l.pos, "//go:linkname only allowed in Go files that import \"unsafe\"")
continue
}
n := ir.AsNode(lookup(l.local).Def)
n := ir.AsNode(typecheck.Lookup(l.local).Def)
if n == nil || n.Op() != ir.ONAME {
// TODO(mdempsky): Change to p.errorAt before Go 1.17 release.
// base.WarnfAt(p.makeXPos(l.pos), "//go:linkname must refer to declared function or variable (will be an error in Go 1.17)")
@ -285,7 +286,7 @@ func (p *noder) processPragmas() {
}
n.Sym().Linkname = l.remote
}
Target.CgoPragmas = append(Target.CgoPragmas, p.pragcgobuf...)
typecheck.Target.CgoPragmas = append(typecheck.Target.CgoPragmas, p.pragcgobuf...)
}
func (p *noder) decls(decls []syntax.Decl) (l []ir.Node) {
@ -342,7 +343,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
}
if !ipkg.Direct {
Target.Imports = append(Target.Imports, ipkg)
typecheck.Target.Imports = append(typecheck.Target.Imports, ipkg)
}
ipkg.Direct = true
@ -350,7 +351,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
if imp.LocalPkgName != nil {
my = p.name(imp.LocalPkgName)
} else {
my = lookup(ipkg.Name)
my = typecheck.Lookup(ipkg.Name)
}
pack := ir.NewPkgName(p.pos(imp), my, ipkg)
@ -366,7 +367,7 @@ func (p *noder) importDecl(imp *syntax.ImportDecl) {
return
}
if my.Def != nil {
redeclare(pack.Pos(), my, "as imported package name")
typecheck.Redeclared(pack.Pos(), my, "as imported package name")
}
my.Def = pack
my.Lastlineno = pack.Pos()
@ -401,7 +402,7 @@ func (p *noder) varDecl(decl *syntax.VarDecl) []ir.Node {
}
p.setlineno(decl)
return variter(names, typ, exprs)
return typecheck.DeclVars(names, typ, exprs)
}
// constState tracks state between constant specifiers within a
@ -449,7 +450,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
if decl.Values == nil {
v = ir.DeepCopy(n.Pos(), v)
}
declare(n, dclcontext)
typecheck.Declare(n, typecheck.DeclContext)
n.Ntype = typ
n.Defn = v
@ -469,7 +470,7 @@ func (p *noder) constDecl(decl *syntax.ConstDecl, cs *constState) []ir.Node {
func (p *noder) typeDecl(decl *syntax.TypeDecl) ir.Node {
n := p.declName(ir.OTYPE, decl.Name)
declare(n, dclcontext)
typecheck.Declare(n, typecheck.DeclContext)
// decl.Type may be nil but in that case we got a syntax error during parsing
typ := p.typeExprOrNil(decl.Type)
@ -514,7 +515,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node {
if len(t.Params) > 0 || len(t.Results) > 0 {
base.ErrorfAt(f.Pos(), "func init must have no arguments and no return values")
}
Target.Inits = append(Target.Inits, f)
typecheck.Target.Inits = append(typecheck.Target.Inits, f)
}
if types.LocalPkg.Name == "main" && name.Name == "main" {
@ -541,7 +542,7 @@ func (p *noder) funcDecl(fun *syntax.FuncDecl) ir.Node {
}
if fun.Recv == nil {
declare(f.Nname, ir.PFUNC)
typecheck.Declare(f.Nname, ir.PFUNC)
}
p.funcBody(f, fun.Body)
@ -704,7 +705,7 @@ func (p *noder) expr(expr syntax.Expr) ir.Node {
pos, op := p.pos(expr), p.unOp(expr.Op)
switch op {
case ir.OADDR:
return nodAddrAt(pos, x)
return typecheck.NodAddrAt(pos, x)
case ir.ODEREF:
return ir.NewStarExpr(pos, x)
}
@ -950,7 +951,7 @@ func (p *noder) embedded(typ syntax.Expr) *ir.Field {
}
sym := p.packname(typ)
n := ir.NewField(p.pos(typ), lookup(sym.Name), importName(sym).(ir.Ntype), nil)
n := ir.NewField(p.pos(typ), typecheck.Lookup(sym.Name), importName(sym).(ir.Ntype), nil)
n.Embedded = true
if isStar {
@ -1136,8 +1137,8 @@ func (p *noder) assignList(expr syntax.Expr, defn ir.Node, colas bool) []ir.Node
}
newOrErr = true
n := NewName(sym)
declare(n, dclcontext)
n := typecheck.NewName(sym)
typecheck.Declare(n, typecheck.DeclContext)
n.Defn = defn
defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, n))
res[i] = n
@ -1245,8 +1246,8 @@ func (p *noder) caseClauses(clauses []*syntax.CaseClause, tswitch *ir.TypeSwitch
n.List.Set(p.exprList(clause.Cases))
}
if tswitch != nil && tswitch.Tag != nil {
nn := NewName(tswitch.Tag.Sym())
declare(nn, dclcontext)
nn := typecheck.NewName(tswitch.Tag.Sym())
typecheck.Declare(nn, typecheck.DeclContext)
n.Vars = []ir.Node{nn}
// keep track of the instances for reporting unused
nn.Defn = tswitch
@ -1466,7 +1467,7 @@ var tokenForLitKind = [...]token.Token{
}
func (p *noder) name(name *syntax.Name) *types.Sym {
return lookup(name.Value)
return typecheck.Lookup(name.Value)
}
func (p *noder) mkname(name *syntax.Name) ir.Node {

View file

@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/obj"
@ -117,14 +118,14 @@ func dumpCompilerObj(bout *bio.Writer) {
}
func dumpdata() {
numExterns := len(Target.Externs)
numDecls := len(Target.Decls)
numExterns := len(typecheck.Target.Externs)
numDecls := len(typecheck.Target.Decls)
dumpglobls(Target.Externs)
dumpglobls(typecheck.Target.Externs)
dumpfuncsyms()
addptabs()
numExports := len(Target.Exports)
addsignats(Target.Externs)
numExports := len(typecheck.Target.Exports)
addsignats(typecheck.Target.Externs)
dumpsignats()
dumptabs()
numPTabs, numITabs := CountTabs()
@ -140,22 +141,22 @@ func dumpdata() {
// In the typical case, we loop 0 or 1 times.
// It was not until issue 24761 that we found any code that required a loop at all.
for {
for i := numDecls; i < len(Target.Decls); i++ {
n := Target.Decls[i]
for i := numDecls; i < len(typecheck.Target.Decls); i++ {
n := typecheck.Target.Decls[i]
if n.Op() == ir.ODCLFUNC {
funccompile(n.(*ir.Func))
}
}
numDecls = len(Target.Decls)
numDecls = len(typecheck.Target.Decls)
compileFunctions()
dumpsignats()
if numDecls == len(Target.Decls) {
if numDecls == len(typecheck.Target.Decls) {
break
}
}
// Dump extra globals.
dumpglobls(Target.Externs[numExterns:])
dumpglobls(typecheck.Target.Externs[numExterns:])
if zerosize > 0 {
zero := ir.Pkgs.Map.Lookup("zero")
@ -164,7 +165,7 @@ func dumpdata() {
addGCLocals()
if numExports != len(Target.Exports) {
if numExports != len(typecheck.Target.Exports) {
base.Fatalf("Target.Exports changed after compile functions loop")
}
newNumPTabs, newNumITabs := CountTabs()
@ -179,11 +180,11 @@ func dumpdata() {
func dumpLinkerObj(bout *bio.Writer) {
printObjHeader(bout)
if len(Target.CgoPragmas) != 0 {
if len(typecheck.Target.CgoPragmas) != 0 {
// write empty export section; must be before cgo section
fmt.Fprintf(bout, "\n$$\n\n$$\n\n")
fmt.Fprintf(bout, "\n$$ // cgo\n")
if err := json.NewEncoder(bout).Encode(Target.CgoPragmas); err != nil {
if err := json.NewEncoder(bout).Encode(typecheck.Target.CgoPragmas); err != nil {
base.Fatalf("serializing pragcgobuf: %v", err)
}
fmt.Fprintf(bout, "\n$$\n\n")
@ -198,7 +199,7 @@ func addptabs() {
if !base.Ctxt.Flag_dynlink || types.LocalPkg.Name != "main" {
return
}
for _, exportn := range Target.Exports {
for _, exportn := range typecheck.Target.Exports {
s := exportn.Sym()
nn := ir.AsNode(s.Def)
if nn == nil {
@ -474,7 +475,7 @@ func slicedata(pos src.XPos, s string) *ir.Name {
slicedataGen++
symname := fmt.Sprintf(".gobytes.%d", slicedataGen)
sym := types.LocalPkg.Lookup(symname)
symnode := NewName(sym)
symnode := typecheck.NewName(sym)
sym.Def = symnode
lsym := sym.Linksym()

View file

@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
@ -63,7 +64,7 @@ func order(fn *ir.Func) {
// append typechecks stmt and appends it to out.
func (o *Order) append(stmt ir.Node) {
o.out = append(o.out, typecheck(stmt, ctxStmt))
o.out = append(o.out, typecheck.Stmt(stmt))
}
// newTemp allocates a new temporary with the given type,
@ -85,7 +86,7 @@ func (o *Order) newTemp(t *types.Type, clear bool) *ir.Name {
}
}
if v == nil {
v = temp(t)
v = typecheck.Temp(t)
}
if clear {
o.append(ir.NewAssignStmt(base.Pos, v, nil))
@ -142,7 +143,7 @@ func (o *Order) cheapExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.UnaryExpr)
a.X = l
return typecheck(a, ctxExpr)
return typecheck.Expr(a)
}
return o.copyExpr(n)
@ -168,7 +169,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.UnaryExpr)
a.X = l
return typecheck(a, ctxExpr)
return typecheck.Expr(a)
case ir.ODOT:
n := n.(*ir.SelectorExpr)
@ -178,7 +179,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.SelectorExpr)
a.X = l
return typecheck(a, ctxExpr)
return typecheck.Expr(a)
case ir.ODOTPTR:
n := n.(*ir.SelectorExpr)
@ -188,7 +189,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.SelectorExpr)
a.X = l
return typecheck(a, ctxExpr)
return typecheck.Expr(a)
case ir.ODEREF:
n := n.(*ir.StarExpr)
@ -198,7 +199,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
}
a := ir.SepCopy(n).(*ir.StarExpr)
a.X = l
return typecheck(a, ctxExpr)
return typecheck.Expr(a)
case ir.OINDEX, ir.OINDEXMAP:
n := n.(*ir.IndexExpr)
@ -215,7 +216,7 @@ func (o *Order) safeExpr(n ir.Node) ir.Node {
a := ir.SepCopy(n).(*ir.IndexExpr)
a.X = l
a.Index = r
return typecheck(a, ctxExpr)
return typecheck.Expr(a)
default:
base.Fatalf("order.safeExpr %v", n.Op())
@ -241,7 +242,7 @@ func isaddrokay(n ir.Node) bool {
func (o *Order) addrTemp(n ir.Node) ir.Node {
if n.Op() == ir.OLITERAL || n.Op() == ir.ONIL {
// TODO: expand this to all static composite literal nodes?
n = defaultlit(n, nil)
n = typecheck.DefaultLit(n, nil)
types.CalcSize(n.Type())
vstat := readonlystaticname(n.Type())
var s InitSchedule
@ -249,7 +250,7 @@ func (o *Order) addrTemp(n ir.Node) ir.Node {
if s.out != nil {
base.Fatalf("staticassign of const generated code: %+v", n)
}
vstat = typecheck(vstat, ctxExpr).(*ir.Name)
vstat = typecheck.Expr(vstat).(*ir.Name)
return vstat
}
if isaddrokay(n) {
@ -336,7 +337,7 @@ func (o *Order) cleanTempNoPop(mark ordermarker) []ir.Node {
var out []ir.Node
for i := len(o.temp) - 1; i >= int(mark); i-- {
n := o.temp[i]
out = append(out, typecheck(ir.NewUnaryExpr(base.Pos, ir.OVARKILL, n), ctxStmt))
out = append(out, typecheck.Stmt(ir.NewUnaryExpr(base.Pos, ir.OVARKILL, n)))
}
return out
}
@ -388,7 +389,7 @@ func orderMakeSliceCopy(s []ir.Node) {
mk.Cap = cp.Y
// Set bounded when m = OMAKESLICE([]T, len(s)); OCOPY(m, s)
mk.SetBounded(mk.Len.Op() == ir.OLEN && ir.SameSafeExpr(mk.Len.(*ir.UnaryExpr).X, cp.Y))
as.Y = typecheck(mk, ctxExpr)
as.Y = typecheck.Expr(mk)
s[1] = nil // remove separate copy call
}
@ -495,7 +496,7 @@ func (o *Order) call(nn ir.Node) {
}
n := nn.(*ir.CallExpr)
fixVariadicCall(n)
typecheck.FixVariadicCall(n)
n.X = o.expr(n.X, nil)
o.exprList(n.Args)
@ -513,7 +514,7 @@ func (o *Order) call(nn ir.Node) {
x := o.copyExpr(arg.X)
arg.X = x
x.Name().SetAddrtaken(true) // ensure SSA keeps the x variable
n.Body.Append(typecheck(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x), ctxStmt))
n.Body.Append(typecheck.Stmt(ir.NewUnaryExpr(base.Pos, ir.OVARLIVE, x)))
}
}
}
@ -584,7 +585,7 @@ func (o *Order) mapAssign(n ir.Node) {
t := o.newTemp(m.Type(), false)
n.Lhs[i] = t
a := ir.NewAssignStmt(base.Pos, m, t)
post = append(post, typecheck(a, ctxStmt))
post = append(post, typecheck.Stmt(a))
}
}
@ -653,8 +654,8 @@ func (o *Order) stmt(n ir.Node) {
l2.Assigned = false
}
l2 = o.copyExpr(l2)
r := o.expr(typecheck(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y), ctxExpr), nil)
as := typecheck(ir.NewAssignStmt(n.Pos(), l1, r), ctxStmt)
r := o.expr(typecheck.Expr(ir.NewBinaryExpr(n.Pos(), n.AsOp, l2, n.Y)), nil)
as := typecheck.Stmt(ir.NewAssignStmt(n.Pos(), l1, r))
o.mapAssign(as)
o.cleanTemp(t)
return
@ -858,7 +859,7 @@ func (o *Order) stmt(n ir.Node) {
if r.Type().IsString() && r.Type() != types.Types[types.TSTRING] {
r = ir.NewConvExpr(base.Pos, ir.OCONV, nil, r)
r.SetType(types.Types[types.TSTRING])
r = typecheck(r, ctxExpr)
r = typecheck.Expr(r)
}
n.X = o.copyExpr(r)
@ -949,11 +950,11 @@ func (o *Order) stmt(n ir.Node) {
if len(init) > 0 && init[0].Op() == ir.ODCL && init[0].(*ir.Decl).X == n {
init = init[1:]
}
dcl := typecheck(ir.NewDecl(base.Pos, ir.ODCL, n), ctxStmt)
dcl := typecheck.Stmt(ir.NewDecl(base.Pos, ir.ODCL, n))
ncas.PtrInit().Append(dcl)
}
tmp := o.newTemp(t, t.HasPointers())
as := typecheck(ir.NewAssignStmt(base.Pos, n, conv(tmp, n.Type())), ctxStmt)
as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, n, typecheck.Conv(tmp, n.Type())))
ncas.PtrInit().Append(as)
r.Lhs[i] = tmp
}
@ -1217,7 +1218,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Evaluate left-hand side.
lhs := o.expr(n.X, nil)
o.out = append(o.out, typecheck(ir.NewAssignStmt(base.Pos, r, lhs), ctxStmt))
o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, lhs)))
// Evaluate right-hand side, save generated code.
saveout := o.out
@ -1225,7 +1226,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
t := o.markTemp()
o.edge()
rhs := o.expr(n.Y, nil)
o.out = append(o.out, typecheck(ir.NewAssignStmt(base.Pos, r, rhs), ctxStmt))
o.out = append(o.out, typecheck.Stmt(ir.NewAssignStmt(base.Pos, r, rhs)))
o.cleanTemp(t)
gen := o.out
o.out = saveout
@ -1307,7 +1308,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
case ir.OCLOSURE:
n := n.(*ir.ClosureExpr)
if n.Transient() && len(n.Func.ClosureVars) > 0 {
n.Prealloc = o.newTemp(closureType(n), false)
n.Prealloc = o.newTemp(typecheck.ClosureType(n), false)
}
return n
@ -1315,7 +1316,7 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
n := n.(*ir.CallPartExpr)
n.X = o.expr(n.X, nil)
if n.Transient() {
t := partialCallType(n)
t := typecheck.PartialCallType(n)
n.Prealloc = o.newTemp(t, false)
}
return n
@ -1415,13 +1416,13 @@ func (o *Order) expr1(n, lhs ir.Node) ir.Node {
// Emit the creation of the map (with all its static entries).
m := o.newTemp(n.Type(), false)
as := ir.NewAssignStmt(base.Pos, m, n)
typecheck(as, ctxStmt)
typecheck.Stmt(as)
o.stmt(as)
// Emit eval+insert of dynamic entries, one at a time.
for _, r := range dynamics {
as := ir.NewAssignStmt(base.Pos, ir.NewIndexExpr(base.Pos, m, r.Key), r.Value)
typecheck(as, ctxStmt) // Note: this converts the OINDEX to an OINDEXMAP
typecheck.Stmt(as) // Note: this converts the OINDEX to an OINDEXMAP
o.stmt(as)
}
return m
@ -1455,7 +1456,7 @@ func (o *Order) as2(n *ir.AssignListStmt) {
as := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
as.Lhs.Set(left)
as.Rhs.Set(tmplist)
o.stmt(typecheck(as, ctxStmt))
o.stmt(typecheck.Stmt(as))
}
// okAs2 orders OAS2XXX with ok.
@ -1475,12 +1476,12 @@ func (o *Order) okAs2(n *ir.AssignListStmt) {
if tmp1 != nil {
r := ir.NewAssignStmt(base.Pos, n.Lhs[0], tmp1)
o.mapAssign(typecheck(r, ctxStmt))
o.mapAssign(typecheck.Stmt(r))
n.Lhs[0] = tmp1
}
if tmp2 != nil {
r := ir.NewAssignStmt(base.Pos, n.Lhs[1], conv(tmp2, n.Lhs[1].Type()))
o.mapAssign(typecheck(r, ctxStmt))
r := ir.NewAssignStmt(base.Pos, n.Lhs[1], typecheck.Conv(tmp2, n.Lhs[1].Type()))
o.mapAssign(typecheck.Stmt(r))
n.Lhs[1] = tmp2
}
}

View file

@ -8,6 +8,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/dwarf"
"cmd/internal/obj"
@ -146,7 +147,7 @@ func (s *ssafn) AllocFrame(f *ssa.Func) {
}
if f.Config.NeedsFpScratch && scratchUsed {
s.scratchFpMem = tempAt(src.NoXPos, s.curfn, types.Types[types.TUINT64])
s.scratchFpMem = typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT64])
}
sort.Sort(byStackVar(fn.Dcl))
@ -214,11 +215,11 @@ func funccompile(fn *ir.Func) {
return
}
dclcontext = ir.PAUTO
typecheck.DeclContext = ir.PAUTO
ir.CurFunc = fn
compile(fn)
ir.CurFunc = nil
dclcontext = ir.PEXTERN
typecheck.DeclContext = ir.PEXTERN
}
func compile(fn *ir.Func) {

View file

@ -6,6 +6,7 @@ package gc
import (
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"reflect"
@ -41,7 +42,7 @@ func TestCmpstackvar(t *testing.T) {
if s == nil {
s = &types.Sym{Name: "."}
}
n := NewName(s)
n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.Class_ = cl
@ -156,7 +157,7 @@ func TestCmpstackvar(t *testing.T) {
func TestStackvarSort(t *testing.T) {
nod := func(xoffset int64, t *types.Type, s *types.Sym, cl ir.Class) *ir.Name {
n := NewName(s)
n := typecheck.NewName(s)
n.SetType(t)
n.SetFrameOffset(xoffset)
n.Class_ = cl

View file

@ -7,136 +7,12 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/sys"
"unicode/utf8"
)
// typecheckrange typechecks a range statement: first its header
// (range expression and iteration variables), then its body.
func typecheckrange(n *ir.RangeStmt) {
// Typechecking order is important here:
// 0. first typecheck range expression (slice/map/chan),
// it is evaluated only once and so logically it is not part of the loop.
// 1. typecheck produced values,
// this part can declare new vars and so it must be typechecked before body,
// because body can contain a closure that captures the vars.
// 2. decldepth++ to denote loop body.
// 3. typecheck body.
// 4. decldepth--.
typecheckrangeExpr(n)
// second half of dance, the first half being typecheckrangeExpr
n.SetTypecheck(1)
ls := n.Vars
for i1, n1 := range ls {
if n1.Typecheck() == 0 {
// Any iteration variable not already typechecked by
// typecheckrangeExpr is checked here in assignment context.
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
}
}
// The body is one declaration depth deeper than the header, so
// declarations inside it are scoped to the loop.
decldepth++
typecheckslice(n.Body, ctxStmt)
decldepth--
}
// typecheckrangeExpr typechecks the expression being ranged over and
// derives/checks the types of the iteration variables. It is the
// "first half" of range typechecking; see typecheckrange.
func typecheckrangeExpr(n *ir.RangeStmt) {
n.X = typecheck(n.X, ctxExpr)
t := n.X.Type()
if t == nil {
return
}
// delicate little dance. see typecheckas2
ls := n.Vars
for i1, n1 := range ls {
if !ir.DeclaredBy(n1, n) {
ls[i1] = typecheck(ls[i1], ctxExpr|ctxAssign)
}
}
// Ranging over a pointer to array ranges over the array itself.
if t.IsPtr() && t.Elem().IsArray() {
t = t.Elem()
}
n.SetType(t)
// t1 and t2 are the types produced for the first and second
// iteration variables; toomany flags a second variable where the
// range kind permits only one.
var t1, t2 *types.Type
toomany := false
switch t.Kind() {
default:
base.ErrorfAt(n.Pos(), "cannot range over %L", n.X)
return
case types.TARRAY, types.TSLICE:
t1 = types.Types[types.TINT]
t2 = t.Elem()
case types.TMAP:
t1 = t.Key()
t2 = t.Elem()
case types.TCHAN:
if !t.ChanDir().CanRecv() {
base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.X, n.X.Type())
return
}
t1 = t.Elem()
t2 = nil
if len(n.Vars) == 2 {
toomany = true
}
case types.TSTRING:
t1 = types.Types[types.TINT]
t2 = types.RuneType
}
if len(n.Vars) > 2 || toomany {
base.ErrorfAt(n.Pos(), "too many variables in range")
}
var v1, v2 ir.Node
if len(n.Vars) != 0 {
v1 = n.Vars[0]
}
if len(n.Vars) > 1 {
v2 = n.Vars[1]
}
// this is not only an optimization but also a requirement in the spec.
// "if the second iteration variable is the blank identifier, the range
// clause is equivalent to the same clause with only the first variable
// present."
if ir.IsBlank(v2) {
if v1 != nil {
n.Vars = []ir.Node{v1}
}
v2 = nil
}
if v1 != nil {
if ir.DeclaredBy(v1, n) {
// Variable declared by this range statement: it takes the
// derived type directly.
v1.SetType(t1)
} else if v1.Type() != nil {
// Pre-existing variable: the derived type must be
// assignable to it.
if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
}
}
checkassign(n, v1)
}
if v2 != nil {
if ir.DeclaredBy(v2, n) {
v2.SetType(t2)
} else if v2.Type() != nil {
if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
}
}
checkassign(n, v2)
}
}
func cheapComputableIndex(width int64) bool {
switch thearch.LinkArch.Family {
// MIPS does not have R+R addressing
@ -221,8 +97,8 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// order.stmt arranged for a copy of the array/slice variable if needed.
ha := a
hv1 := temp(types.Types[types.TINT])
hn := temp(types.Types[types.TINT])
hv1 := typecheck.Temp(types.Types[types.TINT])
hn := typecheck.Temp(types.Types[types.TINT])
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
init = append(init, ir.NewAssignStmt(base.Pos, hn, ir.NewUnaryExpr(base.Pos, ir.OLEN, ha)))
@ -271,10 +147,10 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
ifGuard.Cond = ir.NewBinaryExpr(base.Pos, ir.OLT, hv1, hn)
nfor.SetOp(ir.OFORUNTIL)
hp := temp(types.NewPtr(nrange.Type().Elem()))
hp := typecheck.Temp(types.NewPtr(nrange.Type().Elem()))
tmp := ir.NewIndexExpr(base.Pos, ha, ir.NewInt(0))
tmp.SetBounded(true)
init = append(init, ir.NewAssignStmt(base.Pos, hp, nodAddr(tmp)))
init = append(init, ir.NewAssignStmt(base.Pos, hp, typecheck.NodAddr(tmp)))
// Use OAS2 to correctly handle assignments
// of the form "v1, a[v1] := range".
@ -289,7 +165,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// advancing the pointer is safe and won't go past the
// end of the allocation.
as := ir.NewAssignStmt(base.Pos, hp, addptr(hp, t.Elem().Width))
nfor.Late = []ir.Node{typecheck(as, ctxStmt)}
nfor.Late = []ir.Node{typecheck.Stmt(as)}
case types.TMAP:
// order.stmt allocated the iterator for us.
@ -301,15 +177,15 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
keysym := th.Field(0).Sym // depends on layout of iterator struct. See reflect.go:hiter
elemsym := th.Field(1).Sym // ditto
fn := syslook("mapiterinit")
fn := typecheck.LookupRuntime("mapiterinit")
fn = substArgTypes(fn, t.Key(), t.Elem(), th)
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, nodAddr(hit)))
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), nodnil())
fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem(), th)
init = append(init, mkcall1(fn, nil, nil, typename(t), ha, typecheck.NodAddr(hit)))
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym), typecheck.NodNil())
fn = syslook("mapiternext")
fn = substArgTypes(fn, th)
nfor.Post = mkcall1(fn, nil, nil, nodAddr(hit))
fn = typecheck.LookupRuntime("mapiternext")
fn = typecheck.SubstArgTypes(fn, th)
nfor.Post = mkcall1(fn, nil, nil, typecheck.NodAddr(hit))
key := ir.NewStarExpr(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, hit, keysym))
if v1 == nil {
@ -328,12 +204,12 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// order.stmt arranged for a copy of the channel variable.
ha := a
hv1 := temp(t.Elem())
hv1 := typecheck.Temp(t.Elem())
hv1.SetTypecheck(1)
if t.Elem().HasPointers() {
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
}
hb := temp(types.Types[types.TBOOL])
hb := typecheck.Temp(types.Types[types.TBOOL])
nfor.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, hb, ir.NewBool(false))
a := ir.NewAssignListStmt(base.Pos, ir.OAS2RECV, nil, nil)
@ -370,9 +246,9 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// order.stmt arranged for a copy of the string variable.
ha := a
hv1 := temp(types.Types[types.TINT])
hv1t := temp(types.Types[types.TINT])
hv2 := temp(types.RuneType)
hv1 := typecheck.Temp(types.Types[types.TINT])
hv1t := typecheck.Temp(types.Types[types.TINT])
hv2 := typecheck.Temp(types.RuneType)
// hv1 := 0
init = append(init, ir.NewAssignStmt(base.Pos, hv1, nil))
@ -388,7 +264,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// hv2 := rune(ha[hv1])
nind := ir.NewIndexExpr(base.Pos, ha, hv1)
nind.SetBounded(true)
body = append(body, ir.NewAssignStmt(base.Pos, hv2, conv(nind, types.RuneType)))
body = append(body, ir.NewAssignStmt(base.Pos, hv2, typecheck.Conv(nind, types.RuneType)))
// if hv2 < utf8.RuneSelf
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
@ -403,7 +279,7 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
// hv2, hv1 = decoderune(ha, hv1)
eif.Lhs = []ir.Node{hv2, hv1}
fn := syslook("decoderune")
fn := typecheck.LookupRuntime("decoderune")
eif.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), nil, ha, hv1)}
body = append(body, nif)
@ -422,21 +298,21 @@ func walkrange(nrange *ir.RangeStmt) ir.Node {
}
}
typecheckslice(init, ctxStmt)
typecheck.Stmts(init)
if ifGuard != nil {
ifGuard.PtrInit().Append(init...)
ifGuard = typecheck(ifGuard, ctxStmt).(*ir.IfStmt)
ifGuard = typecheck.Stmt(ifGuard).(*ir.IfStmt)
} else {
nfor.PtrInit().Append(init...)
}
typecheckslice(nfor.Cond.Init(), ctxStmt)
typecheck.Stmts(nfor.Cond.Init())
nfor.Cond = typecheck(nfor.Cond, ctxExpr)
nfor.Cond = defaultlit(nfor.Cond, nil)
nfor.Post = typecheck(nfor.Post, ctxStmt)
typecheckslice(body, ctxStmt)
nfor.Cond = typecheck.Expr(nfor.Cond)
nfor.Cond = typecheck.DefaultLit(nfor.Cond, nil)
nfor.Post = typecheck.Stmt(nfor.Post)
typecheck.Stmts(body)
nfor.Body.Append(body...)
nfor.Body.Append(nrange.Body...)
@ -505,10 +381,10 @@ func mapClear(m ir.Node) ir.Node {
t := m.Type()
// instantiate mapclear(typ *type, hmap map[any]any)
fn := syslook("mapclear")
fn = substArgTypes(fn, t.Key(), t.Elem())
fn := typecheck.LookupRuntime("mapclear")
fn = typecheck.SubstArgTypes(fn, t.Key(), t.Elem())
n := mkcall1(fn, nil, nil, typename(t), m)
return walkstmt(typecheck(n, ctxStmt))
return walkstmt(typecheck.Stmt(n))
}
// Lower n into runtime·memclr if possible, for
@ -566,16 +442,16 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
n.Cond = ir.NewBinaryExpr(base.Pos, ir.ONE, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(0))
// hp = &a[0]
hp := temp(types.Types[types.TUNSAFEPTR])
hp := typecheck.Temp(types.Types[types.TUNSAFEPTR])
ix := ir.NewIndexExpr(base.Pos, a, ir.NewInt(0))
ix.SetBounded(true)
addr := convnop(nodAddr(ix), types.Types[types.TUNSAFEPTR])
addr := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])
n.Body.Append(ir.NewAssignStmt(base.Pos, hp, addr))
// hn = len(a) * sizeof(elem(a))
hn := temp(types.Types[types.TUINTPTR])
mul := conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(elemsize)), types.Types[types.TUINTPTR])
hn := typecheck.Temp(types.Types[types.TUINTPTR])
mul := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, ir.NewUnaryExpr(base.Pos, ir.OLEN, a), ir.NewInt(elemsize)), types.Types[types.TUINTPTR])
n.Body.Append(ir.NewAssignStmt(base.Pos, hn, mul))
var fn ir.Node
@ -595,9 +471,9 @@ func arrayClear(loop *ir.RangeStmt, v1, v2, a ir.Node) ir.Node {
n.Body.Append(v1)
n.Cond = typecheck(n.Cond, ctxExpr)
n.Cond = defaultlit(n.Cond, nil)
typecheckslice(n.Body, ctxStmt)
n.Cond = typecheck.Expr(n.Cond)
n.Cond = typecheck.DefaultLit(n.Cond, nil)
typecheck.Stmts(n.Body)
return walkstmt(n)
}

View file

@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/gcprog"
"cmd/internal/obj"
@ -339,36 +340,6 @@ func deferstruct(stksize int64) *types.Type {
return s
}
// f is method type, with receiver.
// return function type, receiver as first argument (or not).
func methodfunc(f *types.Type, receiver *types.Type) *types.Type {
inLen := f.Params().Fields().Len()
if receiver != nil {
inLen++
}
in := make([]*ir.Field, 0, inLen)
if receiver != nil {
d := ir.NewField(base.Pos, nil, nil, receiver)
in = append(in, d)
}
for _, t := range f.Params().Fields().Slice() {
d := ir.NewField(base.Pos, nil, nil, t.Type)
d.IsDDD = t.IsDDD()
in = append(in, d)
}
outLen := f.Results().Fields().Len()
out := make([]*ir.Field, 0, outLen)
for _, t := range f.Results().Fields().Slice() {
d := ir.NewField(base.Pos, nil, nil, t.Type)
out = append(out, d)
}
return functype(nil, in, out)
}
// methods returns the methods of the non-interface type t, sorted by name.
// Generates stub functions as needed.
func methods(t *types.Type) []*Sig {
@ -378,7 +349,7 @@ func methods(t *types.Type) []*Sig {
if mt == nil {
return nil
}
expandmeth(mt)
typecheck.CalcMethods(mt)
// type stored in interface word
it := t
@ -418,8 +389,8 @@ func methods(t *types.Type) []*Sig {
name: method,
isym: ir.MethodSym(it, method),
tsym: ir.MethodSym(t, method),
type_: methodfunc(f.Type, t),
mtype: methodfunc(f.Type, nil),
type_: typecheck.NewMethodType(f.Type, t),
mtype: typecheck.NewMethodType(f.Type, nil),
}
ms = append(ms, sig)
@ -463,7 +434,7 @@ func imethods(t *types.Type) []*Sig {
sig := &Sig{
name: f.Sym,
mtype: f.Type,
type_: methodfunc(f.Type, nil),
type_: typecheck.NewMethodType(f.Type, nil),
}
methods = append(methods, sig)
@ -916,7 +887,7 @@ func typename(t *types.Type) *ir.AddrExpr {
s.Def = n
}
n := nodAddr(ir.AsNode(s.Def))
n := typecheck.NodAddr(ir.AsNode(s.Def))
n.SetType(types.NewPtr(s.Def.Type()))
n.SetTypecheck(1)
return n
@ -928,7 +899,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
}
s := ir.Pkgs.Itab.Lookup(t.ShortString() + "," + itype.ShortString())
if s.Def == nil {
n := NewName(s)
n := typecheck.NewName(s)
n.SetType(types.Types[types.TUINT8])
n.Class_ = ir.PEXTERN
n.SetTypecheck(1)
@ -936,7 +907,7 @@ func itabname(t, itype *types.Type) *ir.AddrExpr {
itabs = append(itabs, itabEntry{t: t, itype: itype, lsym: s.Linksym()})
}
n := nodAddr(ir.AsNode(s.Def))
n := typecheck.NodAddr(ir.AsNode(s.Def))
n.SetType(types.NewPtr(s.Def.Type()))
n.SetTypecheck(1)
return n
@ -1033,7 +1004,7 @@ func dtypesym(t *types.Type) *obj.LSym {
if base.Ctxt.Pkgpath != "runtime" || (tbase != types.Types[tbase.Kind()] && tbase != types.ByteType && tbase != types.RuneType && tbase != types.ErrorType) { // int, float, etc
// named types from other files are defined only by those files
if tbase.Sym() != nil && tbase.Sym().Pkg != types.LocalPkg {
if i := BaseTypeIndex(t); i >= 0 {
if i := typecheck.BaseTypeIndex(t); i >= 0 {
lsym.Pkg = tbase.Sym().Pkg.Prefix
lsym.SymIdx = int32(i)
lsym.Set(obj.AttrIndexed, true)
@ -1492,7 +1463,7 @@ func dumpbasictypes() {
// The latter is the type of an auto-generated wrapper.
dtypesym(types.NewPtr(types.ErrorType))
dtypesym(functype(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.ErrorType)}, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TSTRING])}))
dtypesym(typecheck.NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.ErrorType)}, []*ir.Field{ir.NewField(base.Pos, nil, nil, types.Types[types.TSTRING])}))
// add paths for runtime and main, which 6l imports implicitly.
dimportpath(ir.Pkgs.Runtime)
@ -1744,13 +1715,13 @@ func zeroaddr(size int64) ir.Node {
}
s := ir.Pkgs.Map.Lookup("zero")
if s.Def == nil {
x := NewName(s)
x := typecheck.NewName(s)
x.SetType(types.Types[types.TUINT8])
x.Class_ = ir.PEXTERN
x.SetTypecheck(1)
s.Def = x
}
z := nodAddr(ir.AsNode(s.Def))
z := typecheck.NodAddr(ir.AsNode(s.Def))
z.SetType(types.NewPtr(types.Types[types.TUINT8]))
z.SetTypecheck(1)
return z

View file

@ -7,92 +7,10 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
)
// select
func typecheckselect(sel *ir.SelectStmt) {
var def ir.Node
lno := ir.SetPos(sel)
typecheckslice(sel.Init(), ctxStmt)
for _, ncase := range sel.Cases {
ncase := ncase.(*ir.CaseStmt)
if len(ncase.List) == 0 {
// default
if def != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
} else {
def = ncase
}
} else if len(ncase.List) > 1 {
base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
} else {
ncase.List[0] = typecheck(ncase.List[0], ctxStmt)
n := ncase.List[0]
ncase.Comm = n
ncase.List.Set(nil)
oselrecv2 := func(dst, recv ir.Node, colas bool) {
n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
n.Lhs = []ir.Node{dst, ir.BlankNode}
n.Rhs = []ir.Node{recv}
n.Def = colas
n.SetTypecheck(1)
ncase.Comm = n
}
switch n.Op() {
default:
pos := n.Pos()
if n.Op() == ir.ONAME {
// We don't have the right position for ONAME nodes (see #15459 and
// others). Using ncase.Pos for now as it will provide the correct
// line number (assuming the expression follows the "case" keyword
// on the same line). This matches the approach before 1.10.
pos = ncase.Pos()
}
base.ErrorfAt(pos, "select case must be receive, send or assign recv")
case ir.OAS:
// convert x = <-c into x, _ = <-c
// remove implicit conversions; the eventual assignment
// will reintroduce them.
n := n.(*ir.AssignStmt)
if r := n.Y; r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
r := r.(*ir.ConvExpr)
if r.Implicit() {
n.Y = r.X
}
}
if n.Y.Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
oselrecv2(n.X, n.Y, n.Def)
case ir.OAS2RECV:
n := n.(*ir.AssignListStmt)
if n.Rhs[0].Op() != ir.ORECV {
base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
break
}
n.SetOp(ir.OSELRECV2)
case ir.ORECV:
// convert <-c into _, _ = <-c
n := n.(*ir.UnaryExpr)
oselrecv2(ir.BlankNode, n, false)
case ir.OSEND:
break
}
}
typecheckslice(ncase.Body, ctxStmt)
}
base.Pos = lno
}
func walkselect(sel *ir.SelectStmt) {
lno := ir.SetPos(sel)
if len(sel.Compiled) != 0 {
@ -167,14 +85,14 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
switch n.Op() {
case ir.OSEND:
n := n.(*ir.SendStmt)
n.Value = nodAddr(n.Value)
n.Value = typecheck(n.Value, ctxExpr)
n.Value = typecheck.NodAddr(n.Value)
n.Value = typecheck.Expr(n.Value)
case ir.OSELRECV2:
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.Lhs[0]) {
n.Lhs[0] = nodAddr(n.Lhs[0])
n.Lhs[0] = typecheck(n.Lhs[0], ctxExpr)
n.Lhs[0] = typecheck.NodAddr(n.Lhs[0])
n.Lhs[0] = typecheck.Expr(n.Lhs[0])
}
}
}
@ -207,7 +125,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
ch := recv.X
elem := n.Lhs[0]
if ir.IsBlank(elem) {
elem = nodnil()
elem = typecheck.NodNil()
}
if ir.IsBlank(n.Lhs[1]) {
// if selectnbrecv(&v, c) { body } else { default body }
@ -215,12 +133,12 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
} else {
// TODO(cuonglm): make this use selectnbrecv()
// if selectnbrecv2(&v, &received, c) { body } else { default body }
receivedp := typecheck(nodAddr(n.Lhs[1]), ctxExpr)
receivedp := typecheck.Expr(typecheck.NodAddr(n.Lhs[1]))
call = mkcall1(chanfn("selectnbrecv2", 2, ch.Type()), types.Types[types.TBOOL], r.PtrInit(), elem, receivedp, ch)
}
}
r.Cond = typecheck(call, ctxExpr)
r.Cond = typecheck.Expr(call)
r.Body.Set(cas.Body)
r.Else.Set(append(dflt.Init(), dflt.Body...))
return []ir.Node{r, ir.NewBranchStmt(base.Pos, ir.OBREAK, nil)}
@ -236,18 +154,18 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// generate sel-struct
base.Pos = sellineno
selv := temp(types.NewArray(scasetype(), int64(ncas)))
init = append(init, typecheck(ir.NewAssignStmt(base.Pos, selv, nil), ctxStmt))
selv := typecheck.Temp(types.NewArray(scasetype(), int64(ncas)))
init = append(init, typecheck.Stmt(ir.NewAssignStmt(base.Pos, selv, nil)))
// No initialization for order; runtime.selectgo is responsible for that.
order := temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
order := typecheck.Temp(types.NewArray(types.Types[types.TUINT16], 2*int64(ncas)))
var pc0, pcs ir.Node
if base.Flag.Race {
pcs = temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
pc0 = typecheck(nodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(0))), ctxExpr)
pcs = typecheck.Temp(types.NewArray(types.Types[types.TUINTPTR], int64(ncas)))
pc0 = typecheck.Expr(typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(0))))
} else {
pc0 = nodnil()
pc0 = typecheck.NodNil()
}
// register cases
@ -286,21 +204,21 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
casorder[i] = cas
setField := func(f string, val ir.Node) {
r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(int64(i))), lookup(f)), val)
init = append(init, typecheck(r, ctxStmt))
r := ir.NewAssignStmt(base.Pos, ir.NewSelectorExpr(base.Pos, ir.ODOT, ir.NewIndexExpr(base.Pos, selv, ir.NewInt(int64(i))), typecheck.Lookup(f)), val)
init = append(init, typecheck.Stmt(r))
}
c = convnop(c, types.Types[types.TUNSAFEPTR])
c = typecheck.ConvNop(c, types.Types[types.TUNSAFEPTR])
setField("c", c)
if !ir.IsBlank(elem) {
elem = convnop(elem, types.Types[types.TUNSAFEPTR])
elem = typecheck.ConvNop(elem, types.Types[types.TUNSAFEPTR])
setField("elem", elem)
}
// TODO(mdempsky): There should be a cleaner way to
// handle this.
if base.Flag.Race {
r := mkcall("selectsetpc", nil, nil, nodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(int64(i)))))
r := mkcall("selectsetpc", nil, nil, typecheck.NodAddr(ir.NewIndexExpr(base.Pos, pcs, ir.NewInt(int64(i)))))
init = append(init, r)
}
}
@ -310,13 +228,13 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// run the select
base.Pos = sellineno
chosen := temp(types.Types[types.TINT])
recvOK := temp(types.Types[types.TBOOL])
chosen := typecheck.Temp(types.Types[types.TINT])
recvOK := typecheck.Temp(types.Types[types.TBOOL])
r := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
r.Lhs = []ir.Node{chosen, recvOK}
fn := syslook("selectgo")
fn := typecheck.LookupRuntime("selectgo")
r.Rhs = []ir.Node{mkcall1(fn, fn.Type().Results(), nil, bytePtrToIndex(selv, 0), bytePtrToIndex(order, 0), pc0, ir.NewInt(int64(nsends)), ir.NewInt(int64(nrecvs)), ir.NewBool(dflt == nil))}
init = append(init, typecheck(r, ctxStmt))
init = append(init, typecheck.Stmt(r))
// selv and order are no longer alive after selectgo.
init = append(init, ir.NewUnaryExpr(base.Pos, ir.OVARKILL, selv))
@ -327,8 +245,8 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// dispatch cases
dispatch := func(cond ir.Node, cas *ir.CaseStmt) {
cond = typecheck(cond, ctxExpr)
cond = defaultlit(cond, nil)
cond = typecheck.Expr(cond)
cond = typecheck.DefaultLit(cond, nil)
r := ir.NewIfStmt(base.Pos, cond, nil, nil)
@ -336,7 +254,7 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
n := n.(*ir.AssignListStmt)
if !ir.IsBlank(n.Lhs[1]) {
x := ir.NewAssignStmt(base.Pos, n.Lhs[1], recvOK)
r.Body.Append(typecheck(x, ctxStmt))
r.Body.Append(typecheck.Stmt(x))
}
}
@ -359,9 +277,9 @@ func walkselectcases(cases ir.Nodes) []ir.Node {
// bytePtrToIndex returns a Node representing "(*byte)(&n[i])".
func bytePtrToIndex(n ir.Node, i int64) ir.Node {
s := nodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(i)))
s := typecheck.NodAddr(ir.NewIndexExpr(base.Pos, n, ir.NewInt(i)))
t := types.NewPtr(types.Types[types.TUINT8])
return convnop(s, t)
return typecheck.ConvNop(s, t)
}
var scase *types.Type
@ -369,9 +287,9 @@ var scase *types.Type
// Keep in sync with src/runtime/select.go.
func scasetype() *types.Type {
if scase == nil {
scase = tostruct([]*ir.Field{
ir.NewField(base.Pos, lookup("c"), nil, types.Types[types.TUNSAFEPTR]),
ir.NewField(base.Pos, lookup("elem"), nil, types.Types[types.TUNSAFEPTR]),
scase = typecheck.NewStructType([]*ir.Field{
ir.NewField(base.Pos, typecheck.Lookup("c"), nil, types.Types[types.TUNSAFEPTR]),
ir.NewField(base.Pos, typecheck.Lookup("elem"), nil, types.Types[types.TUNSAFEPTR]),
})
scase.SetNoalg(true)
}

View file

@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"fmt"
@ -112,7 +113,7 @@ func (s *InitSchedule) staticcopy(l *ir.Name, loff int64, rn *ir.Name, typ *type
if loff != 0 || !types.Identical(typ, l.Type()) {
dst = ir.NewNameOffsetExpr(base.Pos, l, loff, typ)
}
s.append(ir.NewAssignStmt(base.Pos, dst, conv(r, typ)))
s.append(ir.NewAssignStmt(base.Pos, dst, typecheck.Conv(r, typ)))
return true
case ir.ONIL:
@ -387,9 +388,9 @@ var statuniqgen int // name generator for static temps
// Use readonlystaticname for read-only node.
func staticname(t *types.Type) *ir.Name {
// Don't use lookupN; it interns the resulting string, but these are all unique.
n := NewName(lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
n := typecheck.NewName(typecheck.Lookup(fmt.Sprintf("%s%d", obj.StaticNamePref, statuniqgen)))
statuniqgen++
declare(n, ir.PEXTERN)
typecheck.Declare(n, ir.PEXTERN)
n.SetType(t)
n.Sym().Linksym().Set(obj.AttrLocal, true)
return n
@ -541,7 +542,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
splitnode = func(r ir.Node) (ir.Node, ir.Node) {
if r.Op() == ir.OKEY {
kv := r.(*ir.KeyExpr)
k = indexconst(kv.Key)
k = typecheck.IndexConst(kv.Key)
if k < 0 {
base.Fatalf("fixedlit: invalid index %v", kv.Key)
}
@ -596,7 +597,7 @@ func fixedlit(ctxt initContext, kind initKind, n *ir.CompLitExpr, var_ ir.Node,
// build list of assignments: var[index] = expr
ir.SetPos(a)
as := ir.NewAssignStmt(base.Pos, a, value)
as = typecheck(as, ctxStmt).(*ir.AssignStmt)
as = typecheck.Stmt(as).(*ir.AssignStmt)
switch kind {
case initKindStatic:
genAsStatic(as)
@ -632,7 +633,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
fixedlit(ctxt, initKindDynamic, n, vstat, init)
// copy static to slice
var_ = typecheck(var_, ctxExpr|ctxAssign)
var_ = typecheck.AssignExpr(var_)
name, offset, ok := stataddr(var_)
if !ok || name.Class_ != ir.PEXTERN {
base.Fatalf("slicelit: %v", var_)
@ -675,7 +676,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
}
// make new auto *array (3 declare)
vauto := temp(types.NewPtr(t))
vauto := typecheck.Temp(types.NewPtr(t))
// set auto to point at new temp or heap (3 assign)
var a ir.Node
@ -687,7 +688,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
if vstat == nil {
a = ir.NewAssignStmt(base.Pos, x, nil)
a = typecheck(a, ctxStmt)
a = typecheck.Stmt(a)
init.Append(a) // zero new temp
} else {
// Declare that we're about to initialize all of x.
@ -695,19 +696,19 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, x))
}
a = nodAddr(x)
a = typecheck.NodAddr(x)
} else if n.Esc() == ir.EscNone {
a = temp(t)
a = typecheck.Temp(t)
if vstat == nil {
a = ir.NewAssignStmt(base.Pos, temp(t), nil)
a = typecheck(a, ctxStmt)
a = ir.NewAssignStmt(base.Pos, typecheck.Temp(t), nil)
a = typecheck.Stmt(a)
init.Append(a) // zero new temp
a = a.(*ir.AssignStmt).X
} else {
init.Append(ir.NewUnaryExpr(base.Pos, ir.OVARDEF, a))
}
a = nodAddr(a)
a = typecheck.NodAddr(a)
} else {
a = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(t))
}
@ -724,7 +725,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
for _, value := range n.List {
if value.Op() == ir.OKEY {
kv := value.(*ir.KeyExpr)
index = indexconst(kv.Key)
index = typecheck.IndexConst(kv.Key)
if index < 0 {
base.Fatalf("slicelit: invalid index %v", kv.Key)
}
@ -758,7 +759,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// build list of vauto[c] = expr
ir.SetPos(value)
as := typecheck(ir.NewAssignStmt(base.Pos, a, value), ctxStmt)
as := typecheck.Stmt(ir.NewAssignStmt(base.Pos, a, value))
as = orderStmtInPlace(as, map[string][]*ir.Name{})
as = walkstmt(as)
init.Append(as)
@ -767,7 +768,7 @@ func slicelit(ctxt initContext, n *ir.CompLitExpr, var_ ir.Node, init *ir.Nodes)
// make slice out of heap (6)
a = ir.NewAssignStmt(base.Pos, var_, ir.NewSliceExpr(base.Pos, ir.OSLICE, vauto))
a = typecheck(a, ctxStmt)
a = typecheck.Stmt(a)
a = orderStmtInPlace(a, map[string][]*ir.Name{})
a = walkstmt(a)
init.Append(a)
@ -822,7 +823,7 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
// for i = 0; i < len(vstatk); i++ {
// map[vstatk[i]] = vstate[i]
// }
i := temp(types.Types[types.TINT])
i := typecheck.Temp(types.Types[types.TINT])
rhs := ir.NewIndexExpr(base.Pos, vstate, i)
rhs.SetBounded(true)
@ -847,8 +848,8 @@ func maplit(n *ir.CompLitExpr, m ir.Node, init *ir.Nodes) {
// Build list of var[c] = expr.
// Use temporaries so that mapassign1 can have addressable key, elem.
// TODO(josharian): avoid map key temporaries for mapfast_* assignments with literal keys.
tmpkey := temp(m.Type().Key())
tmpelem := temp(m.Type().Elem())
tmpkey := typecheck.Temp(m.Type().Key())
tmpelem := typecheck.Temp(m.Type().Elem())
for _, r := range entries {
r := r.(*ir.KeyExpr)
@ -892,7 +893,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
if n.Alloc != nil {
// n.Right is stack temporary used as backing store.
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, n.Alloc, nil)) // zero backing store, just in case (#18410)
r = nodAddr(n.Alloc)
r = typecheck.NodAddr(n.Alloc)
} else {
r = ir.NewUnaryExpr(base.Pos, ir.ONEW, ir.TypeNode(n.X.Type()))
r.SetEsc(n.Esc())
@ -900,7 +901,7 @@ func anylit(n ir.Node, var_ ir.Node, init *ir.Nodes) {
appendWalkStmt(init, ir.NewAssignStmt(base.Pos, var_, r))
var_ = ir.NewStarExpr(base.Pos, var_)
var_ = typecheck(var_, ctxExpr|ctxAssign)
var_ = typecheck.AssignExpr(var_)
anylit(n.X, var_, init)
case ir.OSTRUCTLIT, ir.OARRAYLIT:
@ -1060,7 +1061,7 @@ func (s *InitSchedule) initplan(n ir.Node) {
for _, a := range n.List {
if a.Op() == ir.OKEY {
kv := a.(*ir.KeyExpr)
k = indexconst(kv.Key)
k = typecheck.IndexConst(kv.Key)
if k < 0 {
base.Fatalf("initplan arraylit: invalid index %v", kv.Key)
}

View file

@ -19,6 +19,7 @@ import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/ssa"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/x86"
@ -91,119 +92,119 @@ func initssaconfig() {
ssaCaches = make([]ssa.Cache, base.Flag.LowerC)
// Set up some runtime functions we'll need to call.
ir.Syms.AssertE2I = sysfunc("assertE2I")
ir.Syms.AssertE2I2 = sysfunc("assertE2I2")
ir.Syms.AssertI2I = sysfunc("assertI2I")
ir.Syms.AssertI2I2 = sysfunc("assertI2I2")
ir.Syms.Deferproc = sysfunc("deferproc")
ir.Syms.DeferprocStack = sysfunc("deferprocStack")
ir.Syms.Deferreturn = sysfunc("deferreturn")
ir.Syms.Duffcopy = sysfunc("duffcopy")
ir.Syms.Duffzero = sysfunc("duffzero")
ir.Syms.GCWriteBarrier = sysfunc("gcWriteBarrier")
ir.Syms.Goschedguarded = sysfunc("goschedguarded")
ir.Syms.Growslice = sysfunc("growslice")
ir.Syms.Msanread = sysfunc("msanread")
ir.Syms.Msanwrite = sysfunc("msanwrite")
ir.Syms.Msanmove = sysfunc("msanmove")
ir.Syms.Newobject = sysfunc("newobject")
ir.Syms.Newproc = sysfunc("newproc")
ir.Syms.Panicdivide = sysfunc("panicdivide")
ir.Syms.PanicdottypeE = sysfunc("panicdottypeE")
ir.Syms.PanicdottypeI = sysfunc("panicdottypeI")
ir.Syms.Panicnildottype = sysfunc("panicnildottype")
ir.Syms.Panicoverflow = sysfunc("panicoverflow")
ir.Syms.Panicshift = sysfunc("panicshift")
ir.Syms.Raceread = sysfunc("raceread")
ir.Syms.Racereadrange = sysfunc("racereadrange")
ir.Syms.Racewrite = sysfunc("racewrite")
ir.Syms.Racewriterange = sysfunc("racewriterange")
ir.Syms.X86HasPOPCNT = sysvar("x86HasPOPCNT") // bool
ir.Syms.X86HasSSE41 = sysvar("x86HasSSE41") // bool
ir.Syms.X86HasFMA = sysvar("x86HasFMA") // bool
ir.Syms.ARMHasVFPv4 = sysvar("armHasVFPv4") // bool
ir.Syms.ARM64HasATOMICS = sysvar("arm64HasATOMICS") // bool
ir.Syms.Typedmemclr = sysfunc("typedmemclr")
ir.Syms.Typedmemmove = sysfunc("typedmemmove")
ir.Syms.Udiv = sysvar("udiv") // asm func with special ABI
ir.Syms.WriteBarrier = sysvar("writeBarrier") // struct { bool; ... }
ir.Syms.Zerobase = sysvar("zerobase")
ir.Syms.AssertE2I = typecheck.LookupRuntimeFunc("assertE2I")
ir.Syms.AssertE2I2 = typecheck.LookupRuntimeFunc("assertE2I2")
ir.Syms.AssertI2I = typecheck.LookupRuntimeFunc("assertI2I")
ir.Syms.AssertI2I2 = typecheck.LookupRuntimeFunc("assertI2I2")
ir.Syms.Deferproc = typecheck.LookupRuntimeFunc("deferproc")
ir.Syms.DeferprocStack = typecheck.LookupRuntimeFunc("deferprocStack")
ir.Syms.Deferreturn = typecheck.LookupRuntimeFunc("deferreturn")
ir.Syms.Duffcopy = typecheck.LookupRuntimeFunc("duffcopy")
ir.Syms.Duffzero = typecheck.LookupRuntimeFunc("duffzero")
ir.Syms.GCWriteBarrier = typecheck.LookupRuntimeFunc("gcWriteBarrier")
ir.Syms.Goschedguarded = typecheck.LookupRuntimeFunc("goschedguarded")
ir.Syms.Growslice = typecheck.LookupRuntimeFunc("growslice")
ir.Syms.Msanread = typecheck.LookupRuntimeFunc("msanread")
ir.Syms.Msanwrite = typecheck.LookupRuntimeFunc("msanwrite")
ir.Syms.Msanmove = typecheck.LookupRuntimeFunc("msanmove")
ir.Syms.Newobject = typecheck.LookupRuntimeFunc("newobject")
ir.Syms.Newproc = typecheck.LookupRuntimeFunc("newproc")
ir.Syms.Panicdivide = typecheck.LookupRuntimeFunc("panicdivide")
ir.Syms.PanicdottypeE = typecheck.LookupRuntimeFunc("panicdottypeE")
ir.Syms.PanicdottypeI = typecheck.LookupRuntimeFunc("panicdottypeI")
ir.Syms.Panicnildottype = typecheck.LookupRuntimeFunc("panicnildottype")
ir.Syms.Panicoverflow = typecheck.LookupRuntimeFunc("panicoverflow")
ir.Syms.Panicshift = typecheck.LookupRuntimeFunc("panicshift")
ir.Syms.Raceread = typecheck.LookupRuntimeFunc("raceread")
ir.Syms.Racereadrange = typecheck.LookupRuntimeFunc("racereadrange")
ir.Syms.Racewrite = typecheck.LookupRuntimeFunc("racewrite")
ir.Syms.Racewriterange = typecheck.LookupRuntimeFunc("racewriterange")
ir.Syms.X86HasPOPCNT = typecheck.LookupRuntimeVar("x86HasPOPCNT") // bool
ir.Syms.X86HasSSE41 = typecheck.LookupRuntimeVar("x86HasSSE41") // bool
ir.Syms.X86HasFMA = typecheck.LookupRuntimeVar("x86HasFMA") // bool
ir.Syms.ARMHasVFPv4 = typecheck.LookupRuntimeVar("armHasVFPv4") // bool
ir.Syms.ARM64HasATOMICS = typecheck.LookupRuntimeVar("arm64HasATOMICS") // bool
ir.Syms.Typedmemclr = typecheck.LookupRuntimeFunc("typedmemclr")
ir.Syms.Typedmemmove = typecheck.LookupRuntimeFunc("typedmemmove")
ir.Syms.Udiv = typecheck.LookupRuntimeVar("udiv") // asm func with special ABI
ir.Syms.WriteBarrier = typecheck.LookupRuntimeVar("writeBarrier") // struct { bool; ... }
ir.Syms.Zerobase = typecheck.LookupRuntimeVar("zerobase")
// asm funcs with special ABI
if thearch.LinkArch.Name == "amd64" {
GCWriteBarrierReg = map[int16]*obj.LSym{
x86.REG_AX: sysfunc("gcWriteBarrier"),
x86.REG_CX: sysfunc("gcWriteBarrierCX"),
x86.REG_DX: sysfunc("gcWriteBarrierDX"),
x86.REG_BX: sysfunc("gcWriteBarrierBX"),
x86.REG_BP: sysfunc("gcWriteBarrierBP"),
x86.REG_SI: sysfunc("gcWriteBarrierSI"),
x86.REG_R8: sysfunc("gcWriteBarrierR8"),
x86.REG_R9: sysfunc("gcWriteBarrierR9"),
x86.REG_AX: typecheck.LookupRuntimeFunc("gcWriteBarrier"),
x86.REG_CX: typecheck.LookupRuntimeFunc("gcWriteBarrierCX"),
x86.REG_DX: typecheck.LookupRuntimeFunc("gcWriteBarrierDX"),
x86.REG_BX: typecheck.LookupRuntimeFunc("gcWriteBarrierBX"),
x86.REG_BP: typecheck.LookupRuntimeFunc("gcWriteBarrierBP"),
x86.REG_SI: typecheck.LookupRuntimeFunc("gcWriteBarrierSI"),
x86.REG_R8: typecheck.LookupRuntimeFunc("gcWriteBarrierR8"),
x86.REG_R9: typecheck.LookupRuntimeFunc("gcWriteBarrierR9"),
}
}
if thearch.LinkArch.Family == sys.Wasm {
BoundsCheckFunc[ssa.BoundsIndex] = sysfunc("goPanicIndex")
BoundsCheckFunc[ssa.BoundsIndexU] = sysfunc("goPanicIndexU")
BoundsCheckFunc[ssa.BoundsSliceAlen] = sysfunc("goPanicSliceAlen")
BoundsCheckFunc[ssa.BoundsSliceAlenU] = sysfunc("goPanicSliceAlenU")
BoundsCheckFunc[ssa.BoundsSliceAcap] = sysfunc("goPanicSliceAcap")
BoundsCheckFunc[ssa.BoundsSliceAcapU] = sysfunc("goPanicSliceAcapU")
BoundsCheckFunc[ssa.BoundsSliceB] = sysfunc("goPanicSliceB")
BoundsCheckFunc[ssa.BoundsSliceBU] = sysfunc("goPanicSliceBU")
BoundsCheckFunc[ssa.BoundsSlice3Alen] = sysfunc("goPanicSlice3Alen")
BoundsCheckFunc[ssa.BoundsSlice3AlenU] = sysfunc("goPanicSlice3AlenU")
BoundsCheckFunc[ssa.BoundsSlice3Acap] = sysfunc("goPanicSlice3Acap")
BoundsCheckFunc[ssa.BoundsSlice3AcapU] = sysfunc("goPanicSlice3AcapU")
BoundsCheckFunc[ssa.BoundsSlice3B] = sysfunc("goPanicSlice3B")
BoundsCheckFunc[ssa.BoundsSlice3BU] = sysfunc("goPanicSlice3BU")
BoundsCheckFunc[ssa.BoundsSlice3C] = sysfunc("goPanicSlice3C")
BoundsCheckFunc[ssa.BoundsSlice3CU] = sysfunc("goPanicSlice3CU")
BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("goPanicIndex")
BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("goPanicIndexU")
BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("goPanicSliceAlen")
BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("goPanicSliceAlenU")
BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("goPanicSliceAcap")
BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("goPanicSliceAcapU")
BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("goPanicSliceB")
BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("goPanicSliceBU")
BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("goPanicSlice3Alen")
BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("goPanicSlice3AlenU")
BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("goPanicSlice3Acap")
BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("goPanicSlice3AcapU")
BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("goPanicSlice3B")
BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("goPanicSlice3BU")
BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("goPanicSlice3C")
BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("goPanicSlice3CU")
} else {
BoundsCheckFunc[ssa.BoundsIndex] = sysfunc("panicIndex")
BoundsCheckFunc[ssa.BoundsIndexU] = sysfunc("panicIndexU")
BoundsCheckFunc[ssa.BoundsSliceAlen] = sysfunc("panicSliceAlen")
BoundsCheckFunc[ssa.BoundsSliceAlenU] = sysfunc("panicSliceAlenU")
BoundsCheckFunc[ssa.BoundsSliceAcap] = sysfunc("panicSliceAcap")
BoundsCheckFunc[ssa.BoundsSliceAcapU] = sysfunc("panicSliceAcapU")
BoundsCheckFunc[ssa.BoundsSliceB] = sysfunc("panicSliceB")
BoundsCheckFunc[ssa.BoundsSliceBU] = sysfunc("panicSliceBU")
BoundsCheckFunc[ssa.BoundsSlice3Alen] = sysfunc("panicSlice3Alen")
BoundsCheckFunc[ssa.BoundsSlice3AlenU] = sysfunc("panicSlice3AlenU")
BoundsCheckFunc[ssa.BoundsSlice3Acap] = sysfunc("panicSlice3Acap")
BoundsCheckFunc[ssa.BoundsSlice3AcapU] = sysfunc("panicSlice3AcapU")
BoundsCheckFunc[ssa.BoundsSlice3B] = sysfunc("panicSlice3B")
BoundsCheckFunc[ssa.BoundsSlice3BU] = sysfunc("panicSlice3BU")
BoundsCheckFunc[ssa.BoundsSlice3C] = sysfunc("panicSlice3C")
BoundsCheckFunc[ssa.BoundsSlice3CU] = sysfunc("panicSlice3CU")
BoundsCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeFunc("panicIndex")
BoundsCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeFunc("panicIndexU")
BoundsCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeFunc("panicSliceAlen")
BoundsCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeFunc("panicSliceAlenU")
BoundsCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeFunc("panicSliceAcap")
BoundsCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeFunc("panicSliceAcapU")
BoundsCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeFunc("panicSliceB")
BoundsCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeFunc("panicSliceBU")
BoundsCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeFunc("panicSlice3Alen")
BoundsCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeFunc("panicSlice3AlenU")
BoundsCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeFunc("panicSlice3Acap")
BoundsCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeFunc("panicSlice3AcapU")
BoundsCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeFunc("panicSlice3B")
BoundsCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeFunc("panicSlice3BU")
BoundsCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeFunc("panicSlice3C")
BoundsCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeFunc("panicSlice3CU")
}
if thearch.LinkArch.PtrSize == 4 {
ExtendCheckFunc[ssa.BoundsIndex] = sysvar("panicExtendIndex")
ExtendCheckFunc[ssa.BoundsIndexU] = sysvar("panicExtendIndexU")
ExtendCheckFunc[ssa.BoundsSliceAlen] = sysvar("panicExtendSliceAlen")
ExtendCheckFunc[ssa.BoundsSliceAlenU] = sysvar("panicExtendSliceAlenU")
ExtendCheckFunc[ssa.BoundsSliceAcap] = sysvar("panicExtendSliceAcap")
ExtendCheckFunc[ssa.BoundsSliceAcapU] = sysvar("panicExtendSliceAcapU")
ExtendCheckFunc[ssa.BoundsSliceB] = sysvar("panicExtendSliceB")
ExtendCheckFunc[ssa.BoundsSliceBU] = sysvar("panicExtendSliceBU")
ExtendCheckFunc[ssa.BoundsSlice3Alen] = sysvar("panicExtendSlice3Alen")
ExtendCheckFunc[ssa.BoundsSlice3AlenU] = sysvar("panicExtendSlice3AlenU")
ExtendCheckFunc[ssa.BoundsSlice3Acap] = sysvar("panicExtendSlice3Acap")
ExtendCheckFunc[ssa.BoundsSlice3AcapU] = sysvar("panicExtendSlice3AcapU")
ExtendCheckFunc[ssa.BoundsSlice3B] = sysvar("panicExtendSlice3B")
ExtendCheckFunc[ssa.BoundsSlice3BU] = sysvar("panicExtendSlice3BU")
ExtendCheckFunc[ssa.BoundsSlice3C] = sysvar("panicExtendSlice3C")
ExtendCheckFunc[ssa.BoundsSlice3CU] = sysvar("panicExtendSlice3CU")
ExtendCheckFunc[ssa.BoundsIndex] = typecheck.LookupRuntimeVar("panicExtendIndex")
ExtendCheckFunc[ssa.BoundsIndexU] = typecheck.LookupRuntimeVar("panicExtendIndexU")
ExtendCheckFunc[ssa.BoundsSliceAlen] = typecheck.LookupRuntimeVar("panicExtendSliceAlen")
ExtendCheckFunc[ssa.BoundsSliceAlenU] = typecheck.LookupRuntimeVar("panicExtendSliceAlenU")
ExtendCheckFunc[ssa.BoundsSliceAcap] = typecheck.LookupRuntimeVar("panicExtendSliceAcap")
ExtendCheckFunc[ssa.BoundsSliceAcapU] = typecheck.LookupRuntimeVar("panicExtendSliceAcapU")
ExtendCheckFunc[ssa.BoundsSliceB] = typecheck.LookupRuntimeVar("panicExtendSliceB")
ExtendCheckFunc[ssa.BoundsSliceBU] = typecheck.LookupRuntimeVar("panicExtendSliceBU")
ExtendCheckFunc[ssa.BoundsSlice3Alen] = typecheck.LookupRuntimeVar("panicExtendSlice3Alen")
ExtendCheckFunc[ssa.BoundsSlice3AlenU] = typecheck.LookupRuntimeVar("panicExtendSlice3AlenU")
ExtendCheckFunc[ssa.BoundsSlice3Acap] = typecheck.LookupRuntimeVar("panicExtendSlice3Acap")
ExtendCheckFunc[ssa.BoundsSlice3AcapU] = typecheck.LookupRuntimeVar("panicExtendSlice3AcapU")
ExtendCheckFunc[ssa.BoundsSlice3B] = typecheck.LookupRuntimeVar("panicExtendSlice3B")
ExtendCheckFunc[ssa.BoundsSlice3BU] = typecheck.LookupRuntimeVar("panicExtendSlice3BU")
ExtendCheckFunc[ssa.BoundsSlice3C] = typecheck.LookupRuntimeVar("panicExtendSlice3C")
ExtendCheckFunc[ssa.BoundsSlice3CU] = typecheck.LookupRuntimeVar("panicExtendSlice3CU")
}
// Wasm (all asm funcs with special ABIs)
ir.Syms.WasmMove = sysvar("wasmMove")
ir.Syms.WasmZero = sysvar("wasmZero")
ir.Syms.WasmDiv = sysvar("wasmDiv")
ir.Syms.WasmTruncS = sysvar("wasmTruncS")
ir.Syms.WasmTruncU = sysvar("wasmTruncU")
ir.Syms.SigPanic = sysfunc("sigpanic")
ir.Syms.WasmMove = typecheck.LookupRuntimeVar("wasmMove")
ir.Syms.WasmZero = typecheck.LookupRuntimeVar("wasmZero")
ir.Syms.WasmDiv = typecheck.LookupRuntimeVar("wasmDiv")
ir.Syms.WasmTruncS = typecheck.LookupRuntimeVar("wasmTruncS")
ir.Syms.WasmTruncU = typecheck.LookupRuntimeVar("wasmTruncU")
ir.Syms.SigPanic = typecheck.LookupRuntimeFunc("sigpanic")
}
// getParam returns the Field of ith param of node n (which is a
@ -418,7 +419,7 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
// Create the deferBits variable and stack slot. deferBits is a
// bitmask showing which of the open-coded defers in this function
// have been activated.
deferBitsTemp := tempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
deferBitsTemp := typecheck.TempAt(src.NoXPos, s.curfn, types.Types[types.TUINT8])
s.deferBitsTemp = deferBitsTemp
// For this value, AuxInt is initialized to zero by default
startDeferBits := s.entryNewValue0(ssa.OpConst8, types.Types[types.TUINT8])
@ -710,7 +711,7 @@ func (s *state) Warnl(pos src.XPos, msg string, args ...interface{}) { s.f.Warnl
func (s *state) Debug_checknil() bool { return s.f.Frontend().Debug_checknil() }
func ssaMarker(name string) *ir.Name {
return NewName(&types.Sym{Name: name})
return typecheck.NewName(&types.Sym{Name: name})
}
var (
@ -3342,38 +3343,38 @@ var softFloatOps map[ssa.Op]sfRtCallDef
func softfloatInit() {
// Some of these operations get transformed by sfcall.
softFloatOps = map[ssa.Op]sfRtCallDef{
ssa.OpAdd32F: sfRtCallDef{sysfunc("fadd32"), types.TFLOAT32},
ssa.OpAdd64F: sfRtCallDef{sysfunc("fadd64"), types.TFLOAT64},
ssa.OpSub32F: sfRtCallDef{sysfunc("fadd32"), types.TFLOAT32},
ssa.OpSub64F: sfRtCallDef{sysfunc("fadd64"), types.TFLOAT64},
ssa.OpMul32F: sfRtCallDef{sysfunc("fmul32"), types.TFLOAT32},
ssa.OpMul64F: sfRtCallDef{sysfunc("fmul64"), types.TFLOAT64},
ssa.OpDiv32F: sfRtCallDef{sysfunc("fdiv32"), types.TFLOAT32},
ssa.OpDiv64F: sfRtCallDef{sysfunc("fdiv64"), types.TFLOAT64},
ssa.OpAdd32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
ssa.OpAdd64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
ssa.OpSub32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd32"), types.TFLOAT32},
ssa.OpSub64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fadd64"), types.TFLOAT64},
ssa.OpMul32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fmul32"), types.TFLOAT32},
ssa.OpMul64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fmul64"), types.TFLOAT64},
ssa.OpDiv32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fdiv32"), types.TFLOAT32},
ssa.OpDiv64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fdiv64"), types.TFLOAT64},
ssa.OpEq64F: sfRtCallDef{sysfunc("feq64"), types.TBOOL},
ssa.OpEq32F: sfRtCallDef{sysfunc("feq32"), types.TBOOL},
ssa.OpNeq64F: sfRtCallDef{sysfunc("feq64"), types.TBOOL},
ssa.OpNeq32F: sfRtCallDef{sysfunc("feq32"), types.TBOOL},
ssa.OpLess64F: sfRtCallDef{sysfunc("fgt64"), types.TBOOL},
ssa.OpLess32F: sfRtCallDef{sysfunc("fgt32"), types.TBOOL},
ssa.OpLeq64F: sfRtCallDef{sysfunc("fge64"), types.TBOOL},
ssa.OpLeq32F: sfRtCallDef{sysfunc("fge32"), types.TBOOL},
ssa.OpEq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
ssa.OpEq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
ssa.OpNeq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq64"), types.TBOOL},
ssa.OpNeq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("feq32"), types.TBOOL},
ssa.OpLess64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fgt64"), types.TBOOL},
ssa.OpLess32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fgt32"), types.TBOOL},
ssa.OpLeq64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fge64"), types.TBOOL},
ssa.OpLeq32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fge32"), types.TBOOL},
ssa.OpCvt32to32F: sfRtCallDef{sysfunc("fint32to32"), types.TFLOAT32},
ssa.OpCvt32Fto32: sfRtCallDef{sysfunc("f32toint32"), types.TINT32},
ssa.OpCvt64to32F: sfRtCallDef{sysfunc("fint64to32"), types.TFLOAT32},
ssa.OpCvt32Fto64: sfRtCallDef{sysfunc("f32toint64"), types.TINT64},
ssa.OpCvt64Uto32F: sfRtCallDef{sysfunc("fuint64to32"), types.TFLOAT32},
ssa.OpCvt32Fto64U: sfRtCallDef{sysfunc("f32touint64"), types.TUINT64},
ssa.OpCvt32to64F: sfRtCallDef{sysfunc("fint32to64"), types.TFLOAT64},
ssa.OpCvt64Fto32: sfRtCallDef{sysfunc("f64toint32"), types.TINT32},
ssa.OpCvt64to64F: sfRtCallDef{sysfunc("fint64to64"), types.TFLOAT64},
ssa.OpCvt64Fto64: sfRtCallDef{sysfunc("f64toint64"), types.TINT64},
ssa.OpCvt64Uto64F: sfRtCallDef{sysfunc("fuint64to64"), types.TFLOAT64},
ssa.OpCvt64Fto64U: sfRtCallDef{sysfunc("f64touint64"), types.TUINT64},
ssa.OpCvt32Fto64F: sfRtCallDef{sysfunc("f32to64"), types.TFLOAT64},
ssa.OpCvt64Fto32F: sfRtCallDef{sysfunc("f64to32"), types.TFLOAT32},
ssa.OpCvt32to32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint32to32"), types.TFLOAT32},
ssa.OpCvt32Fto32: sfRtCallDef{typecheck.LookupRuntimeFunc("f32toint32"), types.TINT32},
ssa.OpCvt64to32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint64to32"), types.TFLOAT32},
ssa.OpCvt32Fto64: sfRtCallDef{typecheck.LookupRuntimeFunc("f32toint64"), types.TINT64},
ssa.OpCvt64Uto32F: sfRtCallDef{typecheck.LookupRuntimeFunc("fuint64to32"), types.TFLOAT32},
ssa.OpCvt32Fto64U: sfRtCallDef{typecheck.LookupRuntimeFunc("f32touint64"), types.TUINT64},
ssa.OpCvt32to64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint32to64"), types.TFLOAT64},
ssa.OpCvt64Fto32: sfRtCallDef{typecheck.LookupRuntimeFunc("f64toint32"), types.TINT32},
ssa.OpCvt64to64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fint64to64"), types.TFLOAT64},
ssa.OpCvt64Fto64: sfRtCallDef{typecheck.LookupRuntimeFunc("f64toint64"), types.TINT64},
ssa.OpCvt64Uto64F: sfRtCallDef{typecheck.LookupRuntimeFunc("fuint64to64"), types.TFLOAT64},
ssa.OpCvt64Fto64U: sfRtCallDef{typecheck.LookupRuntimeFunc("f64touint64"), types.TUINT64},
ssa.OpCvt32Fto64F: sfRtCallDef{typecheck.LookupRuntimeFunc("f32to64"), types.TFLOAT64},
ssa.OpCvt64Fto32F: sfRtCallDef{typecheck.LookupRuntimeFunc("f64to32"), types.TFLOAT32},
}
}
@ -4458,7 +4459,7 @@ func (s *state) openDeferSave(n ir.Node, t *types.Type, val *ssa.Value) *ssa.Val
} else {
pos = n.Pos()
}
argTemp := tempAt(pos.WithNotStmt(), s.curfn, t)
argTemp := typecheck.TempAt(pos.WithNotStmt(), s.curfn, t)
argTemp.SetOpenDeferSlot(true)
var addrArgTemp *ssa.Value
// Use OpVarLive to make sure stack slots for the args, etc. are not
@ -4719,7 +4720,7 @@ func (s *state) call(n *ir.CallExpr, k callKind, returnResultAddr bool) *ssa.Val
testLateExpansion = ssa.LateCallExpansionEnabledWithin(s.f)
// Make a defer struct d on the stack.
t := deferstruct(stksize)
d := tempAt(n.Pos(), s.curfn, t)
d := typecheck.TempAt(n.Pos(), s.curfn, t)
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, d, s.mem())
addr := s.addr(d)
@ -6144,7 +6145,7 @@ func (s *state) dottype(n *ir.TypeAssertExpr, commaok bool) (res, resok *ssa.Val
if commaok && !canSSAType(n.Type()) {
// unSSAable type, use temporary.
// TODO: get rid of some of these temporaries.
tmp = tempAt(n.Pos(), s.curfn, n.Type())
tmp = typecheck.TempAt(n.Pos(), s.curfn, n.Type())
s.vars[memVar] = s.newValue1A(ssa.OpVarDef, types.TypeMem, tmp.(*ir.Name), s.mem())
addr = s.addr(tmp)
}
@ -7173,7 +7174,7 @@ func (e *ssafn) StringData(s string) *obj.LSym {
}
func (e *ssafn) Auto(pos src.XPos, t *types.Type) *ir.Name {
return tempAt(pos, e.curfn, t) // Note: adds new auto to e.curfn.Func.Dcl list
return typecheck.TempAt(pos, e.curfn, t) // Note: adds new auto to e.curfn.Func.Dcl list
}
func (e *ssafn) SplitString(name ssa.LocalSlot) (ssa.LocalSlot, ssa.LocalSlot) {

File diff suppressed because it is too large Load diff

View file

@ -7,6 +7,7 @@ package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/typecheck"
"cmd/compile/internal/types"
"cmd/internal/src"
"go/constant"
@ -14,221 +15,6 @@ import (
"sort"
)
// typecheckswitch typechecks a switch statement.
func typecheckswitch(n *ir.SwitchStmt) {
typecheckslice(n.Init(), ctxStmt)
if n.Tag != nil && n.Tag.Op() == ir.OTYPESW {
typecheckTypeSwitch(n)
} else {
typecheckExprSwitch(n)
}
}
func typecheckTypeSwitch(n *ir.SwitchStmt) {
guard := n.Tag.(*ir.TypeSwitchGuard)
guard.X = typecheck(guard.X, ctxExpr)
t := guard.X.Type()
if t != nil && !t.IsInterface() {
base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.X)
t = nil
}
// We don't actually declare the type switch's guarded
// declaration itself. So if there are no cases, we won't
// notice that it went unused.
if v := guard.Tag; v != nil && !ir.IsBlank(v) && len(n.Cases) == 0 {
base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
}
var defCase, nilCase ir.Node
var ts typeSet
for _, ncase := range n.Cases {
ncase := ncase.(*ir.CaseStmt)
ls := ncase.List
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
} else {
defCase = ncase
}
}
for i := range ls {
ls[i] = typecheck(ls[i], ctxExpr|ctxType)
n1 := ls[i]
if t == nil || n1.Type() == nil {
continue
}
var missing, have *types.Field
var ptr int
if ir.IsNil(n1) { // case nil:
if nilCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
} else {
nilCase = ncase
}
continue
}
if n1.Op() != ir.OTYPE {
base.ErrorfAt(ncase.Pos(), "%L is not a type", n1)
continue
}
if !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke() {
if have != nil && !have.Broke() {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.X, n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
} else if ptr != 0 {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (%v method has pointer receiver)", guard.X, n1.Type(), missing.Sym)
} else {
base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
" (missing %v method)", guard.X, n1.Type(), missing.Sym)
}
continue
}
ts.add(ncase.Pos(), n1.Type())
}
if len(ncase.Vars) != 0 {
// Assign the clause variable's type.
vt := t
if len(ls) == 1 {
if ls[0].Op() == ir.OTYPE {
vt = ls[0].Type()
} else if !ir.IsNil(ls[0]) {
// Invalid single-type case;
// mark variable as broken.
vt = nil
}
}
nvar := ncase.Vars[0]
nvar.SetType(vt)
if vt != nil {
nvar = typecheck(nvar, ctxExpr|ctxAssign)
} else {
// Clause variable is broken; prevent typechecking.
nvar.SetTypecheck(1)
nvar.SetWalkdef(1)
}
ncase.Vars[0] = nvar
}
typecheckslice(ncase.Body, ctxStmt)
}
}
type typeSet struct {
m map[string][]typeSetEntry
}
type typeSetEntry struct {
pos src.XPos
typ *types.Type
}
func (s *typeSet) add(pos src.XPos, typ *types.Type) {
if s.m == nil {
s.m = make(map[string][]typeSetEntry)
}
// LongString does not uniquely identify types, so we need to
// disambiguate collisions with types.Identical.
// TODO(mdempsky): Add a method that *is* unique.
ls := typ.LongString()
prevs := s.m[ls]
for _, prev := range prevs {
if types.Identical(typ, prev.typ) {
base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(prev.pos))
return
}
}
s.m[ls] = append(prevs, typeSetEntry{pos, typ})
}
func typecheckExprSwitch(n *ir.SwitchStmt) {
t := types.Types[types.TBOOL]
if n.Tag != nil {
n.Tag = typecheck(n.Tag, ctxExpr)
n.Tag = defaultlit(n.Tag, nil)
t = n.Tag.Type()
}
var nilonly string
if t != nil {
switch {
case t.IsMap():
nilonly = "map"
case t.Kind() == types.TFUNC:
nilonly = "func"
case t.IsSlice():
nilonly = "slice"
case !types.IsComparable(t):
if t.IsStruct() {
base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Tag, types.IncomparableField(t).Type)
} else {
base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Tag)
}
t = nil
}
}
var defCase ir.Node
var cs constSet
for _, ncase := range n.Cases {
ncase := ncase.(*ir.CaseStmt)
ls := ncase.List
if len(ls) == 0 { // default:
if defCase != nil {
base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
} else {
defCase = ncase
}
}
for i := range ls {
ir.SetPos(ncase)
ls[i] = typecheck(ls[i], ctxExpr)
ls[i] = defaultlit(ls[i], t)
n1 := ls[i]
if t == nil || n1.Type() == nil {
continue
}
if nilonly != "" && !ir.IsNil(n1) {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Tag)
} else if t.IsInterface() && !n1.Type().IsInterface() && !types.IsComparable(n1.Type()) {
base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
} else {
op1, _ := assignop(n1.Type(), t)
op2, _ := assignop(t, n1.Type())
if op1 == ir.OXXX && op2 == ir.OXXX {
if n.Tag != nil {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Tag, n1.Type(), t)
} else {
base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
}
}
}
// Don't check for duplicate bools. Although the spec allows it,
// (1) the compiler hasn't checked it in the past, so compatibility mandates it, and
// (2) it would disallow useful things like
// case GOARCH == "arm" && GOARM == "5":
// case GOARCH == "arm":
// which would both evaluate to false for non-ARM compiles.
if !n1.Type().IsBoolean() {
cs.add(ncase.Pos(), n1, "case", "switch")
}
}
typecheckslice(ncase.Body, ctxStmt)
}
}
// walkswitch walks a switch statement.
func walkswitch(sw *ir.SwitchStmt) {
// Guard against double walk, see #25776.
@ -254,8 +40,8 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
// convert switch {...} to switch true {...}
if cond == nil {
cond = ir.NewBool(true)
cond = typecheck(cond, ctxExpr)
cond = defaultlit(cond, nil)
cond = typecheck.Expr(cond)
cond = typecheck.DefaultLit(cond, nil)
}
// Given "switch string(byteslice)",
@ -285,7 +71,7 @@ func walkExprSwitch(sw *ir.SwitchStmt) {
var body ir.Nodes
for _, ncase := range sw.Cases {
ncase := ncase.(*ir.CaseStmt)
label := autolabel(".s")
label := typecheck.AutoLabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
// Process case dispatch.
@ -509,7 +295,7 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
s.facename = walkexpr(s.facename, sw.PtrInit())
s.facename = copyexpr(s.facename, s.facename.Type(), &sw.Compiled)
s.okname = temp(types.Types[types.TBOOL])
s.okname = typecheck.Temp(types.Types[types.TBOOL])
// Get interface descriptor word.
// For empty interfaces this will be the type.
@ -523,10 +309,10 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
// h := e._type.hash
// Use a similar strategy for non-empty interfaces.
ifNil := ir.NewIfStmt(base.Pos, nil, nil, nil)
ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, nodnil())
ifNil.Cond = ir.NewBinaryExpr(base.Pos, ir.OEQ, itab, typecheck.NodNil())
base.Pos = base.Pos.WithNotStmt() // disable statement marks after the first check.
ifNil.Cond = typecheck(ifNil.Cond, ctxExpr)
ifNil.Cond = defaultlit(ifNil.Cond, nil)
ifNil.Cond = typecheck.Expr(ifNil.Cond)
ifNil.Cond = typecheck.DefaultLit(ifNil.Cond, nil)
// ifNil.Nbody assigned at end.
sw.Compiled.Append(ifNil)
@ -561,7 +347,7 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
}
caseVarInitialized := false
label := autolabel(".s")
label := typecheck.AutoLabel(".s")
jmp := ir.NewBranchStmt(ncase.Pos(), ir.OGOTO, label)
if len(ncase.List) == 0 { // default:
@ -602,7 +388,7 @@ func walkTypeSwitch(sw *ir.SwitchStmt) {
ir.NewDecl(ncase.Pos(), ir.ODCL, caseVar),
ir.NewAssignStmt(ncase.Pos(), caseVar, val),
}
typecheckslice(l, ctxStmt)
typecheck.Stmts(l)
body.Append(l...)
}
body.Append(ncase.Body...)
@ -648,7 +434,7 @@ func (s *typeSwitch) Add(pos src.XPos, typ *types.Type, caseVar, jmp ir.Node) {
ir.NewDecl(pos, ir.ODCL, caseVar),
ir.NewAssignStmt(pos, caseVar, nil),
}
typecheckslice(l, ctxStmt)
typecheck.Stmts(l)
body.Append(l...)
} else {
caseVar = ir.BlankNode
@ -740,8 +526,8 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i in
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
leaf(i, nif)
base.Pos = base.Pos.WithNotStmt()
nif.Cond = typecheck(nif.Cond, ctxExpr)
nif.Cond = defaultlit(nif.Cond, nil)
nif.Cond = typecheck.Expr(nif.Cond)
nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
out.Append(nif)
out = &nif.Else
}
@ -752,8 +538,8 @@ func binarySearch(n int, out *ir.Nodes, less func(i int) ir.Node, leaf func(i in
nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
nif.Cond = less(half)
base.Pos = base.Pos.WithNotStmt()
nif.Cond = typecheck(nif.Cond, ctxExpr)
nif.Cond = defaultlit(nif.Cond, nil)
nif.Cond = typecheck.Expr(nif.Cond)
nif.Cond = typecheck.DefaultLit(nif.Cond, nil)
do(lo, half, &nif.Body)
do(half, hi, &nif.Else)
out.Append(nif)

View file

@ -1,8 +0,0 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file implements convertions between *types.Node and *Node.
// TODO(gri) try to eliminate these soon
package gc

View file

@ -1,90 +0,0 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
// evalunsafe evaluates a package unsafe operation and returns the result.
func evalunsafe(n ir.Node) int64 {
switch n.Op() {
case ir.OALIGNOF, ir.OSIZEOF:
n := n.(*ir.UnaryExpr)
n.X = typecheck(n.X, ctxExpr)
n.X = defaultlit(n.X, nil)
tr := n.X.Type()
if tr == nil {
return 0
}
types.CalcSize(tr)
if n.Op() == ir.OALIGNOF {
return int64(tr.Align)
}
return tr.Width
case ir.OOFFSETOF:
// must be a selector.
n := n.(*ir.UnaryExpr)
if n.X.Op() != ir.OXDOT {
base.Errorf("invalid expression %v", n)
return 0
}
sel := n.X.(*ir.SelectorExpr)
// Remember base of selector to find it back after dot insertion.
// Since r->left may be mutated by typechecking, check it explicitly
// first to track it correctly.
sel.X = typecheck(sel.X, ctxExpr)
sbase := sel.X
tsel := typecheck(sel, ctxExpr)
n.X = tsel
if tsel.Type() == nil {
return 0
}
switch tsel.Op() {
case ir.ODOT, ir.ODOTPTR:
break
case ir.OCALLPART:
base.Errorf("invalid expression %v: argument is a method value", n)
return 0
default:
base.Errorf("invalid expression %v", n)
return 0
}
// Sum offsets for dots until we reach sbase.
var v int64
var next ir.Node
for r := tsel; r != sbase; r = next {
switch r.Op() {
case ir.ODOTPTR:
// For Offsetof(s.f), s may itself be a pointer,
// but accessing f must not otherwise involve
// indirection via embedded pointer types.
r := r.(*ir.SelectorExpr)
if r.X != sbase {
base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.X)
return 0
}
fallthrough
case ir.ODOT:
r := r.(*ir.SelectorExpr)
v += r.Offset
next = r.X
default:
ir.Dump("unsafenmagic", tsel)
base.Fatalf("impossible %v node after dot insertion", r.Op())
}
}
return v
}
base.Fatalf("unexpected op %v", n.Op())
return 0
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,102 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import "cmd/compile/internal/types"
// ----------------------------------------------------------------------------
// Export format
// Tags. Must be < 0.
const (
// Objects
packageTag = -(iota + 1)
constTag
typeTag
varTag
funcTag
endTag
// Types
namedTag
arrayTag
sliceTag
dddTag
structTag
pointerTag
signatureTag
interfaceTag
mapTag
chanTag
// Values
falseTag
trueTag
int64Tag
floatTag
fractionTag // not used by gc
complexTag
stringTag
nilTag
unknownTag // not used by gc (only appears in packages with errors)
// Type aliases
aliasTag
)
var predecl []*types.Type // initialized lazily
func predeclared() []*types.Type {
if predecl == nil {
// initialize lazily to be sure that all
// elements have been initialized before
predecl = []*types.Type{
// basic types
types.Types[types.TBOOL],
types.Types[types.TINT],
types.Types[types.TINT8],
types.Types[types.TINT16],
types.Types[types.TINT32],
types.Types[types.TINT64],
types.Types[types.TUINT],
types.Types[types.TUINT8],
types.Types[types.TUINT16],
types.Types[types.TUINT32],
types.Types[types.TUINT64],
types.Types[types.TUINTPTR],
types.Types[types.TFLOAT32],
types.Types[types.TFLOAT64],
types.Types[types.TCOMPLEX64],
types.Types[types.TCOMPLEX128],
types.Types[types.TSTRING],
// basic type aliases
types.ByteType,
types.RuneType,
// error
types.ErrorType,
// untyped types
types.UntypedBool,
types.UntypedInt,
types.UntypedRune,
types.UntypedFloat,
types.UntypedComplex,
types.UntypedString,
types.Types[types.TNIL],
// package unsafe
types.Types[types.TUNSAFEPTR],
// invalid type (package contains errors)
types.Types[types.Txxx],
// any type, for builtin export data
types.Types[types.TANY],
}
}
return predecl
}

View file

@ -0,0 +1,344 @@
// Code generated by mkbuiltin.go. DO NOT EDIT.
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
)
var runtimeDecls = [...]struct {
name string
tag int
typ int
}{
{"newobject", funcTag, 4},
{"mallocgc", funcTag, 8},
{"panicdivide", funcTag, 9},
{"panicshift", funcTag, 9},
{"panicmakeslicelen", funcTag, 9},
{"panicmakeslicecap", funcTag, 9},
{"throwinit", funcTag, 9},
{"panicwrap", funcTag, 9},
{"gopanic", funcTag, 11},
{"gorecover", funcTag, 14},
{"goschedguarded", funcTag, 9},
{"goPanicIndex", funcTag, 16},
{"goPanicIndexU", funcTag, 18},
{"goPanicSliceAlen", funcTag, 16},
{"goPanicSliceAlenU", funcTag, 18},
{"goPanicSliceAcap", funcTag, 16},
{"goPanicSliceAcapU", funcTag, 18},
{"goPanicSliceB", funcTag, 16},
{"goPanicSliceBU", funcTag, 18},
{"goPanicSlice3Alen", funcTag, 16},
{"goPanicSlice3AlenU", funcTag, 18},
{"goPanicSlice3Acap", funcTag, 16},
{"goPanicSlice3AcapU", funcTag, 18},
{"goPanicSlice3B", funcTag, 16},
{"goPanicSlice3BU", funcTag, 18},
{"goPanicSlice3C", funcTag, 16},
{"goPanicSlice3CU", funcTag, 18},
{"printbool", funcTag, 19},
{"printfloat", funcTag, 21},
{"printint", funcTag, 23},
{"printhex", funcTag, 25},
{"printuint", funcTag, 25},
{"printcomplex", funcTag, 27},
{"printstring", funcTag, 29},
{"printpointer", funcTag, 30},
{"printuintptr", funcTag, 31},
{"printiface", funcTag, 30},
{"printeface", funcTag, 30},
{"printslice", funcTag, 30},
{"printnl", funcTag, 9},
{"printsp", funcTag, 9},
{"printlock", funcTag, 9},
{"printunlock", funcTag, 9},
{"concatstring2", funcTag, 34},
{"concatstring3", funcTag, 35},
{"concatstring4", funcTag, 36},
{"concatstring5", funcTag, 37},
{"concatstrings", funcTag, 39},
{"cmpstring", funcTag, 40},
{"intstring", funcTag, 43},
{"slicebytetostring", funcTag, 44},
{"slicebytetostringtmp", funcTag, 45},
{"slicerunetostring", funcTag, 48},
{"stringtoslicebyte", funcTag, 50},
{"stringtoslicerune", funcTag, 53},
{"slicecopy", funcTag, 54},
{"decoderune", funcTag, 55},
{"countrunes", funcTag, 56},
{"convI2I", funcTag, 57},
{"convT16", funcTag, 58},
{"convT32", funcTag, 58},
{"convT64", funcTag, 58},
{"convTstring", funcTag, 58},
{"convTslice", funcTag, 58},
{"convT2E", funcTag, 59},
{"convT2Enoptr", funcTag, 59},
{"convT2I", funcTag, 59},
{"convT2Inoptr", funcTag, 59},
{"assertE2I", funcTag, 57},
{"assertE2I2", funcTag, 60},
{"assertI2I", funcTag, 57},
{"assertI2I2", funcTag, 60},
{"panicdottypeE", funcTag, 61},
{"panicdottypeI", funcTag, 61},
{"panicnildottype", funcTag, 62},
{"ifaceeq", funcTag, 64},
{"efaceeq", funcTag, 64},
{"fastrand", funcTag, 66},
{"makemap64", funcTag, 68},
{"makemap", funcTag, 69},
{"makemap_small", funcTag, 70},
{"mapaccess1", funcTag, 71},
{"mapaccess1_fast32", funcTag, 72},
{"mapaccess1_fast64", funcTag, 72},
{"mapaccess1_faststr", funcTag, 72},
{"mapaccess1_fat", funcTag, 73},
{"mapaccess2", funcTag, 74},
{"mapaccess2_fast32", funcTag, 75},
{"mapaccess2_fast64", funcTag, 75},
{"mapaccess2_faststr", funcTag, 75},
{"mapaccess2_fat", funcTag, 76},
{"mapassign", funcTag, 71},
{"mapassign_fast32", funcTag, 72},
{"mapassign_fast32ptr", funcTag, 72},
{"mapassign_fast64", funcTag, 72},
{"mapassign_fast64ptr", funcTag, 72},
{"mapassign_faststr", funcTag, 72},
{"mapiterinit", funcTag, 77},
{"mapdelete", funcTag, 77},
{"mapdelete_fast32", funcTag, 78},
{"mapdelete_fast64", funcTag, 78},
{"mapdelete_faststr", funcTag, 78},
{"mapiternext", funcTag, 79},
{"mapclear", funcTag, 80},
{"makechan64", funcTag, 82},
{"makechan", funcTag, 83},
{"chanrecv1", funcTag, 85},
{"chanrecv2", funcTag, 86},
{"chansend1", funcTag, 88},
{"closechan", funcTag, 30},
{"writeBarrier", varTag, 90},
{"typedmemmove", funcTag, 91},
{"typedmemclr", funcTag, 92},
{"typedslicecopy", funcTag, 93},
{"selectnbsend", funcTag, 94},
{"selectnbrecv", funcTag, 95},
{"selectnbrecv2", funcTag, 97},
{"selectsetpc", funcTag, 98},
{"selectgo", funcTag, 99},
{"block", funcTag, 9},
{"makeslice", funcTag, 100},
{"makeslice64", funcTag, 101},
{"makeslicecopy", funcTag, 102},
{"growslice", funcTag, 104},
{"memmove", funcTag, 105},
{"memclrNoHeapPointers", funcTag, 106},
{"memclrHasPointers", funcTag, 106},
{"memequal", funcTag, 107},
{"memequal0", funcTag, 108},
{"memequal8", funcTag, 108},
{"memequal16", funcTag, 108},
{"memequal32", funcTag, 108},
{"memequal64", funcTag, 108},
{"memequal128", funcTag, 108},
{"f32equal", funcTag, 109},
{"f64equal", funcTag, 109},
{"c64equal", funcTag, 109},
{"c128equal", funcTag, 109},
{"strequal", funcTag, 109},
{"interequal", funcTag, 109},
{"nilinterequal", funcTag, 109},
{"memhash", funcTag, 110},
{"memhash0", funcTag, 111},
{"memhash8", funcTag, 111},
{"memhash16", funcTag, 111},
{"memhash32", funcTag, 111},
{"memhash64", funcTag, 111},
{"memhash128", funcTag, 111},
{"f32hash", funcTag, 111},
{"f64hash", funcTag, 111},
{"c64hash", funcTag, 111},
{"c128hash", funcTag, 111},
{"strhash", funcTag, 111},
{"interhash", funcTag, 111},
{"nilinterhash", funcTag, 111},
{"int64div", funcTag, 112},
{"uint64div", funcTag, 113},
{"int64mod", funcTag, 112},
{"uint64mod", funcTag, 113},
{"float64toint64", funcTag, 114},
{"float64touint64", funcTag, 115},
{"float64touint32", funcTag, 116},
{"int64tofloat64", funcTag, 117},
{"uint64tofloat64", funcTag, 118},
{"uint32tofloat64", funcTag, 119},
{"complex128div", funcTag, 120},
{"racefuncenter", funcTag, 31},
{"racefuncenterfp", funcTag, 9},
{"racefuncexit", funcTag, 9},
{"raceread", funcTag, 31},
{"racewrite", funcTag, 31},
{"racereadrange", funcTag, 121},
{"racewriterange", funcTag, 121},
{"msanread", funcTag, 121},
{"msanwrite", funcTag, 121},
{"msanmove", funcTag, 122},
{"checkptrAlignment", funcTag, 123},
{"checkptrArithmetic", funcTag, 125},
{"libfuzzerTraceCmp1", funcTag, 127},
{"libfuzzerTraceCmp2", funcTag, 129},
{"libfuzzerTraceCmp4", funcTag, 130},
{"libfuzzerTraceCmp8", funcTag, 131},
{"libfuzzerTraceConstCmp1", funcTag, 127},
{"libfuzzerTraceConstCmp2", funcTag, 129},
{"libfuzzerTraceConstCmp4", funcTag, 130},
{"libfuzzerTraceConstCmp8", funcTag, 131},
{"x86HasPOPCNT", varTag, 6},
{"x86HasSSE41", varTag, 6},
{"x86HasFMA", varTag, 6},
{"armHasVFPv4", varTag, 6},
{"arm64HasATOMICS", varTag, 6},
}
// runtimeTypes constructs the table of types referenced by runtimeDecls.
// This is machine-generated code (see [git-generate] in the commit header):
// entries are emitted in dependency order so later entries may refer to
// earlier ones by index, and the funcTag/varTag indices in runtimeDecls
// index into the returned slice. Do not edit by hand.
func runtimeTypes() []*types.Type {
	var typs [132]*types.Type
	// Basic types, pointers, and the signatures built from them.
	typs[0] = types.ByteType
	typs[1] = types.NewPtr(typs[0])
	typs[2] = types.Types[types.TANY]
	typs[3] = types.NewPtr(typs[2])
	typs[4] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[5] = types.Types[types.TUINTPTR]
	typs[6] = types.Types[types.TBOOL]
	typs[7] = types.Types[types.TUNSAFEPTR]
	typs[8] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[9] = NewFuncType(nil, nil, nil)
	typs[10] = types.Types[types.TINTER]
	typs[11] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])}, nil)
	typs[12] = types.Types[types.TINT32]
	typs[13] = types.NewPtr(typs[12])
	typs[14] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[13])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[10])})
	typs[15] = types.Types[types.TINT]
	typs[16] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
	typs[17] = types.Types[types.TUINT]
	typs[18] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[17]), ir.NewField(base.Pos, nil, nil, typs[15])}, nil)
	typs[19] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])}, nil)
	typs[20] = types.Types[types.TFLOAT64]
	typs[21] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, nil)
	typs[22] = types.Types[types.TINT64]
	typs[23] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, nil)
	typs[24] = types.Types[types.TUINT64]
	typs[25] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
	typs[26] = types.Types[types.TCOMPLEX128]
	typs[27] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])}, nil)
	typs[28] = types.Types[types.TSTRING]
	typs[29] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, nil)
	typs[30] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
	typs[31] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	// String/slice helpers (concatstrings, slicebytetostring, etc.).
	typs[32] = types.NewArray(typs[0], 32)
	typs[33] = types.NewPtr(typs[32])
	typs[34] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[35] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[36] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[37] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[38] = types.NewSlice(typs[28])
	typs[39] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[38])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[40] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[41] = types.NewArray(typs[0], 4)
	typs[42] = types.NewPtr(typs[41])
	typs[43] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[42]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[44] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[45] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[46] = types.RuneType
	typs[47] = types.NewSlice(typs[46])
	typs[48] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[47])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])})
	typs[49] = types.NewSlice(typs[0])
	typs[50] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[33]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[49])})
	typs[51] = types.NewArray(typs[46], 32)
	typs[52] = types.NewPtr(typs[51])
	typs[53] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[52]), ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[47])})
	typs[54] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[55] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[46]), ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[56] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[28])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	// Interface conversion helpers.
	typs[57] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
	typs[58] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[59] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2])})
	typs[60] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[2]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[61] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
	typs[62] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1])}, nil)
	typs[63] = types.NewPtr(typs[5])
	typs[64] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[65] = types.Types[types.TUINT32]
	typs[66] = NewFuncType(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
	// Map runtime helpers (makemap, mapaccess, mapassign, mapdelete, ...).
	typs[67] = types.NewMap(typs[2], typs[2])
	typs[68] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
	typs[69] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
	typs[70] = NewFuncType(nil, nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[67])})
	typs[71] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[72] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[73] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])})
	typs[74] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[75] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[76] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[1])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[77] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[78] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67]), ir.NewField(base.Pos, nil, nil, typs[2])}, nil)
	typs[79] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[80] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[67])}, nil)
	// Channel and select runtime helpers.
	typs[81] = types.NewChan(typs[2], types.Cboth)
	typs[82] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
	typs[83] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[81])})
	typs[84] = types.NewChan(typs[2], types.Crecv)
	typs[85] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[86] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[84]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[87] = types.NewChan(typs[2], types.Csend)
	typs[88] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[89] = types.NewArray(typs[0], 3)
	// Layout of runtime.writeBarrier; field names must match the runtime.
	typs[90] = NewStructType([]*ir.Field{ir.NewField(base.Pos, Lookup("enabled"), nil, typs[6]), ir.NewField(base.Pos, Lookup("pad"), nil, typs[89]), ir.NewField(base.Pos, Lookup("needed"), nil, typs[6]), ir.NewField(base.Pos, Lookup("cgo"), nil, typs[6]), ir.NewField(base.Pos, Lookup("alignme"), nil, typs[24])})
	typs[91] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[92] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3])}, nil)
	typs[93] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15])})
	typs[94] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[87]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[95] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[96] = types.NewPtr(typs[6])
	typs[97] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[96]), ir.NewField(base.Pos, nil, nil, typs[84])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[98] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[63])}, nil)
	typs[99] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[63]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[6])})
	// Slice allocation/copy helpers.
	typs[100] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[101] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[102] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[15]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7])})
	typs[103] = types.NewSlice(typs[2])
	typs[104] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[103]), ir.NewField(base.Pos, nil, nil, typs[15])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[103])})
	typs[105] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[106] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[107] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	// Equality and hash function signatures (memequal*, *hash).
	typs[108] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[3]), ir.NewField(base.Pos, nil, nil, typs[3])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[109] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[7])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[6])})
	typs[110] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
	typs[111] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[5])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5])})
	// 64-bit math and float<->int conversion helpers for 32-bit targets.
	typs[112] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22]), ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
	typs[113] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
	typs[114] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])})
	typs[115] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])})
	typs[116] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])})
	typs[117] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[22])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
	typs[118] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
	typs[119] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[20])})
	typs[120] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26]), ir.NewField(base.Pos, nil, nil, typs[26])}, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[26])})
	// Race/msan/checkptr/libfuzzer instrumentation signatures.
	typs[121] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[122] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[123] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[1]), ir.NewField(base.Pos, nil, nil, typs[5])}, nil)
	typs[124] = types.NewSlice(typs[7])
	typs[125] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[7]), ir.NewField(base.Pos, nil, nil, typs[124])}, nil)
	typs[126] = types.Types[types.TUINT8]
	typs[127] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[126]), ir.NewField(base.Pos, nil, nil, typs[126])}, nil)
	typs[128] = types.Types[types.TUINT16]
	typs[129] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[128]), ir.NewField(base.Pos, nil, nil, typs[128])}, nil)
	typs[130] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[65]), ir.NewField(base.Pos, nil, nil, typs[65])}, nil)
	typs[131] = NewFuncType(nil, []*ir.Field{ir.NewField(base.Pos, nil, nil, typs[24]), ir.NewField(base.Pos, nil, nil, typs[24])}, nil)
	return typs[:]
}

View file

@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc_test
package typecheck
import (
"bytes"

View file

@ -2,13 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
"fmt"
"go/constant"
"go/token"
@ -16,6 +12,11 @@ import (
"math/big"
"strings"
"unicode"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
func roundFloat(v constant.Value, sz int64) constant.Value {
@ -61,7 +62,7 @@ func trunccmplxlit(v constant.Value, t *types.Type) constant.Value {
// TODO(mdempsky): Replace these with better APIs.
func convlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
func defaultlit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
func DefaultLit(n ir.Node, t *types.Type) ir.Node { return convlit1(n, t, false, nil) }
// convlit1 converts an untyped expression n to type t. If n already
// has a type, convlit1 has no effect.
@ -134,7 +135,7 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT, ir.OREAL, ir.OIMAG:
ot := operandType(n.Op(), t)
if ot == nil {
n = defaultlit(n, nil)
n = DefaultLit(n, nil)
break
}
@ -150,7 +151,7 @@ func convlit1(n ir.Node, t *types.Type, explicit bool, context func() string) ir
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT, ir.OOROR, ir.OANDAND, ir.OCOMPLEX:
ot := operandType(n.Op(), t)
if ot == nil {
n = defaultlit(n, nil)
n = DefaultLit(n, nil)
break
}
@ -387,11 +388,11 @@ var tokenForOp = [...]token.Token{
ir.ORSH: token.SHR,
}
// evalConst returns a constant-evaluated expression equivalent to n.
// If n is not a constant, evalConst returns n.
// Otherwise, evalConst returns a new OLITERAL with the same value as n,
// EvalConst returns a constant-evaluated expression equivalent to n.
// If n is not a constant, EvalConst returns n.
// Otherwise, EvalConst returns a new OLITERAL with the same value as n,
// and with .Orig pointing back to n.
func evalConst(n ir.Node) ir.Node {
func EvalConst(n ir.Node) ir.Node {
// Pick off just the opcodes that can be constant evaluated.
switch n.Op() {
case ir.OPLUS, ir.ONEG, ir.OBITNOT, ir.ONOT:
@ -402,7 +403,7 @@ func evalConst(n ir.Node) ir.Node {
if n.Type().IsUnsigned() {
prec = uint(n.Type().Size() * 8)
}
return origConst(n, constant.UnaryOp(tokenForOp[n.Op()], nl.Val(), prec))
return OrigConst(n, constant.UnaryOp(tokenForOp[n.Op()], nl.Val(), prec))
}
case ir.OADD, ir.OSUB, ir.OMUL, ir.ODIV, ir.OMOD, ir.OOR, ir.OXOR, ir.OAND, ir.OANDNOT:
@ -427,21 +428,21 @@ func evalConst(n ir.Node) ir.Node {
if n.Op() == ir.ODIV && n.Type().IsInteger() {
tok = token.QUO_ASSIGN // integer division
}
return origConst(n, constant.BinaryOp(nl.Val(), tok, rval))
return OrigConst(n, constant.BinaryOp(nl.Val(), tok, rval))
}
case ir.OOROR, ir.OANDAND:
n := n.(*ir.LogicalExpr)
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, constant.BinaryOp(nl.Val(), tokenForOp[n.Op()], nr.Val()))
return OrigConst(n, constant.BinaryOp(nl.Val(), tokenForOp[n.Op()], nr.Val()))
}
case ir.OEQ, ir.ONE, ir.OLT, ir.OLE, ir.OGT, ir.OGE:
n := n.(*ir.BinaryExpr)
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origBoolConst(n, constant.Compare(nl.Val(), tokenForOp[n.Op()], nr.Val()))
return OrigBool(n, constant.Compare(nl.Val(), tokenForOp[n.Op()], nr.Val()))
}
case ir.OLSH, ir.ORSH:
@ -456,14 +457,14 @@ func evalConst(n ir.Node) ir.Node {
n.SetType(nil)
break
}
return origConst(n, constant.Shift(toint(nl.Val()), tokenForOp[n.Op()], uint(s)))
return OrigConst(n, constant.Shift(toint(nl.Val()), tokenForOp[n.Op()], uint(s)))
}
case ir.OCONV, ir.ORUNESTR:
n := n.(*ir.ConvExpr)
nl := n.X
if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
return origConst(n, convertVal(nl.Val(), n.Type(), true))
return OrigConst(n, convertVal(nl.Val(), n.Type(), true))
}
case ir.OCONVNOP:
@ -472,7 +473,7 @@ func evalConst(n ir.Node) ir.Node {
if ir.OKForConst[n.Type().Kind()] && nl.Op() == ir.OLITERAL {
// set so n.Orig gets OCONV instead of OCONVNOP
n.SetOp(ir.OCONV)
return origConst(n, nl.Val())
return OrigConst(n, nl.Val())
}
case ir.OADDSTR:
@ -494,7 +495,7 @@ func evalConst(n ir.Node) ir.Node {
for _, c := range s {
strs = append(strs, ir.StringVal(c))
}
return origConst(n, constant.MakeString(strings.Join(strs, "")))
return OrigConst(n, constant.MakeString(strings.Join(strs, "")))
}
newList := make([]ir.Node, 0, need)
for i := 0; i < len(s); i++ {
@ -509,7 +510,7 @@ func evalConst(n ir.Node) ir.Node {
nl := ir.Copy(n).(*ir.AddStringExpr)
nl.List.Set(s[i:i2])
newList = append(newList, origConst(nl, constant.MakeString(strings.Join(strs, ""))))
newList = append(newList, OrigConst(nl, constant.MakeString(strings.Join(strs, ""))))
i = i2 - 1
} else {
newList = append(newList, s[i])
@ -526,37 +527,37 @@ func evalConst(n ir.Node) ir.Node {
switch nl.Type().Kind() {
case types.TSTRING:
if ir.IsConst(nl, constant.String) {
return origIntConst(n, int64(len(ir.StringVal(nl))))
return OrigInt(n, int64(len(ir.StringVal(nl))))
}
case types.TARRAY:
if !anyCallOrChan(nl) {
return origIntConst(n, nl.Type().NumElem())
return OrigInt(n, nl.Type().NumElem())
}
}
case ir.OALIGNOF, ir.OOFFSETOF, ir.OSIZEOF:
n := n.(*ir.UnaryExpr)
return origIntConst(n, evalunsafe(n))
return OrigInt(n, evalunsafe(n))
case ir.OREAL:
n := n.(*ir.UnaryExpr)
nl := n.X
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Real(nl.Val()))
return OrigConst(n, constant.Real(nl.Val()))
}
case ir.OIMAG:
n := n.(*ir.UnaryExpr)
nl := n.X
if nl.Op() == ir.OLITERAL {
return origConst(n, constant.Imag(nl.Val()))
return OrigConst(n, constant.Imag(nl.Val()))
}
case ir.OCOMPLEX:
n := n.(*ir.BinaryExpr)
nl, nr := n.X, n.Y
if nl.Op() == ir.OLITERAL && nr.Op() == ir.OLITERAL {
return origConst(n, makeComplex(nl.Val(), nr.Val()))
return OrigConst(n, makeComplex(nl.Val(), nr.Val()))
}
}
@ -598,8 +599,8 @@ var overflowNames = [...]string{
ir.OBITNOT: "bitwise complement",
}
// origConst returns an OLITERAL with orig n and value v.
func origConst(n ir.Node, v constant.Value) ir.Node {
// OrigConst returns an OLITERAL with orig n and value v.
func OrigConst(n ir.Node, v constant.Value) ir.Node {
lno := ir.SetPos(n)
v = convertVal(v, n.Type(), false)
base.Pos = lno
@ -623,12 +624,12 @@ func origConst(n ir.Node, v constant.Value) ir.Node {
return ir.NewConstExpr(v, n)
}
func origBoolConst(n ir.Node, v bool) ir.Node {
return origConst(n, constant.MakeBool(v))
func OrigBool(n ir.Node, v bool) ir.Node {
return OrigConst(n, constant.MakeBool(v))
}
func origIntConst(n ir.Node, v int64) ir.Node {
return origConst(n, constant.MakeInt64(v))
func OrigInt(n ir.Node, v int64) ir.Node {
return OrigConst(n, constant.MakeInt64(v))
}
// defaultlit on both nodes simultaneously;
@ -722,12 +723,12 @@ func defaultType(t *types.Type) *types.Type {
return nil
}
// indexconst checks if Node n contains a constant expression
// IndexConst checks if Node n contains a constant expression
// representable as a non-negative int and returns its value.
// If n is not a constant expression, not representable as an
// integer, or negative, it returns -1. If n is too large, it
// returns -2.
func indexconst(n ir.Node) int64 {
func IndexConst(n ir.Node) int64 {
if n.Op() != ir.OLITERAL {
return -1
}
@ -862,3 +863,82 @@ func nodeAndVal(n ir.Node) string {
}
return show
}
// evalunsafe evaluates a package unsafe operation (unsafe.Alignof,
// unsafe.Sizeof, or unsafe.Offsetof) and returns the result as an int64.
// It typechecks the operand as a side effect. On an invalid operand it
// reports an error and returns 0.
func evalunsafe(n ir.Node) int64 {
	switch n.Op() {
	case ir.OALIGNOF, ir.OSIZEOF:
		n := n.(*ir.UnaryExpr)
		n.X = Expr(n.X)
		n.X = DefaultLit(n.X, nil)
		tr := n.X.Type()
		if tr == nil {
			// Operand did not typecheck; error already reported.
			return 0
		}
		types.CalcSize(tr)
		if n.Op() == ir.OALIGNOF {
			return int64(tr.Align)
		}
		return tr.Width

	case ir.OOFFSETOF:
		// must be a selector.
		n := n.(*ir.UnaryExpr)
		if n.X.Op() != ir.OXDOT {
			base.Errorf("invalid expression %v", n)
			return 0
		}
		sel := n.X.(*ir.SelectorExpr)

		// Remember base of selector to find it back after dot insertion.
		// Since r->left may be mutated by typechecking, check it explicitly
		// first to track it correctly.
		sel.X = Expr(sel.X)
		sbase := sel.X

		tsel := Expr(sel)
		n.X = tsel
		if tsel.Type() == nil {
			return 0
		}
		switch tsel.Op() {
		case ir.ODOT, ir.ODOTPTR:
			break
		case ir.OCALLPART:
			base.Errorf("invalid expression %v: argument is a method value", n)
			return 0
		default:
			base.Errorf("invalid expression %v", n)
			return 0
		}

		// Sum offsets for dots until we reach sbase.
		// Typechecking may have inserted implicit field selections
		// (embedded fields); each contributes its own offset.
		var v int64
		var next ir.Node
		for r := tsel; r != sbase; r = next {
			switch r.Op() {
			case ir.ODOTPTR:
				// For Offsetof(s.f), s may itself be a pointer,
				// but accessing f must not otherwise involve
				// indirection via embedded pointer types.
				r := r.(*ir.SelectorExpr)
				if r.X != sbase {
					base.Errorf("invalid expression %v: selector implies indirection of embedded %v", n, r.X)
					return 0
				}
				fallthrough
			case ir.ODOT:
				r := r.(*ir.SelectorExpr)
				v += r.Offset
				next = r.X
			default:
				ir.Dump("unsafenmagic", tsel)
				base.Fatalf("impossible %v node after dot insertion", r.Op())
			}
		}
		return v
	}

	base.Fatalf("unexpected op %v", n.Op())
	return 0
}

View file

@ -0,0 +1,705 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import (
"fmt"
"strconv"
"strings"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
var DeclContext ir.Class // PEXTERN/PAUTO
// AssignDefn processes the left-hand side of the := statement defn.
// Blank identifiers are skipped, non-names and repeated names are
// diagnosed, and any name not already declared in the current block is
// declared (via Declare) with its Defn set to defn, replacing the
// corresponding entry in left. If := introduces no new variables and
// no other error was reported, that is diagnosed too.
func AssignDefn(left []ir.Node, defn ir.Node) {
	// Temporarily mark every LHS symbol so duplicates can be detected.
	for _, n := range left {
		if sym := n.Sym(); sym != nil {
			sym.SetUniq(true)
		}
	}

	declared, errors := 0, 0
	for i, n := range left {
		if ir.IsBlank(n) {
			continue
		}
		if !assignableName(n) {
			base.ErrorfAt(defn.Pos(), "non-name %v on left side of :=", n)
			errors++
			continue
		}
		if !n.Sym().Uniq() {
			base.ErrorfAt(defn.Pos(), "%v repeated on left side of :=", n.Sym())
			n.SetDiag(true)
			errors++
			continue
		}

		n.Sym().SetUniq(false)
		if n.Sym().Block == types.Block {
			// Already declared in this block: plain assignment, not a declaration.
			continue
		}

		declared++
		name := NewName(n.Sym())
		Declare(name, DeclContext)
		name.Defn = defn
		defn.PtrInit().Append(ir.NewDecl(base.Pos, ir.ODCL, name))
		left[i] = name
	}

	if declared == 0 && errors == 0 {
		base.ErrorfAt(defn.Pos(), "no new variables on left side of :=")
	}
}
// assignableName reports whether n is a named entity that may legally
// appear on the left side of a := declaration.
func assignableName(n ir.Node) bool {
	switch n.Op() {
	case ir.ONAME, ir.ONONAME, ir.OPACK, ir.OTYPE, ir.OLITERAL:
		return n.Sym() != nil
	default:
		return false
	}
}
// DeclFunc creates and returns a new function node for the function
// named sym with signature type expression tfn, which must be an OTFUNC
// node. It enters the function's body scope via StartFuncBody (so the
// caller is left in auto-declaration context and must eventually call
// FinishFuncBody) and typechecks the signature.
func DeclFunc(sym *types.Sym, tfn ir.Ntype) *ir.Func {
	if tfn.Op() != ir.OTFUNC {
		base.Fatalf("expected OTFUNC node, got %v", tfn)
	}

	fn := ir.NewFunc(base.Pos)
	fn.Nname = ir.NewFuncNameAt(base.Pos, sym, fn)
	fn.Nname.Defn = fn
	fn.Nname.Ntype = tfn
	ir.MarkFunc(fn.Nname)
	// Open the body scope first so parameter names are declared there,
	// then typecheck the signature expression.
	StartFuncBody(fn)
	fn.Nname.Ntype = typecheckNtype(fn.Nname.Ntype)
	return fn
}
// DeclVars declares the variables vl from a grammar production of the form
//
//	new_name_list (type | [type] = expr_list)
//
// with optional type expression t and initializer expressions el.
// It returns the initialization statements (ODCL and assignment nodes)
// to be placed before the declaration in the enclosing statement list,
// and reports assignment count mismatches.
func DeclVars(vl []*ir.Name, t ir.Ntype, el []ir.Node) []ir.Node {
	var init []ir.Node
	doexpr := len(el) > 0

	if len(el) == 1 && len(vl) > 1 {
		// Multi-value form: v1, v2, ... := f(). All variables share a
		// single OAS2 assignment as their defining statement.
		e := el[0]
		as2 := ir.NewAssignListStmt(base.Pos, ir.OAS2, nil, nil)
		as2.Rhs = []ir.Node{e}
		for _, v := range vl {
			as2.Lhs.Append(v)
			Declare(v, DeclContext)
			v.Ntype = t
			v.Defn = as2
			if ir.CurFunc != nil {
				init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
			}
		}

		return append(init, as2)
	}

	// One initializer per variable (or none).
	for i, v := range vl {
		var e ir.Node
		if doexpr {
			if i >= len(el) {
				base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
				break
			}
			e = el[i]
		}

		Declare(v, DeclContext)
		v.Ntype = t

		if e != nil || ir.CurFunc != nil || ir.IsBlank(v) {
			if ir.CurFunc != nil {
				init = append(init, ir.NewDecl(base.Pos, ir.ODCL, v))
			}
			as := ir.NewAssignStmt(base.Pos, v, e)
			init = append(init, as)
			if e != nil {
				v.Defn = as
			}
		}
	}

	if len(el) > len(vl) {
		base.Errorf("assignment mismatch: %d variables but %d values", len(vl), len(el))
	}
	return init
}
// Declare records that Node n declares symbol n.Sym in the specified
// declaration context (PEXTERN, PAUTO, PFUNC, PPARAM, PPARAMOUT, ...).
// It appends the name to the appropriate declaration list, pushes it
// into the current lexical scope, reports redeclarations, and finally
// auto-exports the symbol when applicable. Blank identifiers declare
// nothing and are ignored.
func Declare(n *ir.Name, ctxt ir.Class) {
	if ir.IsBlank(n) {
		return
	}

	s := n.Sym()

	// kludgy: typecheckok means we're past parsing. Eg genwrapper may declare out of package names later.
	if !inimport && !TypecheckAllowed && s.Pkg != types.LocalPkg {
		base.ErrorfAt(n.Pos(), "cannot declare name %v", s)
	}

	gen := 0
	if ctxt == ir.PEXTERN {
		if s.Name == "init" {
			base.ErrorfAt(n.Pos(), "cannot declare init - must be func")
		}
		if s.Name == "main" && s.Pkg.Name == "main" {
			base.ErrorfAt(n.Pos(), "cannot declare main - must be func")
		}
		Target.Externs = append(Target.Externs, n)
	} else {
		if ir.CurFunc == nil && ctxt == ir.PAUTO {
			base.Pos = n.Pos()
			base.Fatalf("automatic outside function")
		}
		if ir.CurFunc != nil && ctxt != ir.PFUNC && n.Op() == ir.ONAME {
			ir.CurFunc.Dcl = append(ir.CurFunc.Dcl, n)
		}
		// Assign a generation number (Vargen) to disambiguate identically
		// named locals/types; names containing "·" are compiler-generated
		// and skipped.
		if n.Op() == ir.OTYPE {
			declare_typegen++
			gen = declare_typegen
		} else if n.Op() == ir.ONAME && ctxt == ir.PAUTO && !strings.Contains(s.Name, "·") {
			vargen++
			gen = vargen
		}
		types.Pushdcl(s)
		n.Curfn = ir.CurFunc
	}

	if ctxt == ir.PAUTO {
		n.SetFrameOffset(0)
	}

	if s.Block == types.Block {
		// functype will print errors about duplicate function arguments.
		// Don't repeat the error here.
		if ctxt != ir.PPARAM && ctxt != ir.PPARAMOUT {
			Redeclared(n.Pos(), s, "in this block")
		}
	}

	s.Block = types.Block
	s.Lastlineno = base.Pos
	s.Def = n
	n.Vargen = int32(gen)
	n.Class_ = ctxt
	if ctxt == ir.PFUNC {
		n.Sym().SetFunc(true)
	}

	autoexport(n, ctxt)
}
// Export marks n for export (or reexport), appending it to the
// target's export list exactly once.
func Export(n *ir.Name) {
	sym := n.Sym()
	if sym.OnExportList() {
		return // already queued for export
	}
	sym.SetOnExportList(true)
	if base.Flag.E != 0 {
		fmt.Printf("export symbol %v\n", sym)
	}
	Target.Exports = append(Target.Exports, n)
}
// Redeclared emits a diagnostic about symbol s being redeclared at pos.
// where describes the scope of the previous declaration (e.g. "in this
// block").
func Redeclared(pos src.XPos, s *types.Sym, where string) {
	if !s.Lastlineno.IsKnown() {
		// Previous declaration came from a dot import; point at the import.
		pkgName := DotImportRefs[s.Def.(*ir.Ident)]
		base.ErrorfAt(pos, "%v redeclared %s\n"+
			"\t%v: previous declaration during import %q", s, where, base.FmtPos(pkgName.Pos()), pkgName.Pkg.Path)
	} else {
		prevPos := s.Lastlineno

		// When an import and a declaration collide in separate files,
		// present the import as the "redeclared", because the declaration
		// is visible where the import is, but not vice versa.
		// See issue 4510.
		if s.Def == nil {
			pos, prevPos = prevPos, pos
		}

		base.ErrorfAt(pos, "%v redeclared %s\n"+
			"\t%v: previous declaration", s, where, base.FmtPos(prevPos))
	}
}
// declare the function proper
// and declare the arguments.
// called in extern-declaration context
// returns in auto-declaration context.
//
// StartFuncBody saves the current Curfn/DeclContext on funcStack so
// that FinishFuncBody can restore them; the two calls must be paired.
func StartFuncBody(fn *ir.Func) {
	// change the declaration context from extern to auto
	funcStack = append(funcStack, funcStackEnt{ir.CurFunc, DeclContext})
	ir.CurFunc = fn
	DeclContext = ir.PAUTO

	// Open a new declaration scope for the function's parameters and locals.
	types.Markdcl()

	// Declare parameters from the syntactic type if present; otherwise
	// (e.g. for imported functions) from the already-built *types.Type.
	if fn.Nname.Ntype != nil {
		funcargs(fn.Nname.Ntype.(*ir.FuncType))
	} else {
		funcargs2(fn.Type())
	}
}
// finish the body.
// called in auto-declaration context.
// returns in extern-declaration context.
func FinishFuncBody() {
	// change the declaration context from auto to previous context
	types.Popdcl()
	last := len(funcStack) - 1
	e := funcStack[last]
	funcStack = funcStack[:last]
	ir.CurFunc, DeclContext = e.curfn, e.dclcontext
}
// CheckFuncStack reports an internal error if any StartFuncBody call
// was not balanced by a FinishFuncBody call.
func CheckFuncStack() {
	if depth := len(funcStack); depth != 0 {
		base.Fatalf("funcStack is non-empty: %v", depth)
	}
}
// turn a parsed function declaration into a type
func NewFuncType(nrecv *ir.Field, nparams, nresults []*ir.Field) *types.Type {
	// funarg converts one syntactic parameter into a *types.Field,
	// typechecking its type expression if it has not been resolved yet.
	funarg := func(n *ir.Field) *types.Field {
		lno := base.Pos
		base.Pos = n.Pos // attribute errors to the parameter's position

		if n.Ntype != nil {
			n.Type = typecheckNtype(n.Ntype).Type()
			n.Ntype = nil
		}

		f := types.NewField(n.Pos, n.Sym, n.Type)
		f.SetIsDDD(n.IsDDD)
		if n.Decl != nil {
			// Link the declared name to the field so later phases can
			// find the parameter's ONAME node.
			n.Decl.SetType(f.Type)
			f.Nname = n.Decl
		}

		base.Pos = lno
		return f
	}
	funargs := func(nn []*ir.Field) []*types.Field {
		res := make([]*types.Field, len(nn))
		for i, n := range nn {
			res[i] = funarg(n)
		}
		return res
	}

	var recv *types.Field
	if nrecv != nil {
		recv = funarg(nrecv)
	}

	t := types.NewSignature(types.LocalPkg, recv, funargs(nparams), funargs(nresults))
	checkdupfields("argument", t.Recvs().FieldSlice(), t.Params().FieldSlice(), t.Results().FieldSlice())
	return t
}
// convert a parsed id/type list into
// a type for struct/interface/arglist
func NewStructType(l []*ir.Field) *types.Type {
	lno := base.Pos
	fields := make([]*types.Field, len(l))
	for i, n := range l {
		base.Pos = n.Pos // attribute errors to the field's position

		// Resolve the field's type expression if not already done.
		if n.Ntype != nil {
			n.Type = typecheckNtype(n.Ntype).Type()
			n.Ntype = nil
		}
		f := types.NewField(n.Pos, n.Sym, n.Type)
		if n.Embedded {
			checkembeddedtype(n.Type)
			f.Embedded = 1
		}
		f.Note = n.Note
		fields[i] = f
	}
	checkdupfields("field", fields)

	base.Pos = lno
	return types.NewStruct(types.LocalPkg, fields)
}
// Add a method, declared as a function.
// - msym is the method symbol
// - t is function type (with receiver)
// Returns a pointer to the existing or added Field; or nil if there's an error.
func addmethod(n *ir.Func, msym *types.Sym, t *types.Type, local, nointerface bool) *types.Field {
	if msym == nil {
		base.Fatalf("no method symbol")
	}

	// get parent type sym
	rf := t.Recv() // ptr to this structure
	if rf == nil {
		base.Errorf("missing receiver")
		return nil
	}

	mt := types.ReceiverBaseType(rf.Type)
	if mt == nil || mt.Sym() == nil {
		// Receiver is not a (pointer to a) named type; diagnose exactly why.
		pa := rf.Type
		t := pa
		if t != nil && t.IsPtr() {
			if t.Sym() != nil {
				base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
				return nil
			}
			t = t.Elem()
		}

		switch {
		case t == nil || t.Broke():
			// rely on typecheck having complained before
		case t.Sym() == nil:
			base.Errorf("invalid receiver type %v (%v is not a defined type)", pa, t)
		case t.IsPtr():
			base.Errorf("invalid receiver type %v (%v is a pointer type)", pa, t)
		case t.IsInterface():
			base.Errorf("invalid receiver type %v (%v is an interface type)", pa, t)
		default:
			// Should have picked off all the reasons above,
			// but just in case, fall back to generic error.
			base.Errorf("invalid receiver type %v (%L / %L)", pa, pa, t)
		}
		return nil
	}

	if local && mt.Sym().Pkg != types.LocalPkg {
		base.Errorf("cannot define new methods on non-local type %v", mt)
		return nil
	}

	if msym.IsBlank() {
		// Blank methods are parsed but never callable; nothing to record.
		return nil
	}

	if mt.IsStruct() {
		// A method may not have the same name as a field of the struct.
		for _, f := range mt.Fields().Slice() {
			if f.Sym == msym {
				base.Errorf("type %v has both field and method named %v", mt, msym)
				f.SetBroke(true)
				return nil
			}
		}
	}

	// If a method with this name already exists, require identical signatures
	// and return the existing field instead of appending a duplicate.
	for _, f := range mt.Methods().Slice() {
		if msym.Name != f.Sym.Name {
			continue
		}
		// types.Identical only checks that incoming and result parameters match,
		// so explicitly check that the receiver parameters match too.
		if !types.Identical(t, f.Type) || !types.Identical(t.Recv().Type, f.Type.Recv().Type) {
			base.Errorf("method redeclared: %v.%v\n\t%v\n\t%v", mt, msym, f.Type, t)
		}
		return f
	}

	f := types.NewField(base.Pos, msym, t)
	f.Nname = n.Nname
	f.SetNointerface(nointerface)

	mt.Methods().Append(f)
	return f
}
// autoexport queues n for export (and for the asm header, if enabled)
// when its declaration context makes it visible outside the package.
func autoexport(n *ir.Name, ctxt ir.Class) {
	sym := n.Sym()

	// Only names declared in the local package can be exported.
	if sym.Pkg != types.LocalPkg {
		return
	}
	// Only top-level externs and functions are candidates.
	if DeclContext != ir.PEXTERN || (ctxt != ir.PEXTERN && ctxt != ir.PFUNC) {
		return
	}
	// Methods are exported along with their receiver type, not here.
	if n.Type() != nil && n.Type().IsKind(types.TFUNC) && ir.IsMethod(n) {
		return
	}

	if types.IsExported(sym.Name) || initname(sym.Name) {
		Export(n)
	}
	if base.Flag.AsmHdr != "" && !sym.Asm() {
		sym.SetAsm(true)
		Target.Asms = append(Target.Asms, n)
	}
}
// checkdupfields emits errors for duplicately named fields or methods in
// a list of struct or interface types.
func checkdupfields(what string, fss ...[]*types.Field) {
	seen := make(map[*types.Sym]bool)
	for _, fs := range fss {
		for _, f := range fs {
			sym := f.Sym
			if sym == nil || sym.IsBlank() {
				// Unnamed and blank fields can never collide.
				continue
			}
			if !seen[sym] {
				seen[sym] = true
				continue
			}
			base.ErrorfAt(f.Pos, "duplicate %s %s", what, sym.Name)
		}
	}
}
// structs, functions, and methods.
// they don't belong here, but where do they belong?
//
// checkembeddedtype reports errors for types that may not legally be
// embedded in a struct or interface.
func checkembeddedtype(t *types.Type) {
	if t == nil {
		return
	}

	if t.Sym() == nil && t.IsPtr() {
		// Unnamed pointer: look through it to check for *interface.
		t = t.Elem()
		if t.IsInterface() {
			base.Errorf("embedded type cannot be a pointer to interface")
		}
	}

	switch {
	case t.IsPtr() || t.IsUnsafePtr():
		base.Errorf("embedded type cannot be a pointer")
	case t.Kind() == types.TFORW && !t.ForwardType().Embedlineno.IsKnown():
		// Record where this forward-declared type was first embedded so
		// the error can be reported if the declaration never appears.
		t.ForwardType().Embedlineno = base.Pos
	}
}
// declare individual names - var, typ, const

// declare_typegen counts locally declared types; Declare uses it to
// assign each local type a distinct Vargen number.
var declare_typegen int

// fakeRecvField returns an anonymous field of the fake receiver type,
// used as the receiver of interface methods.
func fakeRecvField() *types.Field {
	return types.NewField(src.NoXPos, nil, types.FakeRecvType())
}

var funcStack []funcStackEnt // stack of previous values of Curfn/dclcontext

// funcStackEnt is one saved (Curfn, DeclContext) pair, pushed by
// StartFuncBody and popped by FinishFuncBody.
type funcStackEnt struct {
	curfn      *ir.Func
	dclcontext ir.Class
}
// funcarg declares the name for one syntactic parameter n with class
// ctxt (PPARAM or PPARAMOUT), creating its ONAME node and assigning it
// the next Vargen number. Unnamed parameters are skipped.
func funcarg(n *ir.Field, ctxt ir.Class) {
	if n.Sym == nil {
		return
	}

	name := ir.NewNameAt(n.Pos, n.Sym)
	n.Decl = name
	name.Ntype = n.Ntype
	name.SetIsDDD(n.IsDDD)
	Declare(name, ctxt)

	vargen++
	n.Decl.Vargen = int32(vargen)
}
// funcarg2 declares the name for parameter field f with class ctxt,
// creating its ONAME node and linking it back to the field. Unnamed
// parameters are skipped.
func funcarg2(f *types.Field, ctxt ir.Class) {
	if f.Sym == nil {
		return
	}
	name := ir.NewNameAt(f.Pos, f.Sym)
	f.Nname = name
	name.SetType(f.Type)
	name.SetIsDDD(f.IsDDD())
	Declare(name, ctxt)
}
// funcargs declares the receiver, parameters, and results of the
// syntactic function type nt, assigning result variables the low
// Vargen numbers and naming anonymous/blank results so later phases
// can refer to them.
func funcargs(nt *ir.FuncType) {
	if nt.Op() != ir.OTFUNC {
		base.Fatalf("funcargs %v", nt.Op())
	}

	// re-start the variable generation number
	// we want to use small numbers for the return variables,
	// so let them have the chunk starting at 1.
	//
	// TODO(mdempsky): This is ugly, and only necessary because
	// esc.go uses Vargen to figure out result parameters' index
	// within the result tuple.
	vargen = len(nt.Results)

	// declare the receiver and in arguments.
	if nt.Recv != nil {
		funcarg(nt.Recv, ir.PPARAM)
	}
	for _, n := range nt.Params {
		funcarg(n, ir.PPARAM)
	}

	// Save the counter so results get numbers starting at 1 (see above),
	// then restore it afterwards.
	oldvargen := vargen
	vargen = 0

	// declare the out arguments.
	gen := len(nt.Params)
	for _, n := range nt.Results {
		if n.Sym == nil {
			// Name so that escape analysis can track it. ~r stands for 'result'.
			n.Sym = LookupNum("~r", gen)
			gen++
		}
		if n.Sym.IsBlank() {
			// Give it a name so we can assign to it during return. ~b stands for 'blank'.
			// The name must be different from ~r above because if you have
			//  func f() (_ int)
			//  func g() int
			// f is allowed to use a plain 'return' with no arguments, while g is not.
			// So the two cases must be distinguished.
			n.Sym = LookupNum("~b", gen)
			gen++
		}

		funcarg(n, ir.PPARAMOUT)
	}

	vargen = oldvargen
}
// Same as funcargs, except run over an already constructed TFUNC.
// This happens during import, where the hidden_fndcl rule has
// used functype directly to parse the function's type.
func funcargs2(t *types.Type) {
	if t.Kind() != types.TFUNC {
		base.Fatalf("funcargs2 %v", t)
	}

	declareAll := func(fields []*types.Field, ctxt ir.Class) {
		for _, f := range fields {
			funcarg2(f, ctxt)
		}
	}
	declareAll(t.Recvs().Fields().Slice(), ir.PPARAM)
	declareAll(t.Params().Fields().Slice(), ir.PPARAM)
	declareAll(t.Results().Fields().Slice(), ir.PPARAMOUT)
}
// initname reports whether s is the reserved name of package
// initialization functions.
func initname(s string) bool {
	switch s {
	case "init":
		return true
	}
	return false
}
// tointerface builds the interface type described by the parsed method
// list nmethods, resolving each method's type expression. An empty
// list yields the predeclared empty interface type.
func tointerface(nmethods []*ir.Field) *types.Type {
	if len(nmethods) == 0 {
		return types.Types[types.TINTER]
	}

	lno := base.Pos

	methods := make([]*types.Field, len(nmethods))
	for i, n := range nmethods {
		base.Pos = n.Pos // attribute errors to the method's position
		if n.Ntype != nil {
			n.Type = typecheckNtype(n.Ntype).Type()
			n.Ntype = nil
		}
		methods[i] = types.NewField(n.Pos, n.Sym, n.Type)
	}

	base.Pos = lno
	return types.NewInterface(types.LocalPkg, methods)
}
var vargen int
// Temp allocates a new anonymous temporary of type t in the current
// function (ir.CurFunc) at the current position (base.Pos).
func Temp(t *types.Type) *ir.Name {
	return TempAt(base.Pos, ir.CurFunc, t)
}
// make a new Node off the books
//
// TempAt allocates a new anonymous temporary of type t in function
// curfn at position pos. The temporary is marked used and auto-temp,
// never escapes, and is appended to curfn.Dcl; its symbol is not
// entered in any scope.
func TempAt(pos src.XPos, curfn *ir.Func, t *types.Type) *ir.Name {
	if curfn == nil {
		base.Fatalf("no curfn for tempAt")
	}
	if curfn.Op() == ir.OCLOSURE {
		ir.Dump("tempAt", curfn)
		base.Fatalf("adding tempAt to wrong closure function")
	}
	if t == nil {
		base.Fatalf("tempAt called with nil type")
	}

	// Built by hand rather than Lookup so the name never collides with
	// user identifiers ("." is not legal in source-level names).
	s := &types.Sym{
		Name: autotmpname(len(curfn.Dcl)),
		Pkg:  types.LocalPkg,
	}
	n := ir.NewNameAt(pos, s)
	s.Def = n
	n.SetType(t)
	n.Class_ = ir.PAUTO
	n.SetEsc(ir.EscNever)
	n.Curfn = curfn
	n.SetUsed(true)
	n.SetAutoTemp(true)
	curfn.Dcl = append(curfn.Dcl, n)

	types.CalcSize(t)

	return n
}
// autotmpname returns the name for an autotmp variable numbered n.
func autotmpname(n int) string {
	// Give each tmp a different name so that they can be registerized.
	// Add a preceding . to avoid clashing with legal names.
	const prefix = ".autotmp_"

	// Build the name in a byte buffer sized for the common case,
	// then intern the result so equal names share storage.
	buf := make([]byte, 0, len(prefix)+8)
	buf = append(buf, prefix...)
	buf = strconv.AppendInt(buf, int64(n), 10)
	return types.InternString(buf)
}
// f is method type, with receiver.
// return function type, receiver as first argument (or not).
//
// NewMethodType flattens method type f into a plain function type:
// when receiver is non-nil it becomes the first (unnamed) parameter,
// and f's parameters (preserving ...) and results follow.
func NewMethodType(f *types.Type, receiver *types.Type) *types.Type {
	inLen := f.Params().Fields().Len()
	if receiver != nil {
		inLen++
	}
	in := make([]*ir.Field, 0, inLen)

	if receiver != nil {
		d := ir.NewField(base.Pos, nil, nil, receiver)
		in = append(in, d)
	}

	for _, t := range f.Params().Fields().Slice() {
		d := ir.NewField(base.Pos, nil, nil, t.Type)
		d.IsDDD = t.IsDDD() // preserve variadic-ness of the last parameter
		in = append(in, d)
	}

	outLen := f.Results().Fields().Len()
	out := make([]*ir.Field, 0, outLen)
	for _, t := range f.Results().Fields().Slice() {
		d := ir.NewField(base.Pos, nil, nil, t.Type)
		out = append(out, d)
	}

	return NewFuncType(nil, in, out)
}

View file

@ -0,0 +1,79 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import (
"go/constant"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
// importalias declares symbol s as an imported type alias with type t.
// ipkg is the package being imported
func importalias(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
	return importobj(ipkg, pos, s, ir.OTYPE, ir.PEXTERN, t)
}
// importconst declares symbol s as an imported constant with type t and value val.
// ipkg is the package being imported
func importconst(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type, val constant.Value) *ir.Name {
	n := importobj(ipkg, pos, s, ir.OLITERAL, ir.PEXTERN, t)
	n.SetVal(val)
	return n
}
// importfunc declares symbol s as an imported function with type t.
// ipkg is the package being imported
func importfunc(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
	n := importobj(ipkg, pos, s, ir.ONAME, ir.PFUNC, t)

	// Create the Func node and link it to the name in both directions,
	// mirroring how locally declared functions are represented.
	fn := ir.NewFunc(pos)
	fn.SetType(t)
	n.SetFunc(fn)
	fn.Nname = n

	return n
}
// importobj declares symbol s as an imported object representable by op.
// ipkg is the package being imported
func importobj(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class, t *types.Type) *ir.Name {
	n := importsym(ipkg, pos, s, op, ctxt)
	n.SetType(t)
	if ctxt == ir.PFUNC {
		n.Sym().SetFunc(true)
	}
	return n
}
// importsym creates the declaration node for imported symbol s with
// op and class ctxt, records it as the symbol's package-level
// definition, and notes which package the import came from. It is a
// fatal error if s already has a package-level definition.
func importsym(ipkg *types.Pkg, pos src.XPos, s *types.Sym, op ir.Op, ctxt ir.Class) *ir.Name {
	if n := s.PkgDef(); n != nil {
		base.Fatalf("importsym of symbol that already exists: %v", n)
	}

	n := ir.NewDeclNameAt(pos, op, s)
	n.Class_ = ctxt // TODO(mdempsky): Move this into NewDeclNameAt too?
	s.SetPkgDef(n)
	s.Importdef = ipkg
	return n
}
// importtype returns the named type declared by symbol s.
// If no such type has been declared yet, a forward declaration is returned.
// ipkg is the package being imported
func importtype(ipkg *types.Pkg, pos src.XPos, s *types.Sym) *ir.Name {
	n := importsym(ipkg, pos, s, ir.OTYPE, ir.PEXTERN)
	n.SetType(types.NewNamed(n))
	return n
}
// importvar declares symbol s as an imported variable with type t.
// ipkg is the package being imported
func importvar(ipkg *types.Pkg, pos src.XPos, s *types.Sym, t *types.Type) *ir.Name {
	return importobj(ipkg, pos, s, ir.ONAME, ir.PEXTERN, t)
}

View file

@ -0,0 +1,398 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"fmt"
)
// package all the arguments that match a ... T parameter into a []T.
//
// With no arguments the result is a typed nil; otherwise it is an
// implicit composite literal, typechecked before returning.
func MakeDotArgs(typ *types.Type, args []ir.Node) ir.Node {
	var n ir.Node
	if len(args) == 0 {
		n = NodNil()
		n.SetType(typ)
	} else {
		lit := ir.NewCompLitExpr(base.Pos, ir.OCOMPLIT, ir.TypeNode(typ).(ir.Ntype), nil)
		lit.List.Append(args...)
		lit.SetImplicit(true)
		n = lit
	}

	n = Expr(n)
	if n.Type() == nil {
		base.Fatalf("mkdotargslice: typecheck failed")
	}
	return n
}
// FixVariadicCall rewrites calls to variadic functions to use an
// explicit ... argument if one is not already present.
func FixVariadicCall(call *ir.CallExpr) {
	fntype := call.X.Type()
	if !fntype.IsVariadic() || call.IsDDD {
		// Not variadic, or the caller already wrote f(args...).
		return
	}

	vi := fntype.NumParams() - 1
	vt := fntype.Params().Field(vi).Type

	// Pack the trailing arguments into a single slice argument.
	args := call.Args
	extra := args[vi:]
	slice := MakeDotArgs(vt, extra)
	for i := range extra {
		extra[i] = nil // allow GC
	}

	call.Args.Set(append(args[:vi], slice))
	call.IsDDD = true
}
// ClosureType returns the struct type used to hold all the information
// needed in the closure for clo (clo must be a OCLOSURE node).
// The address of a variable of the returned type can be cast to a func.
func ClosureType(clo *ir.ClosureExpr) *types.Type {
	// Create closure in the form of a composite literal.
	// supposing the closure captures an int i and a string s
	// and has one float64 argument and no results,
	// the generated code looks like:
	//
	//	clos = &struct{.F uintptr; i *int; s *string}{func.1, &i, &s}
	//
	// The use of the struct provides type information to the garbage
	// collector so that it can walk the closure. We could use (in this case)
	// [3]unsafe.Pointer instead, but that would leave the gc in the dark.
	// The information appears in the binary in the form of type descriptors;
	// the struct is unnamed so that closures in multiple packages with the
	// same struct type can share the descriptor.
	fields := []*ir.Field{
		ir.NewField(base.Pos, Lookup(".F"), nil, types.Types[types.TUINTPTR]),
	}
	for _, v := range clo.Func.ClosureVars {
		typ := v.Type()
		if !v.Byval() {
			// By-reference captures store a pointer to the variable.
			typ = types.NewPtr(typ)
		}
		fields = append(fields, ir.NewField(base.Pos, v.Sym(), nil, typ))
	}
	typ := NewStructType(fields)
	typ.SetNoalg(true)
	return typ
}
// PartialCallType returns the struct type used to hold all the information
// needed in the closure for n (n must be a OCALLPART node).
// The address of a variable of the returned type can be cast to a func.
func PartialCallType(n *ir.CallPartExpr) *types.Type {
	// F is the method entry point; R is the bound receiver value.
	t := NewStructType([]*ir.Field{
		ir.NewField(base.Pos, Lookup("F"), nil, types.Types[types.TUINTPTR]),
		ir.NewField(base.Pos, Lookup("R"), nil, n.X.Type()),
	})
	t.SetNoalg(true)
	return t
}
// CaptureVars is called in a separate phase after all typechecking is done.
// It decides whether each variable captured by a closure should be captured
// by value or by reference.
// We use value capturing for values <= 128 bytes that are never reassigned
// after capturing (effectively constant).
func CaptureVars(fn *ir.Func) {
	lno := base.Pos
	base.Pos = fn.Pos()
	cvars := fn.ClosureVars
	out := cvars[:0] // filter cvars in place
	for _, v := range cvars {
		if v.Type() == nil {
			// If v.Type is nil, it means v looked like it
			// was going to be used in the closure, but
			// isn't. This happens in struct literals like
			// s{f: x} where we can't distinguish whether
			// f is a field identifier or expression until
			// resolving s.
			continue
		}
		out = append(out, v)

		// type check the & of closed variables outside the closure,
		// so that the outer frame also grabs them and knows they escape.
		types.CalcSize(v.Type())

		var outer ir.Node
		outer = v.Outer
		outermost := v.Defn.(*ir.Name)

		// out parameters will be assigned to implicitly upon return.
		if outermost.Class_ != ir.PPARAMOUT && !outermost.Name().Addrtaken() && !outermost.Name().Assigned() && v.Type().Width <= 128 {
			v.SetByval(true)
		} else {
			// Capture by reference: take the address of the outer variable.
			outermost.Name().SetAddrtaken(true)
			outer = NodAddr(outer)
		}

		if base.Flag.LowerM > 1 {
			var name *types.Sym
			if v.Curfn != nil && v.Curfn.Nname != nil {
				name = v.Curfn.Sym()
			}
			how := "ref"
			if v.Byval() {
				how = "value"
			}
			base.WarnfAt(v.Pos(), "%v capturing by %s: %v (addr=%v assign=%v width=%d)", name, how, v.Sym(), outermost.Name().Addrtaken(), outermost.Name().Assigned(), int32(v.Type().Width))
		}

		outer = Expr(outer)
		fn.ClosureEnter.Append(outer)
	}

	fn.ClosureVars = out
	base.Pos = lno
}
// typecheckclosure typechecks an OCLOSURE node. It also creates the named
// function associated with the closure.
// TODO: This creation of the named function should probably really be done in a
// separate pass from type-checking.
func typecheckclosure(clo *ir.ClosureExpr, top int) {
	fn := clo.Func
	// Set current associated iota value, so iota can be used inside
	// function in ConstSpec, see issue #22344
	if x := getIotaValue(); x >= 0 {
		fn.Iota = x
	}

	fn.ClosureType = check(fn.ClosureType, ctxType)
	clo.SetType(fn.ClosureType.Type())
	fn.SetClosureCalled(top&ctxCallee != 0)

	// Do not typecheck fn twice, otherwise, we will end up pushing
	// fn to Target.Decls multiple times, causing initLSym called twice.
	// See #30709
	if fn.Typecheck() == 1 {
		return
	}

	// Mark captured variables and reset their Assigned state for
	// straight-line code preceding the first capture.
	for _, ln := range fn.ClosureVars {
		n := ln.Defn
		if !n.Name().Captured() {
			n.Name().SetCaptured(true)
			if n.Name().Decldepth == 0 {
				base.Fatalf("typecheckclosure: var %v does not have decldepth assigned", n)
			}

			// Ignore assignments to the variable in straightline code
			// preceding the first capturing by a closure.
			if n.Name().Decldepth == decldepth {
				n.Name().SetAssigned(false)
			}
		}
	}

	fn.Nname.SetSym(closurename(ir.CurFunc))
	ir.MarkFunc(fn.Nname)
	Func(fn)

	// Type check the body now, but only if we're inside a function.
	// At top level (in a variable initialization: curfn==nil) we're not
	// ready to type check code yet; we'll check it later, because the
	// underlying closure function we create is added to Target.Decls.
	if ir.CurFunc != nil && clo.Type() != nil {
		oldfn := ir.CurFunc
		ir.CurFunc = fn
		olddd := decldepth
		decldepth = 1
		Stmts(fn.Body)
		decldepth = olddd
		ir.CurFunc = oldfn
	}

	Target.Decls = append(Target.Decls, fn)
}
// Lazy typechecking of imported bodies. For local functions, caninl will set ->typecheck
// because they're a copy of an already checked body.
func ImportedBody(fn *ir.Func) {
	lno := ir.SetPos(fn.Nname)

	// Load the inline body from export data if not already present.
	ImportBody(fn)

	// typecheckinl is only for imported functions;
	// their bodies may refer to unsafe as long as the package
	// was marked safe during import (which was checked then).
	// the ->inl of a local function has been typechecked before caninl copied it.
	pkg := fnpkg(fn.Nname)

	if pkg == types.LocalPkg || pkg == nil {
		return // typecheckinl on local function
	}

	if base.Flag.LowerM > 2 || base.Debug.Export != 0 {
		fmt.Printf("typecheck import [%v] %L { %v }\n", fn.Sym(), fn, ir.Nodes(fn.Inl.Body))
	}

	savefn := ir.CurFunc
	ir.CurFunc = fn
	Stmts(fn.Inl.Body)
	ir.CurFunc = savefn

	// During expandInline (which imports fn.Func.Inl.Body),
	// declarations are added to fn.Func.Dcl by funcHdr(). Move them
	// to fn.Func.Inl.Dcl for consistency with how local functions
	// behave. (Append because typecheckinl may be called multiple
	// times.)
	fn.Inl.Dcl = append(fn.Inl.Dcl, fn.Dcl...)
	fn.Dcl = nil

	base.Pos = lno
}
// Get the function's package. For ordinary functions it's on the ->sym, but for imported methods
// the ->sym can be re-used in the local package, so peel it off the receiver's type.
func fnpkg(fn *ir.Name) *types.Pkg {
	if !ir.IsMethod(fn) {
		// non-method
		return fn.Sym().Pkg
	}

	// method: recover the package from the receiver's base type.
	rcvr := fn.Type().Recv().Type
	if rcvr.IsPtr() {
		rcvr = rcvr.Elem()
	}
	if rcvr.Sym() == nil {
		base.Fatalf("receiver with no sym: [%v] %L  (%v)", fn.Sym(), fn, rcvr)
	}
	return rcvr.Sym().Pkg
}
// CaptureVarsComplete is set to true when the capturevars phase is done.
// Later phases may consult it to know whether closure variables have
// already been resolved to by-value or by-reference captures.
var CaptureVarsComplete bool
// closurename generates a new unique name for a closure within
// outerfunc.
func closurename(outerfunc *ir.Func) *types.Sym {
	outer := "glob."
	prefix := "func"
	gen := &globClosgen // fall back to the global counter at top level

	if outerfunc != nil {
		if outerfunc.OClosure != nil {
			// Nested closure: the outer name already carries a prefix.
			prefix = ""
		}

		outer = ir.FuncName(outerfunc)

		// There may be multiple functions named "_". In those
		// cases, we can't use their individual Closgens as it
		// would lead to name clashes.
		if !ir.IsBlank(outerfunc.Nname) {
			gen = &outerfunc.Closgen
		}
	}

	*gen++
	return Lookup(fmt.Sprintf("%s.%s%d", outer, prefix, *gen))
}
// globClosgen is like Func.Closgen, but for the global scope.
// It numbers closures declared outside any function (and those whose
// enclosing function is blank-named; see closurename).
var globClosgen int32
// makepartialcall returns a DCLFUNC node representing the wrapper function (*-fm) needed
// for partial calls.
func makepartialcall(dot *ir.SelectorExpr, t0 *types.Type, meth *types.Sym) *ir.Func {
	rcvrtype := dot.X.Type()
	sym := ir.MethodSymSuffix(rcvrtype, meth, "-fm")

	if sym.Uniq() {
		// Wrapper already generated for this receiver/method pair.
		return sym.Def.(*ir.Func)
	}
	sym.SetUniq(true)

	savecurfn := ir.CurFunc
	saveLineNo := base.Pos
	ir.CurFunc = nil

	// Set line number equal to the line number where the method is declared.
	var m *types.Field
	if lookdot0(meth, rcvrtype, &m, false) == 1 && m.Pos.IsKnown() {
		base.Pos = m.Pos
	}
	// Note: !m.Pos.IsKnown() happens for method expressions where
	// the method is implicitly declared. The Error method of the
	// built-in error type is one such method.  We leave the line
	// number at the use of the method expression in this
	// case. See issue 29389.

	// The wrapper takes the method's parameters and results, but no receiver.
	tfn := ir.NewFuncType(base.Pos, nil,
		NewFuncParams(t0.Params(), true),
		NewFuncParams(t0.Results(), false))

	fn := DeclFunc(sym, tfn)
	fn.SetDupok(true)       // the same wrapper may be generated in multiple packages
	fn.SetNeedctxt(true)    // the receiver is read out of the closure context

	// Declare and initialize variable holding receiver.
	cr := ir.NewClosureRead(rcvrtype, types.Rnd(int64(types.PtrSize), int64(rcvrtype.Align)))
	ptr := NewName(Lookup(".this"))
	Declare(ptr, ir.PAUTO)
	ptr.SetUsed(true)
	var body []ir.Node
	if rcvrtype.IsPtr() || rcvrtype.IsInterface() {
		ptr.SetType(rcvrtype)
		body = append(body, ir.NewAssignStmt(base.Pos, ptr, cr))
	} else {
		// Value receiver: store its address so the method call can
		// take the receiver by pointer.
		ptr.SetType(types.NewPtr(rcvrtype))
		body = append(body, ir.NewAssignStmt(base.Pos, ptr, NodAddr(cr)))
	}

	// Forward all wrapper parameters to the underlying method call.
	call := ir.NewCallExpr(base.Pos, ir.OCALL, ir.NewSelectorExpr(base.Pos, ir.OXDOT, ptr, meth), nil)
	call.Args.Set(ir.ParamNames(tfn.Type()))
	call.IsDDD = tfn.Type().IsVariadic()
	if t0.NumResults() != 0 {
		ret := ir.NewReturnStmt(base.Pos, nil)
		ret.Results = []ir.Node{call}
		body = append(body, ret)
	} else {
		body = append(body, call)
	}

	fn.Body.Set(body)
	FinishFuncBody()

	Func(fn)
	// Need to typecheck the body of the just-generated wrapper.
	// typecheckslice() requires that Curfn is set when processing an ORETURN.
	ir.CurFunc = fn
	Stmts(fn.Body)
	sym.Def = fn
	Target.Decls = append(Target.Decls, fn)
	ir.CurFunc = savecurfn
	base.Pos = saveLineNo

	return fn
}
// typecheckpartialcall converts a method value expression n (a method
// selector on a value) into an OCALLPART node backed by a generated
// top-level wrapper function.
func typecheckpartialcall(n ir.Node, sym *types.Sym) *ir.CallPartExpr {
	if op := n.Op(); op != ir.ODOTINTER && op != ir.ODOTMETH {
		base.Fatalf("invalid typecheckpartialcall")
	}
	dot := n.(*ir.SelectorExpr)

	// Create top-level function.
	fn := makepartialcall(dot, dot.Type(), sym)
	fn.SetWrapper(true)

	return ir.NewCallPartExpr(dot.Pos(), dot.X, dot.Selection, fn)
}

View file

@ -199,16 +199,11 @@
// they're expected to change much more rapidly, so they're omitted
// here. See exportWriter's varExt/funcExt/etc methods for details.
package gc
package typecheck
import (
"bufio"
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/goobj"
"cmd/internal/src"
"crypto/md5"
"encoding/binary"
"fmt"
@ -217,6 +212,12 @@ import (
"math/big"
"sort"
"strings"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/goobj"
"cmd/internal/src"
)
// Current indexed export format version. Increase with each format change.
@ -245,7 +246,7 @@ const (
interfaceType
)
func iexport(out *bufio.Writer) {
func WriteExports(out *bufio.Writer) {
p := iexporter{
allPkgs: map[*types.Pkg]bool{},
stringIndex: map[string]uint64{},
@ -455,7 +456,7 @@ func (p *iexporter) doDecl(n *ir.Name) {
case ir.OLITERAL:
// Constant.
// TODO(mdempsky): Do we still need this typecheck? If so, why?
n = typecheck(n, ctxExpr).(*ir.Name)
n = Expr(n).(*ir.Name)
w.tag('C')
w.pos(n.Pos())
w.value(n.Type(), n.Val())

View file

@ -5,16 +5,9 @@
// Indexed package import.
// See iexport.go for the export data format.
package gc
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/goobj"
"cmd/internal/obj"
"cmd/internal/src"
"encoding/binary"
"fmt"
"go/constant"
@ -22,6 +15,14 @@ import (
"math/big"
"os"
"strings"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/bio"
"cmd/internal/goobj"
"cmd/internal/obj"
"cmd/internal/src"
)
// An iimporterAndOffset identifies an importer and an offset within
@ -32,9 +33,9 @@ type iimporterAndOffset struct {
}
var (
// declImporter maps from imported identifiers to an importer
// DeclImporter maps from imported identifiers to an importer
// and offset where that identifier's declaration can be read.
declImporter = map[*types.Sym]iimporterAndOffset{}
DeclImporter = map[*types.Sym]iimporterAndOffset{}
// inlineImporter is like declImporter, but for inline bodies
// for function and method symbols.
@ -51,7 +52,7 @@ func expandDecl(n ir.Node) ir.Node {
return n.(*ir.Name)
}
r := importReaderFor(id.Sym(), declImporter)
r := importReaderFor(id.Sym(), DeclImporter)
if r == nil {
// Can happen if user tries to reference an undeclared name.
return n
@ -60,7 +61,7 @@ func expandDecl(n ir.Node) ir.Node {
return r.doDecl(n.Sym())
}
func expandInline(fn *ir.Func) {
func ImportBody(fn *ir.Func) {
if fn.Inl.Body != nil {
return
}
@ -105,7 +106,7 @@ func (r *intReader) uint64() uint64 {
return i
}
func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) {
func ReadImports(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType) {
ird := &intReader{in, pkg}
version := ird.uint64()
@ -170,8 +171,8 @@ func iimport(pkg *types.Pkg, in *bio.Reader) (fingerprint goobj.FingerprintType)
s := pkg.Lookup(p.stringAt(ird.uint64()))
off := ird.uint64()
if _, ok := declImporter[s]; !ok {
declImporter[s] = iimporterAndOffset{p, off}
if _, ok := DeclImporter[s]; !ok {
DeclImporter[s] = iimporterAndOffset{p, off}
}
}
}
@ -705,9 +706,9 @@ func (r *importReader) doInline(fn *ir.Func) {
base.Fatalf("%v already has inline body", fn)
}
funchdr(fn)
StartFuncBody(fn)
body := r.stmtList()
funcbody()
FinishFuncBody()
if body == nil {
//
// Make sure empty body is not interpreted as
@ -778,7 +779,7 @@ func (r *importReader) caseList(sw ir.Node) []ir.Node {
// names after import. That's okay: swt.go only needs
// Sym for diagnostics anyway.
caseVar := ir.NewNameAt(cas.Pos(), r.ident())
declare(caseVar, dclcontext)
Declare(caseVar, DeclContext)
cas.Vars = []ir.Node{caseVar}
caseVar.Defn = sw.(*ir.SwitchStmt).Tag
}
@ -820,7 +821,7 @@ func (r *importReader) node() ir.Node {
pos := r.pos()
typ := r.typ()
n := npos(pos, nodnil())
n := npos(pos, NodNil())
n.SetType(typ)
return n
@ -959,7 +960,7 @@ func (r *importReader) node() ir.Node {
return ir.NewUnaryExpr(r.pos(), op, r.expr())
case ir.OADDR:
return nodAddrAt(r.pos(), r.expr())
return NodAddrAt(r.pos(), r.expr())
case ir.ODEREF:
return ir.NewStarExpr(r.pos(), r.expr())
@ -991,7 +992,7 @@ func (r *importReader) node() ir.Node {
lhs := ir.NewDeclNameAt(pos, ir.ONAME, r.ident())
lhs.SetType(r.typ())
declare(lhs, ir.PAUTO)
Declare(lhs, ir.PAUTO)
var stmts ir.Nodes
stmts.Append(ir.NewDecl(base.Pos, ir.ODCL, lhs))
@ -1089,12 +1090,12 @@ func (r *importReader) node() ir.Node {
var sym *types.Sym
pos := r.pos()
if label := r.string(); label != "" {
sym = lookup(label)
sym = Lookup(label)
}
return ir.NewBranchStmt(pos, op, sym)
case ir.OLABEL:
return ir.NewLabelStmt(r.pos(), lookup(r.string()))
return ir.NewLabelStmt(r.pos(), Lookup(r.string()))
case ir.OEND:
return nil

View file

@ -4,7 +4,7 @@
// +build darwin dragonfly freebsd linux netbsd openbsd
package gc
package typecheck
import (
"os"

View file

@ -4,7 +4,7 @@
// +build !darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd
package gc
package typecheck
import (
"io"

View file

@ -33,7 +33,7 @@ func main() {
var b bytes.Buffer
fmt.Fprintln(&b, "// Code generated by mkbuiltin.go. DO NOT EDIT.")
fmt.Fprintln(&b)
fmt.Fprintln(&b, "package gc")
fmt.Fprintln(&b, "package typecheck")
fmt.Fprintln(&b)
fmt.Fprintln(&b, `import (`)
fmt.Fprintln(&b, ` "cmd/compile/internal/ir"`)

View file

@ -0,0 +1,435 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
// range

// typecheckrange typechecks a range statement: its range expression,
// iteration variables, and body.
func typecheckrange(n *ir.RangeStmt) {
	// Typechecking order is important here:
	// 0. first typecheck range expression (slice/map/chan),
	//	it is evaluated only once and so logically it is not part of the loop.
	// 1. typecheck produced values,
	//	this part can declare new vars and so it must be typechecked before body,
	//	because body can contain a closure that captures the vars.
	// 2. decldepth++ to denote loop body.
	// 3. typecheck body.
	// 4. decldepth--.
	typecheckrangeExpr(n)

	// second half of dance, the first half being typecheckrangeExpr
	n.SetTypecheck(1)
	ls := n.Vars
	for i1, n1 := range ls {
		if n1.Typecheck() == 0 {
			ls[i1] = AssignExpr(ls[i1])
		}
	}

	decldepth++
	Stmts(n.Body)
	decldepth--
}
// typecheckrangeExpr typechecks the range expression and iteration
// variables of n, deriving the key/value types from the ranged-over
// type and reporting invalid range targets.
func typecheckrangeExpr(n *ir.RangeStmt) {
	n.X = Expr(n.X)

	t := n.X.Type()
	if t == nil {
		// Range expression failed to typecheck; error already reported.
		return
	}
	// delicate little dance.  see typecheckas2
	ls := n.Vars
	for i1, n1 := range ls {
		if !ir.DeclaredBy(n1, n) {
			ls[i1] = AssignExpr(ls[i1])
		}
	}

	// Ranging over *[N]T iterates the array, not the pointer.
	if t.IsPtr() && t.Elem().IsArray() {
		t = t.Elem()
	}
	n.SetType(t)

	// t1/t2 are the types produced for the key and value variables.
	var t1, t2 *types.Type
	toomany := false
	switch t.Kind() {
	default:
		base.ErrorfAt(n.Pos(), "cannot range over %L", n.X)
		return

	case types.TARRAY, types.TSLICE:
		t1 = types.Types[types.TINT]
		t2 = t.Elem()

	case types.TMAP:
		t1 = t.Key()
		t2 = t.Elem()

	case types.TCHAN:
		if !t.ChanDir().CanRecv() {
			base.ErrorfAt(n.Pos(), "invalid operation: range %v (receive from send-only type %v)", n.X, n.X.Type())
			return
		}

		t1 = t.Elem()
		t2 = nil
		if len(n.Vars) == 2 {
			// Channels produce only one value per iteration.
			toomany = true
		}

	case types.TSTRING:
		t1 = types.Types[types.TINT]
		t2 = types.RuneType
	}

	if len(n.Vars) > 2 || toomany {
		base.ErrorfAt(n.Pos(), "too many variables in range")
	}

	var v1, v2 ir.Node
	if len(n.Vars) != 0 {
		v1 = n.Vars[0]
	}
	if len(n.Vars) > 1 {
		v2 = n.Vars[1]
	}

	// this is not only an optimization but also a requirement in the spec.
	// "if the second iteration variable is the blank identifier, the range
	// clause is equivalent to the same clause with only the first variable
	// present."
	if ir.IsBlank(v2) {
		if v1 != nil {
			n.Vars = []ir.Node{v1}
		}
		v2 = nil
	}

	if v1 != nil {
		if ir.DeclaredBy(v1, n) {
			// := declared by this range: take the derived type.
			v1.SetType(t1)
		} else if v1.Type() != nil {
			if op, why := assignop(t1, v1.Type()); op == ir.OXXX {
				base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t1, v1, why)
			}
		}
		checkassign(n, v1)
	}

	if v2 != nil {
		if ir.DeclaredBy(v2, n) {
			v2.SetType(t2)
		} else if v2.Type() != nil {
			if op, why := assignop(t2, v2.Type()); op == ir.OXXX {
				base.ErrorfAt(n.Pos(), "cannot assign type %v to %L in range%s", t2, v2, why)
			}
		}
		checkassign(n, v2)
	}
}
// typecheckselect typechecks a select statement, checking that each
// clause is a valid communication (send, receive, or receive-assign)
// and normalizing receive clauses to the canonical OSELRECV2 form.
func typecheckselect(sel *ir.SelectStmt) {
	var def ir.Node
	lno := ir.SetPos(sel)
	Stmts(sel.Init())
	for _, ncase := range sel.Cases {
		ncase := ncase.(*ir.CaseStmt)

		if len(ncase.List) == 0 {
			// default
			if def != nil {
				base.ErrorfAt(ncase.Pos(), "multiple defaults in select (first at %v)", ir.Line(def))
			} else {
				def = ncase
			}
		} else if len(ncase.List) > 1 {
			base.ErrorfAt(ncase.Pos(), "select cases cannot be lists")
		} else {
			ncase.List[0] = Stmt(ncase.List[0])
			n := ncase.List[0]
			// The communication moves from List to Comm.
			ncase.Comm = n
			ncase.List.Set(nil)
			// oselrecv2 rewrites a receive clause into the two-result
			// OSELRECV2 form (dst, _ = <-c) that the backend expects.
			oselrecv2 := func(dst, recv ir.Node, colas bool) {
				n := ir.NewAssignListStmt(n.Pos(), ir.OSELRECV2, nil, nil)
				n.Lhs = []ir.Node{dst, ir.BlankNode}
				n.Rhs = []ir.Node{recv}
				n.Def = colas
				n.SetTypecheck(1)
				ncase.Comm = n
			}
			switch n.Op() {
			default:
				pos := n.Pos()
				if n.Op() == ir.ONAME {
					// We don't have the right position for ONAME nodes (see #15459 and
					// others). Using ncase.Pos for now as it will provide the correct
					// line number (assuming the expression follows the "case" keyword
					// on the same line). This matches the approach before 1.10.
					pos = ncase.Pos()
				}
				base.ErrorfAt(pos, "select case must be receive, send or assign recv")

			case ir.OAS:
				// convert x = <-c into x, _ = <-c
				// remove implicit conversions; the eventual assignment
				// will reintroduce them.
				n := n.(*ir.AssignStmt)
				if r := n.Y; r.Op() == ir.OCONVNOP || r.Op() == ir.OCONVIFACE {
					r := r.(*ir.ConvExpr)
					if r.Implicit() {
						n.Y = r.X
					}
				}
				if n.Y.Op() != ir.ORECV {
					base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
					break
				}
				oselrecv2(n.X, n.Y, n.Def)

			case ir.OAS2RECV:
				n := n.(*ir.AssignListStmt)
				if n.Rhs[0].Op() != ir.ORECV {
					base.ErrorfAt(n.Pos(), "select assignment must have receive on right hand side")
					break
				}
				n.SetOp(ir.OSELRECV2)

			case ir.ORECV:
				// convert <-c into _, _ = <-c
				n := n.(*ir.UnaryExpr)
				oselrecv2(ir.BlankNode, n, false)

			case ir.OSEND:
				break
			}
		}

		Stmts(ncase.Body)
	}

	base.Pos = lno
}
// typeSet records the case types seen so far in a type switch,
// so duplicate cases can be diagnosed. The zero value is ready to use.
type typeSet struct {
	// m maps a type's LongString to all entries sharing that string;
	// collisions are resolved with types.Identical in add.
	m map[string][]typeSetEntry
}
// add records that typ appears as a type-switch case at pos.
// If an identical type was already recorded, it reports a
// duplicate-case error instead.
func (s *typeSet) add(pos src.XPos, typ *types.Type) {
	if s.m == nil {
		s.m = make(map[string][]typeSetEntry)
	}

	// LongString does not uniquely identify types, so we need to
	// disambiguate collisions with types.Identical.
	// TODO(mdempsky): Add a method that *is* unique.
	key := typ.LongString()
	entries := s.m[key]
	for i := range entries {
		if types.Identical(typ, entries[i].typ) {
			base.ErrorfAt(pos, "duplicate case %v in type switch\n\tprevious case at %s", typ, base.FmtPos(entries[i].pos))
			return
		}
	}
	s.m[key] = append(entries, typeSetEntry{pos, typ})
}
// typeSetEntry is one recorded type-switch case: the type and the
// position of the case, kept so duplicates can point at the original.
type typeSetEntry struct {
	pos src.XPos
	typ *types.Type
}
// typecheckExprSwitch typechecks an expression switch statement:
// it types the tag (defaulting to bool when absent), verifies the tag
// type is switchable, and checks each case value against the tag type.
func typecheckExprSwitch(n *ir.SwitchStmt) {
	// With no tag, the switch is equivalent to "switch true".
	t := types.Types[types.TBOOL]
	if n.Tag != nil {
		n.Tag = Expr(n.Tag)
		n.Tag = DefaultLit(n.Tag, nil)
		t = n.Tag.Type()
	}

	// nilonly is non-empty for tag types that can only be compared
	// against nil (map, func, slice); it names the kind for errors.
	var nilonly string
	if t != nil {
		switch {
		case t.IsMap():
			nilonly = "map"
		case t.Kind() == types.TFUNC:
			nilonly = "func"
		case t.IsSlice():
			nilonly = "slice"

		case !types.IsComparable(t):
			if t.IsStruct() {
				base.ErrorfAt(n.Pos(), "cannot switch on %L (struct containing %v cannot be compared)", n.Tag, types.IncomparableField(t).Type)
			} else {
				base.ErrorfAt(n.Pos(), "cannot switch on %L", n.Tag)
			}
			t = nil
		}
	}

	var defCase ir.Node
	var cs constSet
	for _, ncase := range n.Cases {
		ncase := ncase.(*ir.CaseStmt)
		ls := ncase.List
		if len(ls) == 0 { // default:
			if defCase != nil {
				base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
			} else {
				defCase = ncase
			}
		}

		for i := range ls {
			ir.SetPos(ncase)
			ls[i] = Expr(ls[i])
			ls[i] = DefaultLit(ls[i], t)
			n1 := ls[i]
			// Skip further checks if the tag or case failed to type.
			if t == nil || n1.Type() == nil {
				continue
			}

			if nilonly != "" && !ir.IsNil(n1) {
				base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (can only compare %s %v to nil)", n1, nilonly, n.Tag)
			} else if t.IsInterface() && !n1.Type().IsInterface() && !types.IsComparable(n1.Type()) {
				base.ErrorfAt(ncase.Pos(), "invalid case %L in switch (incomparable type)", n1)
			} else {
				// The case is valid if either direction of
				// assignment between case and tag type works.
				op1, _ := assignop(n1.Type(), t)
				op2, _ := assignop(t, n1.Type())
				if op1 == ir.OXXX && op2 == ir.OXXX {
					if n.Tag != nil {
						base.ErrorfAt(ncase.Pos(), "invalid case %v in switch on %v (mismatched types %v and %v)", n1, n.Tag, n1.Type(), t)
					} else {
						base.ErrorfAt(ncase.Pos(), "invalid case %v in switch (mismatched types %v and bool)", n1, n1.Type())
					}
				}
			}

			// Don't check for duplicate bools. Although the spec allows it,
			// (1) the compiler hasn't checked it in the past, so compatibility mandates it, and
			// (2) it would disallow useful things like
			//	case GOARCH == "arm" && GOARM == "5":
			//	case GOARCH == "arm":
			//	which would both evaluate to false for non-ARM compiles.
			if !n1.Type().IsBoolean() {
				cs.add(ncase.Pos(), n1, "case", "switch")
			}
		}

		Stmts(ncase.Body)
	}
}
// typecheckTypeSwitch typechecks a type switch statement: it checks
// the guard expression is an interface, validates each case type
// against the guard, diagnoses impossible and duplicate cases, and
// assigns types to per-clause guard variables.
func typecheckTypeSwitch(n *ir.SwitchStmt) {
	guard := n.Tag.(*ir.TypeSwitchGuard)
	guard.X = Expr(guard.X)
	t := guard.X.Type()
	if t != nil && !t.IsInterface() {
		base.ErrorfAt(n.Pos(), "cannot type switch on non-interface value %L", guard.X)
		t = nil
	}

	// We don't actually declare the type switch's guarded
	// declaration itself. So if there are no cases, we won't
	// notice that it went unused.
	if v := guard.Tag; v != nil && !ir.IsBlank(v) && len(n.Cases) == 0 {
		base.ErrorfAt(v.Pos(), "%v declared but not used", v.Sym())
	}

	var defCase, nilCase ir.Node
	var ts typeSet
	for _, ncase := range n.Cases {
		ncase := ncase.(*ir.CaseStmt)
		ls := ncase.List
		if len(ls) == 0 { // default:
			if defCase != nil {
				base.ErrorfAt(ncase.Pos(), "multiple defaults in switch (first at %v)", ir.Line(defCase))
			} else {
				defCase = ncase
			}
		}

		for i := range ls {
			// Case values must be types (or nil), so check in
			// both expression and type context.
			ls[i] = check(ls[i], ctxExpr|ctxType)
			n1 := ls[i]
			if t == nil || n1.Type() == nil {
				continue
			}

			var missing, have *types.Field
			var ptr int
			if ir.IsNil(n1) { // case nil:
				if nilCase != nil {
					base.ErrorfAt(ncase.Pos(), "multiple nil cases in type switch (first at %v)", ir.Line(nilCase))
				} else {
					nilCase = ncase
				}
				continue
			}
			if n1.Op() != ir.OTYPE {
				base.ErrorfAt(ncase.Pos(), "%L is not a type", n1)
				continue
			}
			// A concrete case type must implement the guard's
			// interface, or the case can never match.
			if !n1.Type().IsInterface() && !implements(n1.Type(), t, &missing, &have, &ptr) && !missing.Broke() {
				if have != nil && !have.Broke() {
					base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
						" (wrong type for %v method)\n\thave %v%S\n\twant %v%S", guard.X, n1.Type(), missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
				} else if ptr != 0 {
					base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
						" (%v method has pointer receiver)", guard.X, n1.Type(), missing.Sym)
				} else {
					base.ErrorfAt(ncase.Pos(), "impossible type switch case: %L cannot have dynamic type %v"+
						" (missing %v method)", guard.X, n1.Type(), missing.Sym)
				}
				continue
			}

			ts.add(ncase.Pos(), n1.Type())
		}

		if len(ncase.Vars) != 0 {
			// Assign the clause variable's type.
			vt := t
			if len(ls) == 1 {
				if ls[0].Op() == ir.OTYPE {
					vt = ls[0].Type()
				} else if !ir.IsNil(ls[0]) {
					// Invalid single-type case;
					// mark variable as broken.
					vt = nil
				}
			}

			nvar := ncase.Vars[0]
			nvar.SetType(vt)
			if vt != nil {
				nvar = AssignExpr(nvar)
			} else {
				// Clause variable is broken; prevent typechecking.
				nvar.SetTypecheck(1)
				nvar.SetWalkdef(1)
			}
			ncase.Vars[0] = nvar
		}

		Stmts(ncase.Body)
	}
}
// typecheckswitch typechecks a switch statement, dispatching to the
// type-switch or expression-switch checker based on the tag.
func typecheckswitch(n *ir.SwitchStmt) {
	Stmts(n.Init())
	if tag := n.Tag; tag != nil && tag.Op() == ir.OTYPESW {
		typecheckTypeSwitch(n)
		return
	}
	typecheckExprSwitch(n)
}

View file

@ -0,0 +1,793 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import (
"fmt"
"sort"
"strconv"
"strings"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
)
// AssignConv converts node n for assignment to type t.
// context is a fixed description of the assignment, used in any
// error message produced by the conversion.
func AssignConv(n ir.Node, t *types.Type, context string) ir.Node {
	describe := func() string { return context }
	return assignconvfn(n, t, describe)
}
// DotImportRefs maps idents introduced by importDot back to the
// ir.PkgName they were dot-imported through, so uses of those idents
// can be attributed to the originating dot import.
var DotImportRefs map[*ir.Ident]*ir.PkgName
// LookupNum looks up the symbol starting with prefix and ending with
// the decimal n. If prefix is too long, LookupNum panics.
func LookupNum(prefix string, n int) *types.Sym {
	var buf [20]byte // plenty long enough for all current users
	b := buf[:len(prefix)] // panics if prefix does not fit
	copy(b, prefix)
	b = strconv.AppendInt(b, int64(n), 10)
	return types.LocalPkg.LookupBytes(b)
}
// NewFuncParams converts the fields of the funarg struct type tl into
// a list of ir.Fields for a function declaration. If mustname is set,
// unnamed (or blank-named) parameters are given generated ".anonN"
// names so they can be referred to in a trampoline.
func NewFuncParams(tl *types.Type, mustname bool) []*ir.Field {
	var args []*ir.Field
	gen := 0 // counter for generated anonymous-parameter names
	for _, t := range tl.Fields().Slice() {
		s := t.Sym
		if mustname && (s == nil || s.Name == "_") {
			// invent a name so that we can refer to it in the trampoline
			s = LookupNum(".anon", gen)
			gen++
		}
		a := ir.NewField(base.Pos, s, nil, t.Type)
		a.Pos = t.Pos
		a.IsDDD = t.IsDDD()
		args = append(args, a)
	}
	return args
}
// NewName returns a new ONAME Node associated with symbol s,
// created at base.Pos and owned by the current function.
func NewName(s *types.Sym) *ir.Name {
	name := ir.NewNameAt(base.Pos, s)
	name.Curfn = ir.CurFunc
	return name
}
// NodAddr returns a node representing &n at base.Pos.
func NodAddr(n ir.Node) *ir.AddrExpr {
	return ir.NewAddrExpr(base.Pos, n)
}
// NodAddrAt returns a node representing &n at position pos.
func NodAddrAt(pos src.XPos, n ir.Node) *ir.AddrExpr {
	return ir.NewAddrExpr(pos, n)
}
// NodNil returns a new nil node at base.Pos whose type is
// the untyped-nil type types.Types[types.TNIL].
func NodNil() ir.Node {
	nilExpr := ir.NewNilExpr(base.Pos)
	nilExpr.SetType(types.Types[types.TNIL])
	return nilExpr
}
// AddImplicitDots rewrites the selector n (x.f) to spell out any
// elided embedded-field selections: it finds the shortest unique path
// of embedded fields leading to f and inserts the corresponding
// implicit ODOT nodes. An ambiguous selector is reported as an error.
func AddImplicitDots(n *ir.SelectorExpr) *ir.SelectorExpr {
	n.X = check(n.X, ctxType|ctxExpr)
	if n.X.Diag() {
		n.SetDiag(true)
	}
	t := n.X.Type()
	if t == nil {
		return n
	}

	// Method expressions (T.f) are not rewritten here.
	if n.X.Op() == ir.OTYPE {
		return n
	}

	s := n.Sel
	if s == nil {
		return n
	}

	switch path, ambig := dotpath(s, t, nil, false); {
	case path != nil:
		// rebuild elided dots
		for c := len(path) - 1; c >= 0; c-- {
			dot := ir.NewSelectorExpr(base.Pos, ir.ODOT, n.X, path[c].field.Sym)
			dot.SetImplicit(true)
			dot.SetType(path[c].field.Type)
			n.X = dot
		}
	case ambig:
		base.Errorf("ambiguous selector %v", n)
		n.X = nil
	}

	return n
}
// CalcMethods computes the full method set of t — its own methods plus
// methods promoted from embedded fields — and stores the sorted result
// in t.AllMethods. It is a no-op if AllMethods is already populated.
func CalcMethods(t *types.Type) {
	if t == nil || t.AllMethods().Len() != 0 {
		return
	}

	// mark top-level method symbols
	// so that expand1 doesn't consider them.
	for _, f := range t.Methods().Slice() {
		f.Sym.SetUniq(true)
	}

	// generate all reachable methods
	slist = slist[:0]
	expand1(t, true)

	// check each method to be uniquely reachable
	var ms []*types.Field
	for i, sl := range slist {
		slist[i].field = nil // release for GC; slist is reused
		sl.field.Sym.SetUniq(false)

		var f *types.Field
		path, _ := dotpath(sl.field.Sym, t, &f, false)
		if path == nil {
			// Not uniquely reachable: shadowed or ambiguous.
			continue
		}

		// dotpath may have dug out arbitrary fields, we only want methods.
		if !f.IsMethod() {
			continue
		}

		// add it to the base type method list
		f = f.Copy()
		f.Embedded = 1 // needs a trampoline
		for _, d := range path {
			if d.field.Type.IsPtr() {
				// Reached through a pointer embedded field.
				f.Embedded = 2
				break
			}
		}
		ms = append(ms, f)
	}

	for _, f := range t.Methods().Slice() {
		f.Sym.SetUniq(false)
	}

	ms = append(ms, t.Methods().Slice()...)
	sort.Sort(types.MethodsByName(ms))
	t.AllMethods().Set(ms)
}
// adddot1 returns the number of fields or methods named s at depth d in Type t.
// If exactly one exists, it will be returned in *save (if save is not nil),
// and dotlist will contain the path of embedded fields traversed to find it,
// in reverse order. If none exist, more will indicate whether t contains any
// embedded fields at depth d, so callers can decide whether to retry at
// a greater depth.
func adddot1(s *types.Sym, t *types.Type, d int, save **types.Field, ignorecase bool) (c int, more bool) {
	// Guard against cyclic embedding; Recur marks types on the
	// current search path.
	if t.Recur() {
		return
	}
	t.SetRecur(true)
	defer t.SetRecur(false)

	var u *types.Type
	d--
	if d < 0 {
		// We've reached our target depth. If t has any fields/methods
		// named s, then we're done. Otherwise, we still need to check
		// below for embedded fields.
		c = lookdot0(s, t, save, ignorecase)
		if c != 0 {
			return c, false
		}
	}

	u = t
	if u.IsPtr() {
		u = u.Elem()
	}
	if !u.IsStruct() && !u.IsInterface() {
		return c, false
	}

	// Recurse into embedded fields, one level shallower.
	for _, f := range u.Fields().Slice() {
		if f.Embedded == 0 || f.Sym == nil {
			continue
		}
		if d < 0 {
			// Found an embedded field at target depth.
			return c, true
		}
		a, more1 := adddot1(s, f.Type, d, save, ignorecase)
		if a != 0 && c == 0 {
			// First match at this level: record the embedded
			// field in the (reverse-order) path.
			dotlist[d].field = f
		}
		c += a
		if more1 {
			more = true
		}
	}

	return c, more
}
// dotlist is used by adddot1 to record the path of embedded fields
// used to access a target field or method.
// Must be non-nil so that dotpath returns a non-nil slice even if d is zero.
// dotpath grows it as needed for deeper searches.
var dotlist = make([]dlist, 10)
// assignconvfn converts node n for assignment to type t, reporting an
// error (described by context) if the assignment is not permitted.
// The result carries the converted type with an implicit conversion
// node inserted when one is required.
func assignconvfn(n ir.Node, t *types.Type, context func() string) ir.Node {
	if n == nil || n.Type() == nil || n.Type().Broke() {
		return n
	}

	if t.Kind() == types.TBLANK && n.Type().Kind() == types.TNIL {
		base.Errorf("use of untyped nil")
	}

	// Give untyped constants their concrete type first.
	n = convlit1(n, t, false, context)
	if n.Type() == nil {
		return n
	}
	if t.Kind() == types.TBLANK {
		return n
	}

	// Convert ideal bool from comparison to plain bool
	// if the next step is non-bool (like interface{}).
	if n.Type() == types.UntypedBool && !t.IsBoolean() {
		if n.Op() == ir.ONAME || n.Op() == ir.OLITERAL {
			r := ir.NewConvExpr(base.Pos, ir.OCONVNOP, nil, n)
			r.SetType(types.Types[types.TBOOL])
			r.SetTypecheck(1)
			r.SetImplicit(true)
			n = r
		}
	}

	if types.Identical(n.Type(), t) {
		return n
	}

	op, why := assignop(n.Type(), t)
	if op == ir.OXXX {
		base.Errorf("cannot use %L as type %v in %s%s", n, t, context(), why)
		// Continue with OCONV so downstream code sees a conversion node.
		op = ir.OCONV
	}

	r := ir.NewConvExpr(base.Pos, op, t, n)
	r.SetTypecheck(1)
	r.SetImplicit(true)
	return r
}
// assignop reports whether a value of type src is assignment
// compatible with type dst, following the assignability rules of the
// Go spec in order.
// If so, it returns the op code to use in conversion.
// If not, it returns OXXX. In this case, the string return parameter may
// hold a reason why. In all other cases, it'll be the empty string.
func assignop(src, dst *types.Type) (ir.Op, string) {
	if src == dst {
		return ir.OCONVNOP, ""
	}
	if src == nil || dst == nil || src.Kind() == types.TFORW || dst.Kind() == types.TFORW || src.Underlying() == nil || dst.Underlying() == nil {
		return ir.OXXX, ""
	}

	// 1. src type is identical to dst.
	if types.Identical(src, dst) {
		return ir.OCONVNOP, ""
	}

	// 2. src and dst have identical underlying types
	// and either src or dst is not a named type or
	// both are empty interface types.
	// For assignable but different non-empty interface types,
	// we want to recompute the itab. Recomputing the itab ensures
	// that itabs are unique (thus an interface with a compile-time
	// type I has an itab with interface type I).
	if types.Identical(src.Underlying(), dst.Underlying()) {
		if src.IsEmptyInterface() {
			// Conversion between two empty interfaces
			// requires no code.
			return ir.OCONVNOP, ""
		}
		if (src.Sym() == nil || dst.Sym() == nil) && !src.IsInterface() {
			// Conversion between two types, at least one unnamed,
			// needs no conversion. The exception is nonempty interfaces
			// which need to have their itab updated.
			return ir.OCONVNOP, ""
		}
	}

	// 3. dst is an interface type and src implements dst.
	if dst.IsInterface() && src.Kind() != types.TNIL {
		var missing, have *types.Field
		var ptr int
		if implements(src, dst, &missing, &have, &ptr) {
			// Call itabname so that (src, dst)
			// gets added to itabs early, which allows
			// us to de-virtualize calls through this
			// type/interface pair later. See peekitabs in reflect.go
			if types.IsDirectIface(src) && !dst.IsEmptyInterface() {
				NeedITab(src, dst)
			}
			return ir.OCONVIFACE, ""
		}

		// we'll have complained about this method anyway, suppress spurious messages.
		if have != nil && have.Sym == missing.Sym && (have.Type.Broke() || missing.Type.Broke()) {
			return ir.OCONVIFACE, ""
		}

		// Build a diagnostic explaining why src does not implement dst.
		var why string
		if isptrto(src, types.TINTER) {
			why = fmt.Sprintf(":\n\t%v is pointer to interface, not interface", src)
		} else if have != nil && have.Sym == missing.Sym && have.Nointerface() {
			why = fmt.Sprintf(":\n\t%v does not implement %v (%v method is marked 'nointerface')", src, dst, missing.Sym)
		} else if have != nil && have.Sym == missing.Sym {
			why = fmt.Sprintf(":\n\t%v does not implement %v (wrong type for %v method)\n"+
				"\t\thave %v%S\n\t\twant %v%S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
		} else if ptr != 0 {
			why = fmt.Sprintf(":\n\t%v does not implement %v (%v method has pointer receiver)", src, dst, missing.Sym)
		} else if have != nil {
			why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)\n"+
				"\t\thave %v%S\n\t\twant %v%S", src, dst, missing.Sym, have.Sym, have.Type, missing.Sym, missing.Type)
		} else {
			why = fmt.Sprintf(":\n\t%v does not implement %v (missing %v method)", src, dst, missing.Sym)
		}

		return ir.OXXX, why
	}

	if isptrto(dst, types.TINTER) {
		why := fmt.Sprintf(":\n\t%v is pointer to interface, not interface", dst)
		return ir.OXXX, why
	}

	if src.IsInterface() && dst.Kind() != types.TBLANK {
		var missing, have *types.Field
		var ptr int
		var why string
		if implements(dst, src, &missing, &have, &ptr) {
			why = ": need type assertion"
		}
		return ir.OXXX, why
	}

	// 4. src is a bidirectional channel value, dst is a channel type,
	// src and dst have identical element types, and
	// either src or dst is not a named type.
	if src.IsChan() && src.ChanDir() == types.Cboth && dst.IsChan() {
		if types.Identical(src.Elem(), dst.Elem()) && (src.Sym() == nil || dst.Sym() == nil) {
			return ir.OCONVNOP, ""
		}
	}

	// 5. src is the predeclared identifier nil and dst is a nillable type.
	if src.Kind() == types.TNIL {
		switch dst.Kind() {
		case types.TPTR,
			types.TFUNC,
			types.TMAP,
			types.TCHAN,
			types.TINTER,
			types.TSLICE:
			return ir.OCONVNOP, ""
		}
	}

	// 6. rule about untyped constants - already converted by defaultlit.

	// 7. Any typed value can be assigned to the blank identifier.
	if dst.Kind() == types.TBLANK {
		return ir.OCONVNOP, ""
	}

	return ir.OXXX, ""
}
// convertop reports whether we can convert a value of type src to a
// value of type dst, following the conversion rules of the Go spec.
// If so, it returns the op code to use in conversion (maybe OCONVNOP).
// If not, it returns OXXX. In this case, the string return parameter may
// hold a reason why. In all other cases, it'll be the empty string.
// srcConstant indicates whether the value of type src is a constant.
func convertop(srcConstant bool, src, dst *types.Type) (ir.Op, string) {
	if src == dst {
		return ir.OCONVNOP, ""
	}
	if src == nil || dst == nil {
		return ir.OXXX, ""
	}

	// Conversions from regular to go:notinheap are not allowed
	// (unless it's unsafe.Pointer). These are runtime-specific
	// rules.
	// (a) Disallow (*T) to (*U) where T is go:notinheap but U isn't.
	if src.IsPtr() && dst.IsPtr() && dst.Elem().NotInHeap() && !src.Elem().NotInHeap() {
		why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable), but %v is not", dst.Elem(), src.Elem())
		return ir.OXXX, why
	}
	// (b) Disallow string to []T where T is go:notinheap.
	if src.IsString() && dst.IsSlice() && dst.Elem().NotInHeap() && (dst.Elem().Kind() == types.ByteType.Kind() || dst.Elem().Kind() == types.RuneType.Kind()) {
		why := fmt.Sprintf(":\n\t%v is incomplete (or unallocatable)", dst.Elem())
		return ir.OXXX, why
	}

	// 1. src can be assigned to dst.
	op, why := assignop(src, dst)
	if op != ir.OXXX {
		return op, why
	}

	// The rules for interfaces are no different in conversions
	// than assignments. If interfaces are involved, stop now
	// with the good message from assignop.
	// Otherwise clear the error.
	if src.IsInterface() || dst.IsInterface() {
		return ir.OXXX, why
	}

	// 2. Ignoring struct tags, src and dst have identical underlying types.
	if types.IdenticalIgnoreTags(src.Underlying(), dst.Underlying()) {
		return ir.OCONVNOP, ""
	}

	// 3. src and dst are unnamed pointer types and, ignoring struct tags,
	// their base types have identical underlying types.
	if src.IsPtr() && dst.IsPtr() && src.Sym() == nil && dst.Sym() == nil {
		if types.IdenticalIgnoreTags(src.Elem().Underlying(), dst.Elem().Underlying()) {
			return ir.OCONVNOP, ""
		}
	}

	// 4. src and dst are both integer or floating point types.
	if (src.IsInteger() || src.IsFloat()) && (dst.IsInteger() || dst.IsFloat()) {
		if types.SimType[src.Kind()] == types.SimType[dst.Kind()] {
			// Same machine representation: no code needed.
			return ir.OCONVNOP, ""
		}
		return ir.OCONV, ""
	}

	// 5. src and dst are both complex types.
	if src.IsComplex() && dst.IsComplex() {
		if types.SimType[src.Kind()] == types.SimType[dst.Kind()] {
			return ir.OCONVNOP, ""
		}
		return ir.OCONV, ""
	}

	// Special case for constant conversions: any numeric
	// conversion is potentially okay. We'll validate further
	// within evconst. See #38117.
	if srcConstant && (src.IsInteger() || src.IsFloat() || src.IsComplex()) && (dst.IsInteger() || dst.IsFloat() || dst.IsComplex()) {
		return ir.OCONV, ""
	}

	// 6. src is an integer or has type []byte or []rune
	// and dst is a string type.
	if src.IsInteger() && dst.IsString() {
		return ir.ORUNESTR, ""
	}

	if src.IsSlice() && dst.IsString() {
		if src.Elem().Kind() == types.ByteType.Kind() {
			return ir.OBYTES2STR, ""
		}
		if src.Elem().Kind() == types.RuneType.Kind() {
			return ir.ORUNES2STR, ""
		}
	}

	// 7. src is a string and dst is []byte or []rune.
	// String to slice.
	if src.IsString() && dst.IsSlice() {
		if dst.Elem().Kind() == types.ByteType.Kind() {
			return ir.OSTR2BYTES, ""
		}
		if dst.Elem().Kind() == types.RuneType.Kind() {
			return ir.OSTR2RUNES, ""
		}
	}

	// 8. src is a pointer or uintptr and dst is unsafe.Pointer.
	if (src.IsPtr() || src.IsUintptr()) && dst.IsUnsafePtr() {
		return ir.OCONVNOP, ""
	}

	// 9. src is unsafe.Pointer and dst is a pointer or uintptr.
	if src.IsUnsafePtr() && (dst.IsPtr() || dst.IsUintptr()) {
		return ir.OCONVNOP, ""
	}

	// src is map and dst is a pointer to corresponding hmap.
	// This rule is needed for the implementation detail that
	// go gc maps are implemented as a pointer to a hmap struct.
	if src.Kind() == types.TMAP && dst.IsPtr() &&
		src.MapType().Hmap == dst.Elem() {
		return ir.OCONVNOP, ""
	}

	return ir.OXXX, ""
}
// Code to resolve elided DOTs in embedded types.

// A dlist stores a pointer to a TFIELD Type embedded within
// a TSTRUCT or TINTER Type; a []dlist is one step-by-step path
// through embedded fields (see dotpath).
type dlist struct {
	field *types.Field
}
// dotpath computes the unique shortest explicit selector path to fully qualify
// a selection expression x.f, where x is of type t and f is the symbol s.
// If no such path exists, dotpath returns nil.
// If there are multiple shortest paths to the same depth, ambig is true.
func dotpath(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) (path []dlist, ambig bool) {
	// The embedding of types within structs imposes a tree structure onto
	// types: structs parent the types they embed, and types parent their
	// fields or methods. Our goal here is to find the shortest path to
	// a field or method named s in the subtree rooted at t. To accomplish
	// that, we iteratively perform depth-first searches of increasing depth
	// until we either find the named field/method or exhaust the tree.
	for d := 0; ; d++ {
		// Grow dotlist so adddot1 can record a path of depth d
		// (it writes entries at indices < d).
		if d > len(dotlist) {
			dotlist = append(dotlist, dlist{})
		}
		if c, more := adddot1(s, t, d, save, ignorecase); c == 1 {
			// Unique match: dotlist[:d] holds the path (reverse order).
			return dotlist[:d], false
		} else if c > 1 {
			return nil, true
		} else if !more {
			// No match and no deeper embedded fields to try.
			return nil, false
		}
	}
}
// expand0 appends to slist the not-yet-collected methods of t:
// interface methods if t (or *t's element) is an interface, otherwise
// the methods of t's receiver base type. The Sym Uniq flag marks
// symbols already collected (or deliberately excluded by the caller).
func expand0(t *types.Type) {
	u := t
	if u.IsPtr() {
		u = u.Elem()
	}

	if u.IsInterface() {
		for _, f := range u.Fields().Slice() {
			if f.Sym.Uniq() {
				continue
			}
			f.Sym.SetUniq(true)
			slist = append(slist, symlink{field: f})
		}

		return
	}

	u = types.ReceiverBaseType(t)
	if u != nil {
		for _, f := range u.Methods().Slice() {
			if f.Sym.Uniq() {
				continue
			}
			f.Sym.SetUniq(true)
			slist = append(slist, symlink{field: f})
		}
	}
}
// expand1 recursively collects (via expand0) the methods reachable
// from t through embedded fields into slist. top is true only for the
// root type, whose own methods are excluded; Recur guards against
// cyclic embedding.
func expand1(t *types.Type, top bool) {
	if t.Recur() {
		return
	}
	t.SetRecur(true)

	if !top {
		expand0(t)
	}

	u := t
	if u.IsPtr() {
		u = u.Elem()
	}

	if u.IsStruct() || u.IsInterface() {
		for _, f := range u.Fields().Slice() {
			if f.Embedded == 0 {
				continue
			}
			if f.Sym == nil {
				continue
			}
			expand1(f.Type, false)
		}
	}

	t.SetRecur(false)
}
// ifacelookdot looks up the method named s on type t (possibly through
// embedded fields). It returns the method and whether the path to it
// traverses a pointer-typed embedded field. It reports errors for
// ambiguous selectors and for names that resolve to fields rather
// than methods.
func ifacelookdot(s *types.Sym, t *types.Type, ignorecase bool) (m *types.Field, followptr bool) {
	if t == nil {
		return nil, false
	}

	path, ambig := dotpath(s, t, &m, ignorecase)
	if path == nil {
		if ambig {
			base.Errorf("%v.%v is ambiguous", t, s)
		}
		return nil, false
	}

	for _, d := range path {
		if d.field.Type.IsPtr() {
			followptr = true
			break
		}
	}

	if !m.IsMethod() {
		base.Errorf("%v.%v is a field, not a method", t, s)
		return nil, followptr
	}

	return m, followptr
}
// implements reports whether type t implements interface iface.
// On failure it sets *m to the interface method t is missing, *samename
// to a method of t with the same name but wrong type (if any), and *ptr
// to 1 when the method exists but has a pointer receiver while t is a
// value type (so the method is not in t's method set).
// Both method lists are sorted by name, so matching is a single merge pass.
func implements(t, iface *types.Type, m, samename **types.Field, ptr *int) bool {
	t0 := t
	if t == nil {
		return false
	}

	if t.IsInterface() {
		// Interface-to-interface: every method of iface must appear
		// identically among t's methods.
		i := 0
		tms := t.Fields().Slice()
		for _, im := range iface.Fields().Slice() {
			for i < len(tms) && tms[i].Sym != im.Sym {
				i++
			}
			if i == len(tms) {
				*m = im
				*samename = nil
				*ptr = 0
				return false
			}
			tm := tms[i]
			if !types.Identical(tm.Type, im.Type) {
				*m = im
				*samename = tm
				*ptr = 0
				return false
			}
		}

		return true
	}

	t = types.ReceiverBaseType(t)
	var tms []*types.Field
	if t != nil {
		// Include promoted methods from embedded fields.
		CalcMethods(t)
		tms = t.AllMethods().Slice()
	}
	i := 0
	for _, im := range iface.Fields().Slice() {
		if im.Broke() {
			continue
		}
		for i < len(tms) && tms[i].Sym != im.Sym {
			i++
		}
		if i == len(tms) {
			*m = im
			*samename, _ = ifacelookdot(im.Sym, t, true)
			*ptr = 0
			return false
		}
		tm := tms[i]
		if tm.Nointerface() || !types.Identical(tm.Type, im.Type) {
			*m = im
			*samename = tm
			*ptr = 0
			return false
		}
		followptr := tm.Embedded == 2

		// if pointer receiver in method,
		// the method does not exist for value types.
		rcvr := tm.Type.Recv().Type
		if rcvr.IsPtr() && !t0.IsPtr() && !followptr && !types.IsInterfaceMethod(tm.Type) {
			if false && base.Flag.LowerR != 0 {
				base.Errorf("interface pointer mismatch")
			}

			*m = im
			*samename = nil
			*ptr = 1
			return false
		}
	}

	return true
}
// isptrto reports whether t is a non-nil pointer type whose element
// type has kind et.
func isptrto(t *types.Type, et types.Kind) bool {
	if t == nil || !t.IsPtr() {
		return false
	}
	elem := t.Elem()
	return elem != nil && elem.Kind() == et
}
// lookdot0 returns the number of fields or methods named s associated
// with Type t. If exactly one exists, it will be returned in *save
// (if save is not nil). Both direct struct/interface fields and
// methods of the receiver base type are considered.
func lookdot0(s *types.Sym, t *types.Type, save **types.Field, ignorecase bool) int {
	u := t
	if u.IsPtr() {
		u = u.Elem()
	}

	c := 0
	// Count matching fields (or interface methods).
	if u.IsStruct() || u.IsInterface() {
		for _, f := range u.Fields().Slice() {
			if f.Sym == s || (ignorecase && f.IsMethod() && strings.EqualFold(f.Sym.Name, s.Name)) {
				if save != nil {
					*save = f
				}
				c++
			}
		}
	}

	// Count matching non-promoted methods.
	u = t
	if t.Sym() != nil && t.IsPtr() && !t.Elem().IsPtr() {
		// If t is a defined pointer type, then x.m is shorthand for (*x).m.
		u = t.Elem()
	}
	u = types.ReceiverBaseType(u)
	if u != nil {
		for _, f := range u.Methods().Slice() {
			if f.Embedded == 0 && (f.Sym == s || (ignorecase && strings.EqualFold(f.Sym.Name, s.Name))) {
				if save != nil {
					*save = f
				}
				c++
			}
		}
	}

	return c
}
// slist is the scratch list of candidate promoted methods collected by
// expand0/expand1 and consumed (and reset) by CalcMethods.
var slist []symlink

// Code to help generate trampoline functions for methods on embedded
// types. These are approx the same as the corresponding adddot
// routines except that they expect to be called with unique tasks and
// they return the actual methods.

// symlink is one collected candidate method.
type symlink struct {
	field *types.Field
}

View file

@ -0,0 +1,104 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package typecheck
import (
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
)
// LookupRuntime returns the Name node for the runtime-package symbol
// name. It is a fatal compiler error if the symbol has no definition.
func LookupRuntime(name string) *ir.Name {
	sym := ir.Pkgs.Runtime.Lookup(name)
	if sym == nil || sym.Def == nil {
		base.Fatalf("syslook: can't find runtime.%s", name)
	}
	return ir.AsNode(sym.Def).(*ir.Name)
}
// SubstArgTypes substitutes the given list of types for
// successive occurrences of the "any" placeholder in the
// type syntax expression n.Type.
// The result of SubstArgTypes MUST be assigned back to old, e.g.
//	n.Left = SubstArgTypes(n.Left, t1, t2)
func SubstArgTypes(old *ir.Name, types_ ...*types.Type) *ir.Name {
	// Work on a clone so the cached runtime declaration is untouched.
	n := old.CloneName()

	for _, t := range types_ {
		types.CalcSize(t)
	}
	// SubstAny consumes entries from types_ as it substitutes;
	// leftovers mean the caller passed too many types.
	n.SetType(types.SubstAny(n.Type(), &types_))
	if len(types_) > 0 {
		base.Fatalf("substArgTypes: too many argument types")
	}
	return n
}
// AutoLabel generates a new Name node for use with
// an automatically generated label.
// prefix is a short mnemonic (e.g. ".s" for switch)
// to help with debugging.
// It should begin with "." to avoid conflicts with
// user labels.
func AutoLabel(prefix string) *types.Sym {
	if prefix[0] != '.' {
		base.Fatalf("autolabel prefix must start with '.', have %q", prefix)
	}
	fn := ir.CurFunc
	if fn == nil {
		base.Fatalf("autolabel outside function")
	}
	// Each call consumes the function's next label number.
	id := fn.Label
	fn.Label++
	return LookupNum(prefix, int(id))
}
// Lookup returns the symbol for name in the package being compiled.
func Lookup(name string) *types.Sym {
	return types.LocalPkg.Lookup(name)
}
// loadsys loads the definitions for the low-level runtime functions,
// so that the compiler can generate calls to them,
// but does not make them visible to user code.
func loadsys() {
	types.Block = 1

	// Temporarily enable import mode and typechecking while the
	// runtime declarations are installed, restoring both afterwards.
	inimport = true
	TypecheckAllowed = true

	typs := runtimeTypes()
	for _, d := range &runtimeDecls {
		sym := ir.Pkgs.Runtime.Lookup(d.name)
		typ := typs[d.typ]
		switch d.tag {
		case funcTag:
			importfunc(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
		case varTag:
			importvar(ir.Pkgs.Runtime, src.NoXPos, sym, typ)
		default:
			base.Fatalf("unhandled declaration tag %v", d.tag)
		}
	}

	TypecheckAllowed = false
	inimport = false
}
// LookupRuntimeFunc looks up Go function name in package runtime. This function
// must follow the internal calling convention.
func LookupRuntimeFunc(name string) *obj.LSym {
	sym := ir.Pkgs.Runtime.Lookup(name)
	sym.SetFunc(true)
	return sym.Linksym()
}
// LookupRuntimeVar looks up a variable (or assembly function) name in package
// runtime. If this is a function, it may have a special calling
// convention. Unlike LookupRuntimeFunc, the symbol is not marked as a
// Go function.
func LookupRuntimeVar(name string) *obj.LSym {
	return ir.Pkgs.Runtime.Lookup(name).Linksym()
}

View file

@ -2,4 +2,11 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gc
//go:generate go run mkbuiltin.go
package typecheck
import "cmd/compile/internal/ir"
// Target is the package being compiled.
var Target *ir.Package

View file

@ -2,16 +2,31 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// TODO(gri) This file should probably become part of package types.
package gc
package typecheck
import (
"go/constant"
"cmd/compile/internal/base"
"cmd/compile/internal/ir"
"cmd/compile/internal/types"
"cmd/internal/src"
"go/constant"
)
var (
okfor [ir.OEND][]bool
iscmp [ir.OEND]bool
)
var (
okforeq [types.NTYPE]bool
okforadd [types.NTYPE]bool
okforand [types.NTYPE]bool
okfornone [types.NTYPE]bool
okforbool [types.NTYPE]bool
okforcap [types.NTYPE]bool
okforlen [types.NTYPE]bool
okforarith [types.NTYPE]bool
)
var basicTypes = [...]struct {
@ -169,7 +184,7 @@ func initUniverse() {
s = types.BuiltinPkg.Lookup("false")
s.Def = ir.NewConstAt(src.NoXPos, s, types.UntypedBool, constant.MakeBool(false))
s = lookup("_")
s = Lookup("_")
types.BlankSym = s
s.Block = -100
s.Def = NewName(s)
@ -186,7 +201,7 @@ func initUniverse() {
types.Types[types.TNIL] = types.New(types.TNIL)
s = types.BuiltinPkg.Lookup("nil")
nnil := nodnil()
nnil := NodNil()
nnil.(*ir.NilExpr).SetSym(s)
s.Def = nnil
@ -317,12 +332,12 @@ func makeErrorInterface() *types.Type {
sig := types.NewSignature(types.NoPkg, fakeRecvField(), nil, []*types.Field{
types.NewField(src.NoXPos, nil, types.Types[types.TSTRING]),
})
method := types.NewField(src.NoXPos, lookup("Error"), sig)
method := types.NewField(src.NoXPos, Lookup("Error"), sig)
return types.NewInterface(types.NoPkg, []*types.Field{method})
}
// finishUniverse makes the universe block visible within the current package.
func finishUniverse() {
// declareUniverse makes the universe block visible within the current package.
func declareUniverse() {
// Operationally, this is similar to a dot import of builtinpkg, except
// that we silently skip symbols that are already declared in the
// package block rather than emitting a redeclared symbol error.
@ -331,7 +346,7 @@ func finishUniverse() {
if s.Def == nil {
continue
}
s1 := lookup(s.Name)
s1 := Lookup(s.Name)
if s1.Def != nil {
continue
}
@ -340,7 +355,7 @@ func finishUniverse() {
s1.Block = s.Block
}
ir.RegFP = NewName(lookup(".fp"))
ir.RegFP = NewName(Lookup(".fp"))
ir.RegFP.SetType(types.Types[types.TINT32])
ir.RegFP.Class_ = ir.PPARAM
ir.RegFP.SetUsed(true)