cmd/compile: cleanup ir.Package
Decls used to contain the initializer statements for package-level variables, but now it contains only ir.Funcs. So we might as well rename it to Funcs and tighten its type to []*ir.Func. Similarly, Externs always contains *ir.Names, so its type can be constrained too.

Change-Id: I85b833e2f83d9d3559ab0ef8ab5d8324f4bc37b6
Reviewed-on: https://go-review.googlesource.com/c/go/+/517855
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Keith Randall <khr@golang.org>
Reviewed-by: Keith Randall <khr@google.com>
Auto-Submit: Matthew Dempsky <mdempsky@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
parent 59037ac93a
commit e7e5913161
16 changed files with 117 additions and 144 deletions
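The caller-side effect of the cleanup shows up repeatedly in the hunks below: passes that previously had to filter Decls for *ir.Func values can now range over Funcs directly. A minimal sketch of the before/after pattern (compileFunc is a hypothetical stand-in for whatever per-function pass runs):

// Before: Decls was []ir.Node, so per-function passes had to type-assert.
for _, n := range typecheck.Target.Decls {
	if fn, ok := n.(*ir.Func); ok {
		compileFunc(fn)
	}
}

// After: Funcs is []*ir.Func, so the filter and assertion disappear.
for _, fn := range typecheck.Target.Funcs {
	compileFunc(fn)
}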
@@ -53,11 +53,7 @@ func FixupVars() Names {
 		}
 	}

-	for _, n := range typecheck.Target.Externs {
-		nm, ok := n.(*ir.Name)
-		if !ok {
-			continue
-		}
+	for _, nm := range typecheck.Target.Externs {
 		s := nm.Sym()
 		switch s.Name {
 		case metaVarName:
@@ -108,8 +104,8 @@ func FixupVars() Names {
 // fixup. It adds calls to the pkg init function as appropriate to
 // register coverage-related variables with the runtime.
 func FixupInit(cnames Names) {
-	for _, n := range typecheck.Target.Decls {
-		if fn, ok := n.(*ir.Func); ok && ir.FuncName(fn) == "init" {
+	for _, fn := range typecheck.Target.Funcs {
+		if ir.FuncName(fn) == "init" {
 			cnames.InitFn = fn
 			break
 		}
@@ -115,7 +115,7 @@ type escape struct {
 	loopDepth int
 }

-func Funcs(all []ir.Node) {
+func Funcs(all []*ir.Func) {
 	ir.VisitFuncsBottomUp(all, Batch)
 }

@@ -233,7 +233,7 @@ func Main(archInit func(*ssagen.ArchInfo)) {
 	// We compute Addrtaken in bulk here.
 	// After this phase, we maintain Addrtaken incrementally.
 	if typecheck.DirtyAddrtaken {
-		typecheck.ComputeAddrtaken(typecheck.Target.Decls)
+		typecheck.ComputeAddrtaken(typecheck.Target.Funcs)
 		typecheck.DirtyAddrtaken = false
 	}
 	typecheck.IncrementalAddrtaken = true
@@ -254,7 +254,7 @@ func Main(archInit func(*ssagen.ArchInfo)) {
 		// TODO(prattmic): No need to use bottom-up visit order. This
 		// is mirroring the PGO IRGraph visit order, which also need
 		// not be bottom-up.
-		ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+		ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
 			for _, fn := range list {
 				devirtualize.ProfileGuided(fn, profile)
 			}
@@ -271,11 +271,9 @@ func Main(archInit func(*ssagen.ArchInfo)) {

 	// Devirtualize and get variable capture right in for loops
 	var transformed []loopvar.VarAndLoop
-	for _, n := range typecheck.Target.Decls {
-		if n.Op() == ir.ODCLFUNC {
-			devirtualize.Static(n.(*ir.Func))
-			transformed = append(transformed, loopvar.ForCapture(n.(*ir.Func))...)
-		}
+	for _, n := range typecheck.Target.Funcs {
+		devirtualize.Static(n)
+		transformed = append(transformed, loopvar.ForCapture(n)...)
 	}
 	ir.CurFunc = nil

@@ -297,7 +295,7 @@ func Main(archInit func(*ssagen.ArchInfo)) {
 	// Large values are also moved off stack in escape analysis;
 	// because large values may contain pointers, it must happen early.
 	base.Timer.Start("fe", "escapes")
-	escape.Funcs(typecheck.Target.Decls)
+	escape.Funcs(typecheck.Target.Funcs)

 	loopvar.LogTransformations(transformed)

@@ -315,15 +313,14 @@ func Main(archInit func(*ssagen.ArchInfo)) {
 	// Don't use range--walk can add functions to Target.Decls.
 	base.Timer.Start("be", "compilefuncs")
 	fcount := int64(0)
-	for i := 0; i < len(typecheck.Target.Decls); i++ {
-		if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
-			// Don't try compiling dead hidden closure.
-			if fn.IsDeadcodeClosure() {
-				continue
-			}
-			enqueueFunc(fn)
-			fcount++
-		}
+	for i := 0; i < len(typecheck.Target.Funcs); i++ {
+		fn := typecheck.Target.Funcs[i]
+		// Don't try compiling dead hidden closure.
+		if fn.IsDeadcodeClosure() {
+			continue
+		}
+		enqueueFunc(fn)
+		fcount++
 	}
 	base.Timer.AddEvent(fcount, "funcs")

@@ -111,7 +111,7 @@ func dumpCompilerObj(bout *bio.Writer) {

 func dumpdata() {
 	numExterns := len(typecheck.Target.Externs)
-	numDecls := len(typecheck.Target.Decls)
+	numDecls := len(typecheck.Target.Funcs)
 	dumpglobls(typecheck.Target.Externs)
 	reflectdata.CollectPTabs()
 	numExports := len(typecheck.Target.Exports)
@@ -131,15 +131,14 @@ func dumpdata() {
 	// In the typical case, we loop 0 or 1 times.
 	// It was not until issue 24761 that we found any code that required a loop at all.
 	for {
-		for i := numDecls; i < len(typecheck.Target.Decls); i++ {
-			if n, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
-				enqueueFunc(n)
-			}
+		for i := numDecls; i < len(typecheck.Target.Funcs); i++ {
+			fn := typecheck.Target.Funcs[i]
+			enqueueFunc(fn)
 		}
-		numDecls = len(typecheck.Target.Decls)
+		numDecls = len(typecheck.Target.Funcs)
 		compileFunctions()
 		reflectdata.WriteRuntimeTypes()
-		if numDecls == len(typecheck.Target.Decls) {
+		if numDecls == len(typecheck.Target.Funcs) {
 			break
 		}
 	}
@@ -201,7 +200,7 @@ func dumpGlobal(n *ir.Name) {
 	base.Ctxt.DwarfGlobal(base.Ctxt.Pkgpath, types.TypeSymName(n.Type()), n.Linksym())
 }

-func dumpGlobalConst(n ir.Node) {
+func dumpGlobalConst(n *ir.Name) {
 	// only export typed constants
 	t := n.Type()
 	if t == nil {
@@ -229,12 +228,12 @@ func dumpGlobalConst(n ir.Node) {
 	base.Ctxt.DwarfIntConst(base.Ctxt.Pkgpath, n.Sym().Name, types.TypeSymName(t), ir.IntVal(t, v))
 }

-func dumpglobls(externs []ir.Node) {
+func dumpglobls(externs []*ir.Name) {
 	// add globals
 	for _, n := range externs {
 		switch n.Op() {
 		case ir.ONAME:
-			dumpGlobal(n.(*ir.Name))
+			dumpGlobal(n)
 		case ir.OLITERAL:
 			dumpGlobalConst(n)
 		}
@@ -336,7 +335,7 @@ func dumpembeds() {
 	}
 }

-func addsignats(dcls []ir.Node) {
+func addsignats(dcls []*ir.Name) {
 	// copy types from dcl list to signatset
 	for _, n := range dcls {
 		if n.Op() == ir.OTYPE {
@@ -78,7 +78,7 @@ var (
 )

 // pgoInlinePrologue records the hot callsites from ir-graph.
-func pgoInlinePrologue(p *pgo.Profile, decls []ir.Node) {
+func pgoInlinePrologue(p *pgo.Profile, funcs []*ir.Func) {
 	if base.Debug.PGOInlineCDFThreshold != "" {
 		if s, err := strconv.ParseFloat(base.Debug.PGOInlineCDFThreshold, 64); err == nil && s >= 0 && s <= 100 {
 			inlineCDFHotCallSiteThresholdPercent = s
@@ -161,7 +161,7 @@ func InlinePackage(p *pgo.Profile) {
 		p = nil
 	}

-	InlineDecls(p, typecheck.Target.Decls, true)
+	InlineDecls(p, typecheck.Target.Funcs, true)

 	// Perform a garbage collection of hidden closures functions that
 	// are no longer reachable from top-level functions following
@@ -174,9 +174,9 @@ func InlinePackage(p *pgo.Profile) {
 }

 // InlineDecls applies inlining to the given batch of declarations.
-func InlineDecls(p *pgo.Profile, decls []ir.Node, doInline bool) {
+func InlineDecls(p *pgo.Profile, funcs []*ir.Func, doInline bool) {
 	if p != nil {
-		pgoInlinePrologue(p, decls)
+		pgoInlinePrologue(p, funcs)
 	}

 	doCanInline := func(n *ir.Func, recursive bool, numfns int) {
@@ -192,7 +192,7 @@ func InlineDecls(p *pgo.Profile, decls []ir.Node, doInline bool) {
 		}
 	}

-	ir.VisitFuncsBottomUp(decls, func(list []*ir.Func, recursive bool) {
+	ir.VisitFuncsBottomUp(funcs, func(list []*ir.Func, recursive bool) {
 		numfns := numNonClosures(list)
 		// We visit functions within an SCC in fairly arbitrary order,
 		// so by computing inlinability for all functions in the SCC
@@ -235,33 +235,31 @@ func garbageCollectUnreferencedHiddenClosures() {
 		})
 	}

-	for i := 0; i < len(typecheck.Target.Decls); i++ {
-		if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
-			if fn.IsHiddenClosure() {
-				continue
-			}
-			markLiveFuncs(fn)
-		}
+	for i := 0; i < len(typecheck.Target.Funcs); i++ {
+		fn := typecheck.Target.Funcs[i]
+		if fn.IsHiddenClosure() {
+			continue
+		}
+		markLiveFuncs(fn)
 	}

-	for i := 0; i < len(typecheck.Target.Decls); i++ {
-		if fn, ok := typecheck.Target.Decls[i].(*ir.Func); ok {
-			if !fn.IsHiddenClosure() {
-				continue
-			}
-			if fn.IsDeadcodeClosure() {
-				continue
-			}
-			if liveFuncs[fn] {
-				continue
-			}
-			fn.SetIsDeadcodeClosure(true)
-			if base.Flag.LowerM > 2 {
-				fmt.Printf("%v: unreferenced closure %v marked as dead\n", ir.Line(fn), fn)
-			}
-			if fn.Inl != nil && fn.LSym == nil {
-				ir.InitLSym(fn, true)
-			}
-		}
+	for i := 0; i < len(typecheck.Target.Funcs); i++ {
+		fn := typecheck.Target.Funcs[i]
+		if !fn.IsHiddenClosure() {
+			continue
+		}
+		if fn.IsDeadcodeClosure() {
+			continue
+		}
+		if liveFuncs[fn] {
+			continue
+		}
+		fn.SetIsDeadcodeClosure(true)
+		if base.Flag.LowerM > 2 {
+			fmt.Printf("%v: unreferenced closure %v marked as dead\n", ir.Line(fn), fn)
+		}
+		if fn.Inl != nil && fn.LSym == nil {
+			ir.InitLSym(fn, true)
+		}
 	}
 }
@@ -462,7 +462,7 @@ func UseClosure(clo *ClosureExpr, pkg *Package) Node {
 	}

 	if pkg != nil {
-		pkg.Decls = append(pkg.Decls, fn)
+		pkg.Funcs = append(pkg.Funcs, fn)
 	}

 	if false && IsTrivialClosure(clo) {
@@ -19,11 +19,13 @@ type Package struct {
 	// Init functions, listed in source order.
 	Inits []*Func

-	// Top-level declarations.
-	Decls []Node
+	// Funcs contains all (instantiated) functions, methods, and
+	// function literals to be compiled.
+	Funcs []*Func

-	// Extern (package global) declarations.
-	Externs []Node
+	// Externs holds constants, (non-generic) types, and variables
+	// declared at package scope.
+	Externs []*Name

 	// Assembly function declarations.
 	Asms []*Name
@@ -49,16 +49,13 @@ type bottomUpVisitor struct {
 // If recursive is false, the list consists of only a single function and its closures.
 // If recursive is true, the list may still contain only a single function,
 // if that function is itself recursive.
-func VisitFuncsBottomUp(list []Node, analyze func(list []*Func, recursive bool)) {
+func VisitFuncsBottomUp(list []*Func, analyze func(list []*Func, recursive bool)) {
 	var v bottomUpVisitor
 	v.analyze = analyze
 	v.nodeID = make(map[*Func]uint32)
 	for _, n := range list {
-		if n.Op() == ODCLFUNC {
-			n := n.(*Func)
-			if !n.IsHiddenClosure() {
-				v.visit(n)
-			}
+		if !n.IsHiddenClosure() {
+			v.visit(n)
 		}
 	}
 }
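Callers now hand the typed slice straight to the visitor, as the gc, inline, and pgo hunks in this commit do. A hedged usage sketch of the new signature:

ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
	// list holds one batch of functions visited bottom-up; recursive reports
	// whether they form a recursive cycle (possibly a single self-recursive function).
	for _, fn := range list {
		_ = fn // analyze fn here
	}
})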
@@ -3015,7 +3015,14 @@ func (r *reader) tempCopy(pos src.XPos, expr ir.Node, init *ir.Nodes) *ir.Name {
 	assign.Def = true
 	tmp.Defn = assign

-	typecheck.Target.Decls = append(typecheck.Target.Decls, typecheck.Stmt(assign))
+	// TODO(mdempsky): This code doesn't work anymore, because we now
+	// rely on types2 to compute InitOrder. If it's going to be used
+	// for testing again, the assignment here probably needs to be
+	// added to typecheck.Target.InitOrder somewhere.
+	//
+	// Probably just easier to address the escape analysis limitation.
+	//
+	// typecheck.Target.Decls = append(typecheck.Target.Decls, typecheck.Stmt(assign))

 	return tmp
 }
@@ -3353,14 +3360,14 @@ func (r *reader) pkgDecls(target *ir.Package) {
 		case declFunc:
 			names := r.pkgObjs(target)
 			assert(len(names) == 1)
-			target.Decls = append(target.Decls, names[0].Func)
+			target.Funcs = append(target.Funcs, names[0].Func)

 		case declMethod:
 			typ := r.typ()
 			_, sym := r.selector()

 			method := typecheck.Lookdot1(nil, sym, typ, typ.Methods(), 0)
-			target.Decls = append(target.Decls, method.Nname.(*ir.Name).Func)
+			target.Funcs = append(target.Funcs, method.Nname.(*ir.Name).Func)

 		case declVar:
 			names := r.pkgObjs(target)
@@ -3629,7 +3636,7 @@ func expandInline(fn *ir.Func, pri pkgReaderIndex) {
 	// with the same information some other way.

 	fndcls := len(fn.Dcl)
-	topdcls := len(typecheck.Target.Decls)
+	topdcls := len(typecheck.Target.Funcs)

 	tmpfn := ir.NewFunc(fn.Pos())
 	tmpfn.Nname = ir.NewNameAt(fn.Nname.Pos(), fn.Sym())
@@ -3661,7 +3668,7 @@ func expandInline(fn *ir.Func, pri pkgReaderIndex) {
 	// typecheck.Stmts may have added function literals to
 	// typecheck.Target.Decls. Remove them again so we don't risk trying
 	// to compile them multiple times.
-	typecheck.Target.Decls = typecheck.Target.Decls[:topdcls]
+	typecheck.Target.Funcs = typecheck.Target.Funcs[:topdcls]
 }

 // usedLocals returns a set of local variables that are used within body.
@@ -3925,7 +3932,7 @@ func finishWrapperFunc(fn *ir.Func, target *ir.Package) {
 		}
 	})

-	target.Decls = append(target.Decls, fn)
+	target.Funcs = append(target.Funcs, fn)
 }

 // newWrapperType returns a copy of the given signature type, but with
@@ -91,27 +91,17 @@ func unified(m posMap, noders []*noder) {
 	r := localPkgReader.newReader(pkgbits.RelocMeta, pkgbits.PrivateRootIdx, pkgbits.SyncPrivate)
 	r.pkgInit(types.LocalPkg, target)

-	// Type-check any top-level assignments. We ignore non-assignments
-	// here because other declarations are typechecked as they're
-	// constructed.
-	for i, ndecls := 0, len(target.Decls); i < ndecls; i++ {
-		switch n := target.Decls[i]; n.Op() {
-		case ir.OAS, ir.OAS2:
-			target.Decls[i] = typecheck.Stmt(n)
-		}
-	}
-
 	readBodies(target, false)

 	// Check that nothing snuck past typechecking.
-	for _, n := range target.Decls {
-		if n.Typecheck() == 0 {
-			base.FatalfAt(n.Pos(), "missed typecheck: %v", n)
+	for _, fn := range target.Funcs {
+		if fn.Typecheck() == 0 {
+			base.FatalfAt(fn.Pos(), "missed typecheck: %v", fn)
 		}

 		// For functions, check that at least their first statement (if
 		// any) was typechecked too.
-		if fn, ok := n.(*ir.Func); ok && len(fn.Body) != 0 {
+		if len(fn.Body) != 0 {
 			if stmt := fn.Body[0]; stmt.Typecheck() == 0 {
 				base.FatalfAt(stmt.Pos(), "missed typecheck: %v", stmt)
 			}
@@ -120,11 +110,9 @@ func unified(m posMap, noders []*noder) {

 	// For functions originally came from package runtime,
 	// mark as norace to prevent instrumenting, see issue #60439.
-	for _, n := range target.Decls {
-		if fn, ok := n.(*ir.Func); ok {
-			if !base.Flag.CompilingRuntime && types.IsRuntimePkg(fn.Sym().Pkg) {
-				fn.Pragma |= ir.Norace
-			}
+	for _, fn := range target.Funcs {
+		if !base.Flag.CompilingRuntime && types.IsRuntimePkg(fn.Sym().Pkg) {
+			fn.Pragma |= ir.Norace
 		}
 	}

@@ -138,7 +126,7 @@ func unified(m posMap, noders []*noder) {
 // necessary on instantiations of imported generic functions, so their
 // inlining costs can be computed.
 func readBodies(target *ir.Package, duringInlining bool) {
-	var inlDecls []ir.Node
+	var inlDecls []*ir.Func

 	// Don't use range--bodyIdx can add closures to todoBodies.
 	for {
@@ -175,7 +163,7 @@ func readBodies(target *ir.Package, duringInlining bool) {
 			if duringInlining && canSkipNonGenericMethod {
 				inlDecls = append(inlDecls, fn)
 			} else {
-				target.Decls = append(target.Decls, fn)
+				target.Funcs = append(target.Funcs, fn)
 			}
 		}

@@ -208,7 +196,7 @@ func readBodies(target *ir.Package, duringInlining bool) {
 		base.Flag.LowerM = oldLowerM

 		for _, fn := range inlDecls {
-			fn.(*ir.Func).Body = nil // free memory
+			fn.Body = nil // free memory
 		}
 	}
 }
@@ -261,7 +261,7 @@ func (p *Profile) processprofileGraph(g *graph.Graph) error {
 // of a package.
 func (p *Profile) initializeIRGraph() {
 	// Bottomup walk over the function to create IRGraph.
-	ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+	ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
 		for _, fn := range list {
 			p.VisitIR(fn)
 		}
@@ -467,7 +467,7 @@ func (p *Profile) PrintWeightedCallGraphDOT(edgeThreshold float64) {

 	// List of functions in this package.
 	funcs := make(map[string]struct{})
-	ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+	ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
 		for _, f := range list {
 			name := ir.LinkFuncName(f)
 			funcs[name] = struct{}{}
@@ -511,7 +511,7 @@ func (p *Profile) PrintWeightedCallGraphDOT(edgeThreshold float64) {
 		}
 	}
 	// Print edges.
-	ir.VisitFuncsBottomUp(typecheck.Target.Decls, func(list []*ir.Func, recursive bool) {
+	ir.VisitFuncsBottomUp(typecheck.Target.Funcs, func(list []*ir.Func, recursive bool) {
 		for _, f := range list {
 			name := ir.LinkFuncName(f)
 			if n, ok := p.WeightedCG.IRNodes[name]; ok {
@@ -56,7 +56,7 @@ func MakeInit() {
 	ir.WithFunc(fn, func() {
 		typecheck.Stmts(nf)
 	})
-	typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+	typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)
 	if base.Debug.WrapGlobalMapDbg > 1 {
 		fmt.Fprintf(os.Stderr, "=-= len(newfuncs) is %d for %v\n",
 			len(newfuncs), fn)
@@ -65,7 +65,7 @@ func MakeInit() {
 		if base.Debug.WrapGlobalMapDbg > 1 {
 			fmt.Fprintf(os.Stderr, "=-= add to target.decls %v\n", nfn)
 		}
-		typecheck.Target.Decls = append(typecheck.Target.Decls, ir.Node(nfn))
+		typecheck.Target.Funcs = append(typecheck.Target.Funcs, nfn)
 	}

 	// Prepend to Inits, so it runs first, before any user-declared init
@@ -150,7 +150,7 @@ func Task() *ir.Name {
 		typecheck.Stmts(fnInit.Body)
 		ir.CurFunc = nil

-		typecheck.Target.Decls = append(typecheck.Target.Decls, fnInit)
+		typecheck.Target.Funcs = append(typecheck.Target.Funcs, fnInit)
 		typecheck.Target.Inits = append(typecheck.Target.Inits, fnInit)
 	}
 }
@@ -242,7 +242,7 @@ func hashFunc(t *types.Type) *ir.Func {
 	})

 	fn.SetNilCheckDisabled(true)
-	typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+	typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)

 	return fn
 }
@@ -630,7 +630,7 @@ func eqFunc(t *types.Type) *ir.Func {
 	// neither of which can be nil, and our comparisons
 	// are shallow.
 	fn.SetNilCheckDisabled(true)
-	typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+	typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)
 	return fn
 }

@@ -125,11 +125,7 @@ func (s *SymABIs) GenABIWrappers() {
 	// This may generate new decls for the wrappers, but we
 	// specifically *don't* want to visit those, lest we create
 	// wrappers for wrappers.
-	for _, fn := range typecheck.Target.Decls {
-		if fn.Op() != ir.ODCLFUNC {
-			continue
-		}
-		fn := fn.(*ir.Func)
+	for _, fn := range typecheck.Target.Funcs {
 		nam := fn.Nname
 		if ir.IsBlank(nam) {
 			continue
@@ -332,7 +328,7 @@ func makeABIWrapper(f *ir.Func, wrapperABI obj.ABI) {
 	ir.CurFunc = fn
 	typecheck.Stmts(fn.Body)

-	typecheck.Target.Decls = append(typecheck.Target.Decls, fn)
+	typecheck.Target.Funcs = append(typecheck.Target.Funcs, fn)

 	// Restore previous context.
 	base.Pos = savepos
@@ -56,11 +56,8 @@ func newNowritebarrierrecChecker() *nowritebarrierrecChecker {
 	// important to handle it for this check, so we model it
 	// directly. This has to happen before transforming closures in walk since
 	// it's a lot harder to work out the argument after.
-	for _, n := range typecheck.Target.Decls {
-		if n.Op() != ir.ODCLFUNC {
-			continue
-		}
-		c.curfn = n.(*ir.Func)
+	for _, n := range typecheck.Target.Funcs {
+		c.curfn = n
 		if c.curfn.ABIWrapper() {
 			// We only want "real" calls to these
 			// functions, not the generated ones within
@@ -139,12 +136,7 @@ func (c *nowritebarrierrecChecker) check() {
 	// q is the queue of ODCLFUNC Nodes to visit in BFS order.
 	var q ir.NameQueue

-	for _, n := range typecheck.Target.Decls {
-		if n.Op() != ir.ODCLFUNC {
-			continue
-		}
-		fn := n.(*ir.Func)
-
+	for _, fn := range typecheck.Target.Funcs {
 		symToFunc[fn.LSym] = fn

 		// Make nowritebarrierrec functions BFS roots.
@@ -96,25 +96,26 @@ var IncrementalAddrtaken = false
 // have not yet been marked as Addrtaken.
 var DirtyAddrtaken = false

-func ComputeAddrtaken(top []ir.Node) {
-	for _, n := range top {
-		var doVisit func(n ir.Node)
-		doVisit = func(n ir.Node) {
-			if n.Op() == ir.OADDR {
-				if x := ir.OuterValue(n.(*ir.AddrExpr).X); x.Op() == ir.ONAME {
-					x.Name().SetAddrtaken(true)
-					if x.Name().IsClosureVar() {
-						// Mark the original variable as Addrtaken so that capturevars
-						// knows not to pass it by value.
-						x.Name().Defn.Name().SetAddrtaken(true)
-					}
-				}
-			}
-			if n.Op() == ir.OCLOSURE {
-				ir.VisitList(n.(*ir.ClosureExpr).Func.Body, doVisit)
-			}
-		}
-		ir.Visit(n, doVisit)
-	}
-}
+func ComputeAddrtaken(funcs []*ir.Func) {
+	var doVisit func(n ir.Node)
+	doVisit = func(n ir.Node) {
+		if n.Op() == ir.OADDR {
+			if x := ir.OuterValue(n.(*ir.AddrExpr).X); x.Op() == ir.ONAME {
+				x.Name().SetAddrtaken(true)
+				if x.Name().IsClosureVar() {
+					// Mark the original variable as Addrtaken so that capturevars
+					// knows not to pass it by value.
+					x.Name().Defn.Name().SetAddrtaken(true)
+				}
+			}
+		}
+		if n.Op() == ir.OCLOSURE {
+			ir.VisitList(n.(*ir.ClosureExpr).Func.Body, doVisit)
+		}
+	}
+
+	for _, fn := range funcs {
+		ir.Visit(fn, doVisit)
+	}
+}
