mirror of https://github.com/golang/go.git
[dev.regabi] cmd/compile: split out package liveness [generated]
[git-generate]
cd src/cmd/compile/internal/gc
rf '
	# AutoVar is essentially an ssa helper; move it there.
	mv AutoVar value.go
	mv value.go cmd/compile/internal/ssa

	# Export liveness API and unexport non-API.
	mv LivenessMap Map
	mv Map.vals Map.Vals
	mv Map.deferreturn Map.DeferReturn
	mv livenessShouldTrack ShouldTrack
	mv onebitwalktype1 SetTypeBits
	mv allUnsafe IsUnsafe
	mv liveness Compute
	mv BlockEffects blockEffects
	mv Liveness liveness
	mv liveness _liveness # make room for import
	mv emitptrargsmap WriteFuncMap
	mv WriteFuncMap plive.go

	mv bvset.go plive.go cmd/compile/internal/liveness
'
cd ../liveness
rf '
	mv _liveness liveness
'

Change-Id: I3b86e5025bd9d32a7e19f44714fa16be4125059e
Reviewed-on: https://go-review.googlesource.com/c/go/+/279311
Trust: Russ Cox <rsc@golang.org>
Run-TryBot: Russ Cox <rsc@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
parent 0ced54062e
commit 071ab0a14c

6 changed files with 121 additions and 117 deletions
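The file headers are not shown in this view; the hunks appear to touch gc's pgen.go, reflect.go, and ssa.go, plus bvset.go and plive.go (moved into the new cmd/compile/internal/liveness package) and value.go (moved into cmd/compile/internal/ssa), matching the rf script above. For orientation, this is roughly the API surface the new package exports afterwards, a signature-only sketch inferred from the rf renames and the call sites in the diff, not a compilable file:

    package liveness

    // Map maps an *ssa.Value (by ID) to its objw.LivenessIndex.
    type Map struct {
        Vals        map[ssa.ID]objw.LivenessIndex
        DeferReturn objw.LivenessIndex // stack map for the open-coded deferreturn call
    }
    func (m Map) Get(v *ssa.Value) objw.LivenessIndex

    // Compute was gc.liveness: run the analysis for curfn and return its map.
    func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) Map

    // ShouldTrack was livenessShouldTrack; SetTypeBits was onebitwalktype1;
    // IsUnsafe was allUnsafe; WriteFuncMap was gc.emitptrargsmap.
    func ShouldTrack(nn ir.Node) bool
    func SetTypeBits(t *types.Type, off int64, bv bitvec.BitVec)
    func IsUnsafe(f *ssa.Func) bool
    func WriteFuncMap(fn *ir.Func)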
@@ -6,8 +6,8 @@ package gc
 import (
     "cmd/compile/internal/base"
-    "cmd/compile/internal/bitvec"
     "cmd/compile/internal/ir"
+    "cmd/compile/internal/liveness"
     "cmd/compile/internal/objw"
     "cmd/compile/internal/ssa"
     "cmd/compile/internal/typecheck"

@@ -30,36 +30,6 @@ var (
     compilequeue []*ir.Func // functions waiting to be compiled
 )

-func emitptrargsmap(fn *ir.Func) {
-    if ir.FuncName(fn) == "_" || fn.Sym().Linkname != "" {
-        return
-    }
-    lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
-    nptr := int(fn.Type().ArgWidth() / int64(types.PtrSize))
-    bv := bitvec.New(int32(nptr) * 2)
-    nbitmap := 1
-    if fn.Type().NumResults() > 0 {
-        nbitmap = 2
-    }
-    off := objw.Uint32(lsym, 0, uint32(nbitmap))
-    off = objw.Uint32(lsym, off, uint32(bv.N))
-
-    if ir.IsMethod(fn) {
-        onebitwalktype1(fn.Type().Recvs(), 0, bv)
-    }
-    if fn.Type().NumParams() > 0 {
-        onebitwalktype1(fn.Type().Params(), 0, bv)
-    }
-    off = objw.BitVec(lsym, off, bv)
-
-    if fn.Type().NumResults() > 0 {
-        onebitwalktype1(fn.Type().Results(), 0, bv)
-        off = objw.BitVec(lsym, off, bv)
-    }
-
-    objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
-}
-
 // cmpstackvarlt reports whether the stack variable a sorts before b.
 //
 // Sort the list of stack variables. Autos after anything else,

@@ -213,7 +183,7 @@ func funccompile(fn *ir.Func) {
     if len(fn.Body) == 0 {
         // Initialize ABI wrappers if necessary.
         initLSym(fn, false)
-        emitptrargsmap(fn)
+        liveness.WriteFuncMap(fn)
         return
     }

@@ -254,7 +224,7 @@ func compile(fn *ir.Func) {
     for _, n := range fn.Dcl {
         switch n.Class_ {
         case ir.PPARAM, ir.PPARAMOUT, ir.PAUTO:
-            if livenessShouldTrack(n) && n.Addrtaken() {
+            if liveness.ShouldTrack(n) && n.Addrtaken() {
                 dtypesym(n.Type())
                 // Also make sure we allocate a linker symbol
                 // for the stack object data, for the same reason.
@@ -8,6 +8,7 @@ import (
     "cmd/compile/internal/base"
     "cmd/compile/internal/bitvec"
     "cmd/compile/internal/ir"
+    "cmd/compile/internal/liveness"
     "cmd/compile/internal/objw"
     "cmd/compile/internal/typecheck"
     "cmd/compile/internal/types"

@@ -1591,7 +1592,7 @@ func fillptrmask(t *types.Type, ptrmask []byte) {
     }

     vec := bitvec.New(8 * int32(len(ptrmask)))
-    onebitwalktype1(t, 0, vec)
+    liveness.SetTypeBits(t, 0, vec)

     nptr := types.PtrDataSize(t) / int64(types.PtrSize)
     for i := int64(0); i < nptr; i++ {
@@ -18,6 +18,7 @@ import (
     "bytes"
     "cmd/compile/internal/base"
     "cmd/compile/internal/ir"
+    "cmd/compile/internal/liveness"
     "cmd/compile/internal/objw"
     "cmd/compile/internal/ssa"
     "cmd/compile/internal/typecheck"

@@ -6315,7 +6316,7 @@ type SSAGenState struct {

     // Map from GC safe points to liveness index, generated by
     // liveness analysis.
-    livenessMap LivenessMap
+    livenessMap liveness.Map

     // lineRunStart records the beginning of the current run of instructions
     // within a single block sharing the same line number

@@ -6401,7 +6402,7 @@ func (s byXoffset) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
 func emitStackObjects(e *ssafn, pp *objw.Progs) {
     var vars []*ir.Name
     for _, n := range e.curfn.Dcl {
-        if livenessShouldTrack(n) && n.Addrtaken() {
+        if liveness.ShouldTrack(n) && n.Addrtaken() {
             vars = append(vars, n)
         }
     }

@@ -6448,7 +6449,7 @@ func genssa(f *ssa.Func, pp *objw.Progs) {

     e := f.Frontend().(*ssafn)

-    s.livenessMap = liveness(e.curfn, f, e.stkptrsize, pp)
+    s.livenessMap = liveness.Compute(e.curfn, f, e.stkptrsize, pp)
     emitStackObjects(e, pp)

     openDeferInfo := e.curfn.LSym.Func().OpenCodedDeferInfo

@@ -6519,7 +6520,7 @@ func genssa(f *ssa.Func, pp *objw.Progs) {
         // instruction. We won't use the actual liveness map on a
         // control instruction. Just mark it something that is
         // preemptible, unless this function is "all unsafe".
-        s.pp.NextLive = objw.LivenessIndex{StackMapIndex: -1, IsUnsafePoint: allUnsafe(f)}
+        s.pp.NextLive = objw.LivenessIndex{StackMapIndex: -1, IsUnsafePoint: liveness.IsUnsafe(f)}

         // Emit values in block
         thearch.SSAMarkMoves(&s, b)

@@ -6624,7 +6625,7 @@ func genssa(f *ssa.Func, pp *objw.Progs) {
         // When doing open-coded defers, generate a disconnected call to
         // deferreturn and a return. This will be used to during panic
         // recovery to unwind the stack and return back to the runtime.
-        s.pp.NextLive = s.livenessMap.deferreturn
+        s.pp.NextLive = s.livenessMap.DeferReturn
         gencallret(pp, ir.Syms.Deferreturn)
     }

@@ -7012,18 +7013,8 @@ func CheckLoweredGetClosurePtr(v *ssa.Value) {
     }
 }

-// AutoVar returns a *Name and int64 representing the auto variable and offset within it
-// where v should be spilled.
-func AutoVar(v *ssa.Value) (*ir.Name, int64) {
-    loc := v.Block.Func.RegAlloc[v.ID].(ssa.LocalSlot)
-    if v.Type.Size() > loc.Type.Size() {
-        v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type)
-    }
-    return loc.N, loc.Off
-}
-
 func AddrAuto(a *obj.Addr, v *ssa.Value) {
-    n, off := AutoVar(v)
+    n, off := ssa.AutoVar(v)
     a.Type = obj.TYPE_MEM
     a.Sym = n.Sym().Linksym()
     a.Reg = int16(thearch.REGSP)
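Taken together, the hunks above show the consumer side in genssa: the analysis is requested once per function through the new package, and the resulting map drives the liveness indexes attached to emitted instructions. A condensed sketch of that flow, with fragments lifted from the diff rather than a complete function (the per-value Get lookup is not part of this diff but follows from Map's exported Get method):

    // Once per function: run the analysis and keep its map.
    s.livenessMap = liveness.Compute(e.curfn, f, e.stkptrsize, pp)

    // Per safe-point value: fetch its index ("don't care" if never recorded).
    s.pp.NextLive = s.livenessMap.Get(v)

    // Block-end control instructions: just mark them preemptible, unless the
    // whole function is one unsafe region.
    s.pp.NextLive = objw.LivenessIndex{StackMapIndex: -1, IsUnsafePoint: liveness.IsUnsafe(f)}

    // Open-coded defers: the disconnected deferreturn call reuses the
    // dedicated index carried on the map.
    s.pp.NextLive = s.livenessMap.DeferReturn
    gencallret(pp, ir.Syms.Deferreturn)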
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.

-package gc
+package liveness

 import "cmd/compile/internal/bitvec"
@@ -12,9 +12,13 @@
 //
 // Each level includes the earlier output as well.

-package gc
+package liveness

 import (
+    "crypto/md5"
+    "fmt"
+    "strings"
+
     "cmd/compile/internal/base"
     "cmd/compile/internal/bitvec"
     "cmd/compile/internal/ir"

@@ -23,9 +27,6 @@ import (
     "cmd/compile/internal/types"
     "cmd/internal/obj"
     "cmd/internal/objabi"
-    "crypto/md5"
-    "fmt"
-    "strings"
 )

 // OpVarDef is an annotation for the liveness analysis, marking a place

@@ -83,8 +84,8 @@ import (
 // so the compiler can allocate two temps to the same location. Here it's now
 // useless, since the implementation of stack objects.

-// BlockEffects summarizes the liveness effects on an SSA block.
-type BlockEffects struct {
+// blockEffects summarizes the liveness effects on an SSA block.
+type blockEffects struct {
     // Computed during Liveness.prologue using only the content of
     // individual blocks:
     //

@@ -102,14 +103,14 @@ type BlockEffects struct {
 }

 // A collection of global state used by liveness analysis.
-type Liveness struct {
+type liveness struct {
     fn         *ir.Func
     f          *ssa.Func
     vars       []*ir.Name
     idx        map[*ir.Name]int32
     stkptrsize int64

-    be []BlockEffects
+    be []blockEffects

     // allUnsafe indicates that all points in this function are
     // unsafe-points.

@@ -127,40 +128,40 @@ type Liveness struct {

     // livenessMap maps from safe points (i.e., CALLs) to their
     // liveness map indexes.
-    livenessMap LivenessMap
+    livenessMap Map
     stackMapSet bvecSet
     stackMaps   []bitvec.BitVec

     cache progeffectscache
 }

-// LivenessMap maps from *ssa.Value to LivenessIndex.
-type LivenessMap struct {
-    vals map[ssa.ID]objw.LivenessIndex
-    // The set of live, pointer-containing variables at the deferreturn
+// Map maps from *ssa.Value to LivenessIndex.
+type Map struct {
+    Vals map[ssa.ID]objw.LivenessIndex
+    // The set of live, pointer-containing variables at the DeferReturn
     // call (only set when open-coded defers are used).
-    deferreturn objw.LivenessIndex
+    DeferReturn objw.LivenessIndex
 }

-func (m *LivenessMap) reset() {
-    if m.vals == nil {
-        m.vals = make(map[ssa.ID]objw.LivenessIndex)
+func (m *Map) reset() {
+    if m.Vals == nil {
+        m.Vals = make(map[ssa.ID]objw.LivenessIndex)
     } else {
-        for k := range m.vals {
-            delete(m.vals, k)
+        for k := range m.Vals {
+            delete(m.Vals, k)
         }
     }
-    m.deferreturn = objw.LivenessDontCare
+    m.DeferReturn = objw.LivenessDontCare
 }

-func (m *LivenessMap) set(v *ssa.Value, i objw.LivenessIndex) {
-    m.vals[v.ID] = i
+func (m *Map) set(v *ssa.Value, i objw.LivenessIndex) {
+    m.Vals[v.ID] = i
 }

-func (m LivenessMap) Get(v *ssa.Value) objw.LivenessIndex {
+func (m Map) Get(v *ssa.Value) objw.LivenessIndex {
     // If v isn't in the map, then it's a "don't care" and not an
     // unsafe-point.
-    if idx, ok := m.vals[v.ID]; ok {
+    if idx, ok := m.Vals[v.ID]; ok {
         return idx
     }
     return objw.LivenessIndex{StackMapIndex: objw.StackMapDontCare, IsUnsafePoint: false}
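Note the contract Get establishes above: a value that was never recorded comes back as a "don't care" stack map index that is also not an unsafe-point, so callers never need a separate presence check. A tiny standalone analogue of that behavior with simplified stand-in types (ordinary runnable Go, not the compiler's ssa/objw types):

    package main

    import "fmt"

    type ID int32 // stand-in for ssa.ID

    // LivenessIndex is a stand-in for objw.LivenessIndex.
    type LivenessIndex struct {
        StackMapIndex int32
        IsUnsafePoint bool
    }

    const StackMapDontCare = -1

    // Map mirrors the shape of liveness.Map: recorded entries plus a
    // dedicated index for the open-coded deferreturn call.
    type Map struct {
        Vals        map[ID]LivenessIndex
        DeferReturn LivenessIndex
    }

    // Get returns the recorded index, or a "don't care, not unsafe" index
    // for values that were never recorded.
    func (m Map) Get(id ID) LivenessIndex {
        if idx, ok := m.Vals[id]; ok {
            return idx
        }
        return LivenessIndex{StackMapIndex: StackMapDontCare, IsUnsafePoint: false}
    }

    func main() {
        m := Map{Vals: map[ID]LivenessIndex{7: {StackMapIndex: 3}}}
        fmt.Println(m.Get(7)) // {3 false}  recorded safe point
        fmt.Println(m.Get(8)) // {-1 false} never recorded: don't care
    }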
@@ -172,13 +173,13 @@ type progeffectscache struct {
     initialized bool
 }

-// livenessShouldTrack reports whether the liveness analysis
+// ShouldTrack reports whether the liveness analysis
 // should track the variable n.
 // We don't care about variables that have no pointers,
 // nor do we care about non-local variables,
 // nor do we care about empty structs (handled by the pointer check),
 // nor do we care about the fake PAUTOHEAP variables.
-func livenessShouldTrack(nn ir.Node) bool {
+func ShouldTrack(nn ir.Node) bool {
     if nn.Op() != ir.ONAME {
         return false
     }

@@ -191,7 +192,7 @@ func livenessShouldTrack(nn ir.Node) bool {
 func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
     var vars []*ir.Name
     for _, n := range fn.Dcl {
-        if livenessShouldTrack(n) {
+        if ShouldTrack(n) {
             vars = append(vars, n)
         }
     }

@@ -202,7 +203,7 @@ func getvariables(fn *ir.Func) ([]*ir.Name, map[*ir.Name]int32) {
     return vars, idx
 }

-func (lv *Liveness) initcache() {
+func (lv *liveness) initcache() {
     if lv.cache.initialized {
         base.Fatalf("liveness cache initialized twice")
         return

@@ -246,7 +247,7 @@ const (
 // valueEffects returns the index of a variable in lv.vars and the
 // liveness effects v has on that variable.
 // If v does not affect any tracked variables, it returns -1, 0.
-func (lv *Liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
+func (lv *liveness) valueEffects(v *ssa.Value) (int32, liveEffect) {
     n, e := affectedNode(v)
     if e == 0 || n == nil || n.Op() != ir.ONAME { // cheapest checks first
         return -1, 0

@@ -293,10 +294,10 @@ func affectedNode(v *ssa.Value) (ir.Node, ssa.SymEffect) {
     // Special cases.
     switch v.Op {
     case ssa.OpLoadReg:
-        n, _ := AutoVar(v.Args[0])
+        n, _ := ssa.AutoVar(v.Args[0])
         return n, ssa.SymRead
     case ssa.OpStoreReg:
-        n, _ := AutoVar(v)
+        n, _ := ssa.AutoVar(v)
         return n, ssa.SymWrite

     case ssa.OpVarLive:

@@ -304,7 +305,7 @@ func affectedNode(v *ssa.Value) (ir.Node, ssa.SymEffect) {
     case ssa.OpVarDef, ssa.OpVarKill:
         return v.Aux.(*ir.Name), ssa.SymWrite
     case ssa.OpKeepAlive:
-        n, _ := AutoVar(v.Args[0])
+        n, _ := ssa.AutoVar(v.Args[0])
         return n, ssa.SymRead
     }

@@ -326,15 +327,15 @@ func affectedNode(v *ssa.Value) (ir.Node, ssa.SymEffect) {
 }

 type livenessFuncCache struct {
-    be          []BlockEffects
-    livenessMap LivenessMap
+    be          []blockEffects
+    livenessMap Map
 }

 // Constructs a new liveness structure used to hold the global state of the
 // liveness computation. The cfg argument is a slice of *BasicBlocks and the
 // vars argument is a slice of *Nodes.
-func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *Liveness {
-    lv := &Liveness{
+func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int32, stkptrsize int64) *liveness {
+    lv := &liveness{
         fn:         fn,
         f:          f,
         vars:       vars,

@@ -352,11 +353,11 @@ func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int
         if cap(lc.be) >= f.NumBlocks() {
             lv.be = lc.be[:f.NumBlocks()]
         }
-        lv.livenessMap = LivenessMap{vals: lc.livenessMap.vals, deferreturn: objw.LivenessDontCare}
-        lc.livenessMap.vals = nil
+        lv.livenessMap = Map{Vals: lc.livenessMap.Vals, DeferReturn: objw.LivenessDontCare}
+        lc.livenessMap.Vals = nil
     }
     if lv.be == nil {
-        lv.be = make([]BlockEffects, f.NumBlocks())
+        lv.be = make([]blockEffects, f.NumBlocks())
     }

     nblocks := int32(len(f.Blocks))

@@ -376,14 +377,14 @@ func newliveness(fn *ir.Func, f *ssa.Func, vars []*ir.Name, idx map[*ir.Name]int
     return lv
 }

-func (lv *Liveness) blockEffects(b *ssa.Block) *BlockEffects {
+func (lv *liveness) blockEffects(b *ssa.Block) *blockEffects {
     return &lv.be[b.ID]
 }

 // NOTE: The bitmap for a specific type t could be cached in t after
 // the first run and then simply copied into bv at the correct offset
 // on future calls with the same type t.
-func onebitwalktype1(t *types.Type, off int64, bv bitvec.BitVec) {
+func SetTypeBits(t *types.Type, off int64, bv bitvec.BitVec) {
     if t.Align > 0 && off&int64(t.Align-1) != 0 {
         base.Fatalf("onebitwalktype1: invalid initial alignment: type %v has alignment %d, but offset is %v", t, t.Align, off)
     }

@@ -442,13 +443,13 @@ func onebitwalktype1(t *types.Type, off int64, bv bitvec.BitVec) {
             break
         }
         for i := int64(0); i < t.NumElem(); i++ {
-            onebitwalktype1(elt, off, bv)
+            SetTypeBits(elt, off, bv)
             off += elt.Width
         }

     case types.TSTRUCT:
         for _, f := range t.Fields().Slice() {
-            onebitwalktype1(f.Type, off+f.Offset, bv)
+            SetTypeBits(f.Type, off+f.Offset, bv)
         }

     default:
|
||||||
// Generates live pointer value maps for arguments and local variables. The
|
// Generates live pointer value maps for arguments and local variables. The
|
||||||
// this argument and the in arguments are always assumed live. The vars
|
// this argument and the in arguments are always assumed live. The vars
|
||||||
// argument is a slice of *Nodes.
|
// argument is a slice of *Nodes.
|
||||||
func (lv *Liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
|
func (lv *liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, locals bitvec.BitVec) {
|
||||||
for i := int32(0); ; i++ {
|
for i := int32(0); ; i++ {
|
||||||
i = liveout.Next(i)
|
i = liveout.Next(i)
|
||||||
if i < 0 {
|
if i < 0 {
|
||||||
|
|
@ -468,17 +469,17 @@ func (lv *Liveness) pointerMap(liveout bitvec.BitVec, vars []*ir.Name, args, loc
|
||||||
node := vars[i]
|
node := vars[i]
|
||||||
switch node.Class_ {
|
switch node.Class_ {
|
||||||
case ir.PAUTO:
|
case ir.PAUTO:
|
||||||
onebitwalktype1(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
|
SetTypeBits(node.Type(), node.FrameOffset()+lv.stkptrsize, locals)
|
||||||
|
|
||||||
case ir.PPARAM, ir.PPARAMOUT:
|
case ir.PPARAM, ir.PPARAMOUT:
|
||||||
onebitwalktype1(node.Type(), node.FrameOffset(), args)
|
SetTypeBits(node.Type(), node.FrameOffset(), args)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// allUnsafe indicates that all points in this function are
|
// IsUnsafe indicates that all points in this function are
|
||||||
// unsafe-points.
|
// unsafe-points.
|
||||||
func allUnsafe(f *ssa.Func) bool {
|
func IsUnsafe(f *ssa.Func) bool {
|
||||||
// The runtime assumes the only safe-points are function
|
// The runtime assumes the only safe-points are function
|
||||||
// prologues (because that's how it used to be). We could and
|
// prologues (because that's how it used to be). We could and
|
||||||
// should improve that, but for now keep consider all points
|
// should improve that, but for now keep consider all points
|
||||||
|
|
@ -492,8 +493,8 @@ func allUnsafe(f *ssa.Func) bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
|
// markUnsafePoints finds unsafe points and computes lv.unsafePoints.
|
||||||
func (lv *Liveness) markUnsafePoints() {
|
func (lv *liveness) markUnsafePoints() {
|
||||||
if allUnsafe(lv.f) {
|
if IsUnsafe(lv.f) {
|
||||||
// No complex analysis necessary.
|
// No complex analysis necessary.
|
||||||
lv.allUnsafe = true
|
lv.allUnsafe = true
|
||||||
return
|
return
|
||||||
|
|
@ -655,7 +656,7 @@ func (lv *Liveness) markUnsafePoints() {
|
||||||
// This does not necessarily mean the instruction is a safe-point. In
|
// This does not necessarily mean the instruction is a safe-point. In
|
||||||
// particular, call Values can have a stack map in case the callee
|
// particular, call Values can have a stack map in case the callee
|
||||||
// grows the stack, but not themselves be a safe-point.
|
// grows the stack, but not themselves be a safe-point.
|
||||||
func (lv *Liveness) hasStackMap(v *ssa.Value) bool {
|
func (lv *liveness) hasStackMap(v *ssa.Value) bool {
|
||||||
if !v.Op.IsCall() {
|
if !v.Op.IsCall() {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
@ -671,7 +672,7 @@ func (lv *Liveness) hasStackMap(v *ssa.Value) bool {
|
||||||
// Initializes the sets for solving the live variables. Visits all the
|
// Initializes the sets for solving the live variables. Visits all the
|
||||||
// instructions in each basic block to summarizes the information at each basic
|
// instructions in each basic block to summarizes the information at each basic
|
||||||
// block
|
// block
|
||||||
func (lv *Liveness) prologue() {
|
func (lv *liveness) prologue() {
|
||||||
lv.initcache()
|
lv.initcache()
|
||||||
|
|
||||||
for _, b := range lv.f.Blocks {
|
for _, b := range lv.f.Blocks {
|
||||||
|
|
@ -693,7 +694,7 @@ func (lv *Liveness) prologue() {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Solve the liveness dataflow equations.
|
// Solve the liveness dataflow equations.
|
||||||
func (lv *Liveness) solve() {
|
func (lv *liveness) solve() {
|
||||||
// These temporary bitvectors exist to avoid successive allocations and
|
// These temporary bitvectors exist to avoid successive allocations and
|
||||||
// frees within the loop.
|
// frees within the loop.
|
||||||
nvars := int32(len(lv.vars))
|
nvars := int32(len(lv.vars))
|
||||||
|
|
@ -753,7 +754,7 @@ func (lv *Liveness) solve() {
|
||||||
|
|
||||||
// Visits all instructions in a basic block and computes a bit vector of live
|
// Visits all instructions in a basic block and computes a bit vector of live
|
||||||
// variables at each safe point locations.
|
// variables at each safe point locations.
|
||||||
func (lv *Liveness) epilogue() {
|
func (lv *liveness) epilogue() {
|
||||||
nvars := int32(len(lv.vars))
|
nvars := int32(len(lv.vars))
|
||||||
liveout := bitvec.New(nvars)
|
liveout := bitvec.New(nvars)
|
||||||
livedefer := bitvec.New(nvars) // always-live variables
|
livedefer := bitvec.New(nvars) // always-live variables
|
||||||
|
|
@ -882,9 +883,9 @@ func (lv *Liveness) epilogue() {
|
||||||
|
|
||||||
// If we have an open-coded deferreturn call, make a liveness map for it.
|
// If we have an open-coded deferreturn call, make a liveness map for it.
|
||||||
if lv.fn.OpenCodedDeferDisallowed() {
|
if lv.fn.OpenCodedDeferDisallowed() {
|
||||||
lv.livenessMap.deferreturn = objw.LivenessDontCare
|
lv.livenessMap.DeferReturn = objw.LivenessDontCare
|
||||||
} else {
|
} else {
|
||||||
lv.livenessMap.deferreturn = objw.LivenessIndex{
|
lv.livenessMap.DeferReturn = objw.LivenessIndex{
|
||||||
StackMapIndex: lv.stackMapSet.add(livedefer),
|
StackMapIndex: lv.stackMapSet.add(livedefer),
|
||||||
IsUnsafePoint: false,
|
IsUnsafePoint: false,
|
||||||
}
|
}
|
||||||
|
|
@ -920,7 +921,7 @@ func (lv *Liveness) epilogue() {
|
||||||
// is actually a net loss: we save about 50k of argument bitmaps but the new
|
// is actually a net loss: we save about 50k of argument bitmaps but the new
|
||||||
// PCDATA tables cost about 100k. So for now we keep using a single index for
|
// PCDATA tables cost about 100k. So for now we keep using a single index for
|
||||||
// both bitmap lists.
|
// both bitmap lists.
|
||||||
func (lv *Liveness) compact(b *ssa.Block) {
|
func (lv *liveness) compact(b *ssa.Block) {
|
||||||
pos := 0
|
pos := 0
|
||||||
if b == lv.f.Entry {
|
if b == lv.f.Entry {
|
||||||
// Handle entry stack map.
|
// Handle entry stack map.
|
||||||
|
|
@ -944,7 +945,7 @@ func (lv *Liveness) compact(b *ssa.Block) {
|
||||||
lv.livevars = lv.livevars[:0]
|
lv.livevars = lv.livevars[:0]
|
||||||
}
|
}
|
||||||
|
|
||||||
func (lv *Liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
|
func (lv *liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
|
||||||
if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
|
if base.Flag.Live == 0 || ir.FuncName(lv.fn) == "init" || strings.HasPrefix(ir.FuncName(lv.fn), ".") {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
@ -984,7 +985,7 @@ func (lv *Liveness) showlive(v *ssa.Value, live bitvec.BitVec) {
|
||||||
base.WarnfAt(pos, s)
|
base.WarnfAt(pos, s)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (lv *Liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
|
func (lv *liveness) printbvec(printed bool, name string, live bitvec.BitVec) bool {
|
||||||
if live.IsEmpty() {
|
if live.IsEmpty() {
|
||||||
return printed
|
return printed
|
||||||
}
|
}
|
||||||
|
|
@ -1008,7 +1009,7 @@ func (lv *Liveness) printbvec(printed bool, name string, live bitvec.BitVec) boo
|
||||||
}
|
}
|
||||||
|
|
||||||
// printeffect is like printbvec, but for valueEffects.
|
// printeffect is like printbvec, but for valueEffects.
|
||||||
func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
|
func (lv *liveness) printeffect(printed bool, name string, pos int32, x bool) bool {
|
||||||
if !x {
|
if !x {
|
||||||
return printed
|
return printed
|
||||||
}
|
}
|
||||||
|
|
@ -1028,7 +1029,7 @@ func (lv *Liveness) printeffect(printed bool, name string, pos int32, x bool) bo
|
||||||
// Prints the computed liveness information and inputs, for debugging.
|
// Prints the computed liveness information and inputs, for debugging.
|
||||||
// This format synthesizes the information used during the multiple passes
|
// This format synthesizes the information used during the multiple passes
|
||||||
// into a single presentation.
|
// into a single presentation.
|
||||||
func (lv *Liveness) printDebug() {
|
func (lv *liveness) printDebug() {
|
||||||
fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))
|
fmt.Printf("liveness: %s\n", ir.FuncName(lv.fn))
|
||||||
|
|
||||||
for i, b := range lv.f.Blocks {
|
for i, b := range lv.f.Blocks {
|
||||||
|
|
@ -1137,7 +1138,7 @@ func (lv *Liveness) printDebug() {
|
||||||
// first word dumped is the total number of bitmaps. The second word is the
|
// first word dumped is the total number of bitmaps. The second word is the
|
||||||
// length of the bitmaps. All bitmaps are assumed to be of equal length. The
|
// length of the bitmaps. All bitmaps are assumed to be of equal length. The
|
||||||
// remaining bytes are the raw bitmaps.
|
// remaining bytes are the raw bitmaps.
|
||||||
func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
|
func (lv *liveness) emit() (argsSym, liveSym *obj.LSym) {
|
||||||
// Size args bitmaps to be just large enough to hold the largest pointer.
|
// Size args bitmaps to be just large enough to hold the largest pointer.
|
||||||
// First, find the largest Xoffset node we care about.
|
// First, find the largest Xoffset node we care about.
|
||||||
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
|
// (Nodes without pointers aren't in lv.vars; see livenessShouldTrack.)
|
||||||
|
|
@ -1201,11 +1202,11 @@ func (lv *Liveness) emit() (argsSym, liveSym *obj.LSym) {
|
||||||
return makeSym(&argsSymTmp), makeSym(&liveSymTmp)
|
return makeSym(&argsSymTmp), makeSym(&liveSymTmp)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Entry pointer for liveness analysis. Solves for the liveness of
|
// Entry pointer for Compute analysis. Solves for the Compute of
|
||||||
// pointer variables in the function and emits a runtime data
|
// pointer variables in the function and emits a runtime data
|
||||||
// structure read by the garbage collector.
|
// structure read by the garbage collector.
|
||||||
// Returns a map from GC safe points to their corresponding stack map index.
|
// Returns a map from GC safe points to their corresponding stack map index.
|
||||||
func liveness(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) LivenessMap {
|
func Compute(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) Map {
|
||||||
// Construct the global liveness state.
|
// Construct the global liveness state.
|
||||||
vars, idx := getvariables(curfn)
|
vars, idx := getvariables(curfn)
|
||||||
lv := newliveness(curfn, f, vars, idx, stkptrsize)
|
lv := newliveness(curfn, f, vars, idx, stkptrsize)
|
||||||
|
|
@ -1233,11 +1234,11 @@ func liveness(curfn *ir.Func, f *ssa.Func, stkptrsize int64, pp *objw.Progs) Liv
|
||||||
cache := f.Cache.Liveness.(*livenessFuncCache)
|
cache := f.Cache.Liveness.(*livenessFuncCache)
|
||||||
if cap(lv.be) < 2000 { // Threshold from ssa.Cache slices.
|
if cap(lv.be) < 2000 { // Threshold from ssa.Cache slices.
|
||||||
for i := range lv.be {
|
for i := range lv.be {
|
||||||
lv.be[i] = BlockEffects{}
|
lv.be[i] = blockEffects{}
|
||||||
}
|
}
|
||||||
cache.be = lv.be
|
cache.be = lv.be
|
||||||
}
|
}
|
||||||
if len(lv.livenessMap.vals) < 2000 {
|
if len(lv.livenessMap.Vals) < 2000 {
|
||||||
cache.livenessMap = lv.livenessMap
|
cache.livenessMap = lv.livenessMap
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -1298,3 +1299,33 @@ func isfat(t *types.Type) bool {
|
||||||
|
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func WriteFuncMap(fn *ir.Func) {
|
||||||
|
if ir.FuncName(fn) == "_" || fn.Sym().Linkname != "" {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
lsym := base.Ctxt.Lookup(fn.LSym.Name + ".args_stackmap")
|
||||||
|
nptr := int(fn.Type().ArgWidth() / int64(types.PtrSize))
|
||||||
|
bv := bitvec.New(int32(nptr) * 2)
|
||||||
|
nbitmap := 1
|
||||||
|
if fn.Type().NumResults() > 0 {
|
||||||
|
nbitmap = 2
|
||||||
|
}
|
||||||
|
off := objw.Uint32(lsym, 0, uint32(nbitmap))
|
||||||
|
off = objw.Uint32(lsym, off, uint32(bv.N))
|
||||||
|
|
||||||
|
if ir.IsMethod(fn) {
|
||||||
|
SetTypeBits(fn.Type().Recvs(), 0, bv)
|
||||||
|
}
|
||||||
|
if fn.Type().NumParams() > 0 {
|
||||||
|
SetTypeBits(fn.Type().Params(), 0, bv)
|
||||||
|
}
|
||||||
|
off = objw.BitVec(lsym, off, bv)
|
||||||
|
|
||||||
|
if fn.Type().NumResults() > 0 {
|
||||||
|
SetTypeBits(fn.Type().Results(), 0, bv)
|
||||||
|
off = objw.BitVec(lsym, off, bv)
|
||||||
|
}
|
||||||
|
|
||||||
|
objw.Global(lsym, int32(off), obj.RODATA|obj.LOCAL)
|
||||||
|
}
|
||||||
|
|
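WriteFuncMap is the old gc.emitptrargsmap: for functions the compiler has no body for (assembly or linkname implementations), it publishes a FUNC.args_stackmap symbol recording which argument and result words hold pointers, so the runtime can still scan such frames. From the objw calls above, the emitted symbol has roughly this layout (a descriptive sketch, not code from the CL):

    // NAME.args_stackmap, as written by WriteFuncMap above:
    //
    //   uint32   nbitmap   // 1, or 2 if the function has results
    //   uint32   nbit      // bits per bitmap (bv.N == 2*nptr)
    //   bitmap#1           // pointer bits for the receiver and parameters
    //   bitmap#2           // only when nbitmap == 2: written after the
    //                      // result bits are added to the same bit vector
    //
    // objw.Global marks the symbol RODATA|LOCAL, so it stays local to the object file.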
@@ -5,6 +5,7 @@
 package ssa

 import (
+    "cmd/compile/internal/ir"
     "cmd/compile/internal/types"
     "cmd/internal/src"
     "fmt"

@@ -495,3 +496,13 @@ func (v *Value) removeable() bool {

 // TODO(mdempsky): Shouldn't be necessary; see discussion at golang.org/cl/275756
 func (*Value) CanBeAnSSAAux() {}
+
+// AutoVar returns a *Name and int64 representing the auto variable and offset within it
+// where v should be spilled.
+func AutoVar(v *Value) (*ir.Name, int64) {
+    loc := v.Block.Func.RegAlloc[v.ID].(LocalSlot)
+    if v.Type.Size() > loc.Type.Size() {
+        v.Fatalf("spill/restore type %s doesn't fit in slot type %s", v.Type, loc.Type)
+    }
+    return loc.N, loc.Off
+}